From: Srikanth Yalavarthi <syalavarthi@marvell.com>
To: Srikanth Yalavarthi <syalavarthi@marvell.com>
Cc: <dev@dpdk.org>, <sshankarnara@marvell.com>, <jerinj@marvell.com>,
	<aprabhu@marvell.com>, <ptakkar@marvell.com>,
	<pshukla@marvell.com>
Subject: [PATCH v7 05/11] app/mldev: add ordered inference test case
Date: Thu, 16 Mar 2023 14:14:28 -0700	[thread overview]
Message-ID: <20230316211434.13409-6-syalavarthi@marvell.com> (raw)
In-Reply-To: <20230316211434.13409-1-syalavarthi@marvell.com>

Added an ordered test case to execute inferences with single
or multiple models. In this test case, inference requests for
a model are enqueued after completion of all requests for the
previous model. The test supports inference repetitions.

Operation sequence when testing with N models and R repetitions:

(load -> start -> (enqueue + dequeue) x R -> stop -> unload) x N

The test case can be executed by selecting the "inference_ordered"
test, and the number of repetitions can be specified through the
"--repetitions" argument.
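
An illustrative invocation with a single model, matching the example
added to the documentation in this patch (the PCI ID and file names
below are placeholders):

    sudo <build_dir>/app/dpdk-test-mldev -c 0xf -a <PCI_ID> -- \
        --test=inference_ordered \
        --filelist model.bin,input.bin,output.bin --repetitions 100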

Signed-off-by: Srikanth Yalavarthi <syalavarthi@marvell.com>
Acked-by: Anup Prabhu <aprabhu@marvell.com>
---
 app/test-mldev/meson.build                    |   2 +
 app/test-mldev/ml_options.c                   |  65 ++
 app/test-mldev/ml_options.h                   |  17 +-
 app/test-mldev/test_inference_common.c        | 567 ++++++++++++++++++
 app/test-mldev/test_inference_common.h        |  61 ++
 app/test-mldev/test_inference_ordered.c       | 115 ++++
 app/test-mldev/test_model_common.h            |  10 +
 .../tools/img/mldev_inference_ordered.svg     | 528 ++++++++++++++++
 doc/guides/tools/testmldev.rst                |  88 ++-
 9 files changed, 1445 insertions(+), 8 deletions(-)
 create mode 100644 app/test-mldev/test_inference_common.c
 create mode 100644 app/test-mldev/test_inference_common.h
 create mode 100644 app/test-mldev/test_inference_ordered.c
 create mode 100644 doc/guides/tools/img/mldev_inference_ordered.svg

diff --git a/app/test-mldev/meson.build b/app/test-mldev/meson.build
index b09e1ccc8a..475d76d126 100644
--- a/app/test-mldev/meson.build
+++ b/app/test-mldev/meson.build
@@ -16,6 +16,8 @@ sources = files(
         'test_device_ops.c',
         'test_model_common.c',
         'test_model_ops.c',
+        'test_inference_common.c',
+        'test_inference_ordered.c',
 )
 
 deps += ['mldev']
diff --git a/app/test-mldev/ml_options.c b/app/test-mldev/ml_options.c
index 8ffbab7f75..7b56bca90e 100644
--- a/app/test-mldev/ml_options.c
+++ b/app/test-mldev/ml_options.c
@@ -23,6 +23,7 @@ ml_options_default(struct ml_options *opt)
 	opt->dev_id = 0;
 	opt->socket_id = SOCKET_ID_ANY;
 	opt->nb_filelist = 0;
+	opt->repetitions = 1;
 	opt->debug = false;
 }
 
@@ -90,6 +91,60 @@ ml_parse_models(struct ml_options *opt, const char *arg)
 	return ret;
 }
 
+static int
+ml_parse_filelist(struct ml_options *opt, const char *arg)
+{
+	const char *delim = ",";
+	char filelist[PATH_MAX];
+	char *token;
+
+	if (opt->nb_filelist >= ML_TEST_MAX_MODELS) {
+		ml_err("Exceeded filelist count, max = %d\n", ML_TEST_MAX_MODELS);
+		return -1;
+	}
+
+	strlcpy(filelist, arg, PATH_MAX);
+
+	/* model */
+	token = strtok(filelist, delim);
+	if (token == NULL) {
+		ml_err("Invalid filelist, model not specified = %s\n", arg);
+		return -EINVAL;
+	}
+	strlcpy(opt->filelist[opt->nb_filelist].model, token, PATH_MAX);
+
+	/* input */
+	token = strtok(NULL, delim);
+	if (token == NULL) {
+		ml_err("Invalid filelist, input not specified = %s\n", arg);
+		return -EINVAL;
+	}
+	strlcpy(opt->filelist[opt->nb_filelist].input, token, PATH_MAX);
+
+	/* output */
+	token = strtok(NULL, delim);
+	if (token == NULL) {
+		ml_err("Invalid filelist, output not specified = %s\n", arg);
+		return -EINVAL;
+	}
+	strlcpy(opt->filelist[opt->nb_filelist].output, token, PATH_MAX);
+
+	opt->nb_filelist++;
+
+	if (opt->nb_filelist == 0) {
+		ml_err("Empty filelist. Need at least one filelist entry for the test.");
+		return -EINVAL;
+	}
+
+	return 0;
+}
+
+static int
+ml_parse_repetitions(struct ml_options *opt, const char *arg)
+{
+	return parser_read_uint64(&opt->repetitions, arg);
+}
+
 static void
 ml_dump_test_options(const char *testname)
 {
@@ -100,6 +155,12 @@ ml_dump_test_options(const char *testname)
 		printf("\t\t--models           : comma separated list of models\n");
 		printf("\n");
 	}
+
+	if (strcmp(testname, "inference_ordered") == 0) {
+		printf("\t\t--filelist         : comma separated list of model, input and output\n"
+		       "\t\t--repetitions      : number of inference repetitions\n");
+		printf("\n");
+	}
 }
 
 static void
@@ -122,6 +183,8 @@ static struct option lgopts[] = {
 	{ML_DEVICE_ID, 1, 0, 0},
 	{ML_SOCKET_ID, 1, 0, 0},
 	{ML_MODELS, 1, 0, 0},
+	{ML_FILELIST, 1, 0, 0},
+	{ML_REPETITIONS, 1, 0, 0},
 	{ML_DEBUG, 0, 0, 0},
 	{ML_HELP, 0, 0, 0},
 	{NULL, 0, 0, 0}};
@@ -136,6 +199,8 @@ ml_opts_parse_long(int opt_idx, struct ml_options *opt)
 		{ML_DEVICE_ID, ml_parse_dev_id},
 		{ML_SOCKET_ID, ml_parse_socket_id},
 		{ML_MODELS, ml_parse_models},
+		{ML_FILELIST, ml_parse_filelist},
+		{ML_REPETITIONS, ml_parse_repetitions},
 	};
 
 	for (i = 0; i < RTE_DIM(parsermap); i++) {
diff --git a/app/test-mldev/ml_options.h b/app/test-mldev/ml_options.h
index 61e938d2e2..6a13f97a30 100644
--- a/app/test-mldev/ml_options.h
+++ b/app/test-mldev/ml_options.h
@@ -12,15 +12,19 @@
 #define ML_TEST_MAX_MODELS   8
 
 /* Options names */
-#define ML_TEST	     ("test")
-#define ML_DEVICE_ID ("dev_id")
-#define ML_SOCKET_ID ("socket_id")
-#define ML_MODELS    ("models")
-#define ML_DEBUG     ("debug")
-#define ML_HELP	     ("help")
+#define ML_TEST	       ("test")
+#define ML_DEVICE_ID   ("dev_id")
+#define ML_SOCKET_ID   ("socket_id")
+#define ML_MODELS      ("models")
+#define ML_FILELIST    ("filelist")
+#define ML_REPETITIONS ("repetitions")
+#define ML_DEBUG       ("debug")
+#define ML_HELP	       ("help")
 
 struct ml_filelist {
 	char model[PATH_MAX];
+	char input[PATH_MAX];
+	char output[PATH_MAX];
 };
 
 struct ml_options {
@@ -29,6 +33,7 @@ struct ml_options {
 	int socket_id;
 	struct ml_filelist filelist[ML_TEST_MAX_MODELS];
 	uint8_t nb_filelist;
+	uint64_t repetitions;
 	bool debug;
 };
 
diff --git a/app/test-mldev/test_inference_common.c b/app/test-mldev/test_inference_common.c
new file mode 100644
index 0000000000..6a6999d524
--- /dev/null
+++ b/app/test-mldev/test_inference_common.c
@@ -0,0 +1,567 @@
+/* SPDX-License-Identifier: BSD-3-Clause
+ * Copyright (c) 2022 Marvell.
+ */
+
+#include <errno.h>
+#include <unistd.h>
+
+#include <rte_common.h>
+#include <rte_launch.h>
+#include <rte_lcore.h>
+#include <rte_malloc.h>
+#include <rte_memzone.h>
+#include <rte_mldev.h>
+
+#include "ml_common.h"
+#include "test_inference_common.h"
+
+/* Enqueue inference requests with burst size equal to 1 */
+static int
+ml_enqueue_single(void *arg)
+{
+	struct test_inference *t = ml_test_priv((struct ml_test *)arg);
+	struct ml_request *req = NULL;
+	struct rte_ml_op *op = NULL;
+	struct ml_core_args *args;
+	uint64_t model_enq = 0;
+	uint32_t burst_enq;
+	uint32_t lcore_id;
+	uint16_t fid;
+	int ret;
+
+	lcore_id = rte_lcore_id();
+	args = &t->args[lcore_id];
+	model_enq = 0;
+
+	if (args->nb_reqs == 0)
+		return 0;
+
+next_rep:
+	fid = args->start_fid;
+
+next_model:
+	ret = rte_mempool_get(t->op_pool, (void **)&op);
+	if (ret != 0)
+		goto next_model;
+
+retry:
+	ret = rte_mempool_get(t->model[fid].io_pool, (void **)&req);
+	if (ret != 0)
+		goto retry;
+
+	op->model_id = t->model[fid].id;
+	op->nb_batches = t->model[fid].info.batch_size;
+	op->mempool = t->op_pool;
+
+	op->input.addr = req->input;
+	op->input.length = t->model[fid].inp_qsize;
+	op->input.next = NULL;
+
+	op->output.addr = req->output;
+	op->output.length = t->model[fid].out_qsize;
+	op->output.next = NULL;
+
+	op->user_ptr = req;
+	req->niters++;
+	req->fid = fid;
+
+enqueue_req:
+	burst_enq = rte_ml_enqueue_burst(t->cmn.opt->dev_id, 0, &op, 1);
+	if (burst_enq == 0)
+		goto enqueue_req;
+
+	fid++;
+	if (likely(fid <= args->end_fid))
+		goto next_model;
+
+	model_enq++;
+	if (likely(model_enq < args->nb_reqs))
+		goto next_rep;
+
+	return 0;
+}
+
+/* Dequeue inference requests with burst size equal to 1 */
+static int
+ml_dequeue_single(void *arg)
+{
+	struct test_inference *t = ml_test_priv((struct ml_test *)arg);
+	struct rte_ml_op_error error;
+	struct rte_ml_op *op = NULL;
+	struct ml_core_args *args;
+	struct ml_request *req;
+	uint64_t total_deq = 0;
+	uint8_t nb_filelist;
+	uint32_t burst_deq;
+	uint32_t lcore_id;
+
+	lcore_id = rte_lcore_id();
+	args = &t->args[lcore_id];
+	nb_filelist = args->end_fid - args->start_fid + 1;
+
+	if (args->nb_reqs == 0)
+		return 0;
+
+dequeue_req:
+	burst_deq = rte_ml_dequeue_burst(t->cmn.opt->dev_id, 0, &op, 1);
+
+	if (likely(burst_deq == 1)) {
+		total_deq += burst_deq;
+		if (unlikely(op->status == RTE_ML_OP_STATUS_ERROR)) {
+			rte_ml_op_error_get(t->cmn.opt->dev_id, op, &error);
+			ml_err("error_code = 0x%" PRIx64 ", error_message = %s\n", error.errcode,
+			       error.message);
+			t->error_count[lcore_id]++;
+		}
+		req = (struct ml_request *)op->user_ptr;
+		rte_mempool_put(t->model[req->fid].io_pool, req);
+		rte_mempool_put(t->op_pool, op);
+	}
+
+	if (likely(total_deq < args->nb_reqs * nb_filelist))
+		goto dequeue_req;
+
+	return 0;
+}
+
+bool
+test_inference_cap_check(struct ml_options *opt)
+{
+	struct rte_ml_dev_info dev_info;
+
+	if (!ml_test_cap_check(opt))
+		return false;
+
+	rte_ml_dev_info_get(opt->dev_id, &dev_info);
+	if (opt->nb_filelist > dev_info.max_models) {
+		ml_err("Insufficient capabilities: Filelist count exceeded device limit, count = %u (max limit = %u)",
+		       opt->nb_filelist, dev_info.max_models);
+		return false;
+	}
+
+	return true;
+}
+
+int
+test_inference_opt_check(struct ml_options *opt)
+{
+	uint32_t i;
+	int ret;
+
+	/* check common opts */
+	ret = ml_test_opt_check(opt);
+	if (ret != 0)
+		return ret;
+
+	/* check file availability */
+	for (i = 0; i < opt->nb_filelist; i++) {
+		if (access(opt->filelist[i].model, F_OK) == -1) {
+			ml_err("Model file not accessible: id = %u, file = %s", i,
+			       opt->filelist[i].model);
+			return -ENOENT;
+		}
+
+		if (access(opt->filelist[i].input, F_OK) == -1) {
+			ml_err("Input file not accessible: id = %u, file = %s", i,
+			       opt->filelist[i].input);
+			return -ENOENT;
+		}
+	}
+
+	if (opt->repetitions == 0) {
+		ml_err("Invalid option, repetitions = %" PRIu64 "\n", opt->repetitions);
+		return -EINVAL;
+	}
+
+	/* check number of available lcores. */
+	if (rte_lcore_count() < 3) {
+		ml_err("Insufficient lcores = %u\n", rte_lcore_count());
+		ml_err("Minimum lcores required to create %u queue-pairs = %u\n", 1, 3);
+		return -EINVAL;
+	}
+
+	return 0;
+}
+
+void
+test_inference_opt_dump(struct ml_options *opt)
+{
+	uint32_t i;
+
+	/* dump common opts */
+	ml_test_opt_dump(opt);
+
+	/* dump test opts */
+	ml_dump("repetitions", "%" PRIu64, opt->repetitions);
+
+	ml_dump_begin("filelist");
+	for (i = 0; i < opt->nb_filelist; i++) {
+		ml_dump_list("model", i, opt->filelist[i].model);
+		ml_dump_list("input", i, opt->filelist[i].input);
+		ml_dump_list("output", i, opt->filelist[i].output);
+	}
+	ml_dump_end;
+}
+
+int
+test_inference_setup(struct ml_test *test, struct ml_options *opt)
+{
+	struct test_inference *t;
+	void *test_inference;
+	int ret = 0;
+	uint32_t i;
+
+	test_inference = rte_zmalloc_socket(test->name, sizeof(struct test_inference),
+					    RTE_CACHE_LINE_SIZE, opt->socket_id);
+	if (test_inference == NULL) {
+		ml_err("failed to allocate memory for test_inference");
+		ret = -ENOMEM;
+		goto error;
+	}
+	test->test_priv = test_inference;
+	t = ml_test_priv(test);
+
+	t->nb_used = 0;
+	t->cmn.result = ML_TEST_FAILED;
+	t->cmn.opt = opt;
+	memset(t->error_count, 0, RTE_MAX_LCORE * sizeof(uint64_t));
+
+	/* get device info */
+	ret = rte_ml_dev_info_get(opt->dev_id, &t->cmn.dev_info);
+	if (ret < 0) {
+		ml_err("failed to get device info");
+		goto error;
+	}
+
+	t->enqueue = ml_enqueue_single;
+	t->dequeue = ml_dequeue_single;
+
+	/* set model initial state */
+	for (i = 0; i < opt->nb_filelist; i++)
+		t->model[i].state = MODEL_INITIAL;
+
+	return 0;
+
+error:
+	if (test_inference != NULL)
+		rte_free(test_inference);
+
+	return ret;
+}
+
+void
+test_inference_destroy(struct ml_test *test, struct ml_options *opt)
+{
+	struct test_inference *t;
+
+	RTE_SET_USED(opt);
+
+	t = ml_test_priv(test);
+	if (t != NULL)
+		rte_free(t);
+}
+
+int
+ml_inference_mldev_setup(struct ml_test *test, struct ml_options *opt)
+{
+	struct rte_ml_dev_qp_conf qp_conf;
+	struct test_inference *t;
+	int ret;
+
+	t = ml_test_priv(test);
+
+	ret = ml_test_device_configure(test, opt);
+	if (ret != 0)
+		return ret;
+
+	/* setup queue pairs */
+	qp_conf.nb_desc = t->cmn.dev_info.max_desc;
+	qp_conf.cb = NULL;
+
+	ret = rte_ml_dev_queue_pair_setup(opt->dev_id, 0, &qp_conf, opt->socket_id);
+	if (ret != 0) {
+		ml_err("Failed to setup ml device queue-pair, dev_id = %d, qp_id = %u\n",
+		       opt->dev_id, 0);
+		goto error;
+	}
+
+	ret = ml_test_device_start(test, opt);
+	if (ret != 0)
+		goto error;
+
+	return 0;
+
+error:
+	ml_test_device_close(test, opt);
+
+	return ret;
+}
+
+int
+ml_inference_mldev_destroy(struct ml_test *test, struct ml_options *opt)
+{
+	int ret;
+
+	ret = ml_test_device_stop(test, opt);
+	if (ret != 0)
+		goto error;
+
+	ret = ml_test_device_close(test, opt);
+	if (ret != 0)
+		return ret;
+
+	return 0;
+
+error:
+	ml_test_device_close(test, opt);
+
+	return ret;
+}
+
+/* Callback for IO pool create. This function would compute the fields of ml_request
+ * structure and prepare the quantized input data.
+ */
+static void
+ml_request_initialize(struct rte_mempool *mp, void *opaque, void *obj, unsigned int obj_idx)
+{
+	struct test_inference *t = ml_test_priv((struct ml_test *)opaque);
+	struct ml_request *req = (struct ml_request *)obj;
+
+	RTE_SET_USED(mp);
+	RTE_SET_USED(obj_idx);
+
+	req->input = (uint8_t *)obj +
+		     RTE_ALIGN_CEIL(sizeof(struct ml_request), t->cmn.dev_info.min_align_size);
+	req->output = req->input +
+		      RTE_ALIGN_CEIL(t->model[t->fid].inp_qsize, t->cmn.dev_info.min_align_size);
+	req->niters = 0;
+
+	/* quantize data */
+	rte_ml_io_quantize(t->cmn.opt->dev_id, t->model[t->fid].id,
+			   t->model[t->fid].info.batch_size, t->model[t->fid].input, req->input);
+}
+
+int
+ml_inference_iomem_setup(struct ml_test *test, struct ml_options *opt, uint16_t fid)
+{
+	struct test_inference *t = ml_test_priv(test);
+	char mz_name[RTE_MEMZONE_NAMESIZE];
+	char mp_name[RTE_MEMPOOL_NAMESIZE];
+	const struct rte_memzone *mz;
+	uint64_t nb_buffers;
+	uint32_t buff_size;
+	uint32_t mz_size;
+	uint32_t fsize;
+	FILE *fp;
+	int ret;
+
+	/* get input buffer size */
+	ret = rte_ml_io_input_size_get(opt->dev_id, t->model[fid].id, t->model[fid].info.batch_size,
+				       &t->model[fid].inp_qsize, &t->model[fid].inp_dsize);
+	if (ret != 0) {
+		ml_err("Failed to get input size, model : %s\n", opt->filelist[fid].model);
+		return ret;
+	}
+
+	/* get output buffer size */
+	ret = rte_ml_io_output_size_get(opt->dev_id, t->model[fid].id,
+					t->model[fid].info.batch_size, &t->model[fid].out_qsize,
+					&t->model[fid].out_dsize);
+	if (ret != 0) {
+		ml_err("Failed to get output size, model : %s\n", opt->filelist[fid].model);
+		return ret;
+	}
+
+	/* allocate buffer for user data */
+	mz_size = t->model[fid].inp_dsize + t->model[fid].out_dsize;
+	sprintf(mz_name, "ml_user_data_%d", fid);
+	mz = rte_memzone_reserve(mz_name, mz_size, opt->socket_id, 0);
+	if (mz == NULL) {
+		ml_err("Memzone allocation failed for ml_user_data\n");
+		ret = -ENOMEM;
+		goto error;
+	}
+
+	t->model[fid].input = mz->addr;
+	t->model[fid].output = t->model[fid].input + t->model[fid].inp_dsize;
+
+	/* load input file */
+	fp = fopen(opt->filelist[fid].input, "r");
+	if (fp == NULL) {
+		ml_err("Failed to open input file : %s\n", opt->filelist[fid].input);
+		ret = -errno;
+		goto error;
+	}
+
+	fseek(fp, 0, SEEK_END);
+	fsize = ftell(fp);
+	fseek(fp, 0, SEEK_SET);
+	if (fsize != t->model[fid].inp_dsize) {
+		ml_err("Invalid input file, size = %u (expected size = %" PRIu64 ")\n", fsize,
+		       t->model[fid].inp_dsize);
+		ret = -EINVAL;
+		fclose(fp);
+		goto error;
+	}
+
+	if (fread(t->model[fid].input, 1, t->model[fid].inp_dsize, fp) != t->model[fid].inp_dsize) {
+		ml_err("Failed to read input file : %s\n", opt->filelist[fid].input);
+		ret = -errno;
+		fclose(fp);
+		goto error;
+	}
+	fclose(fp);
+
+	/* create mempool for quantized input and output buffers. ml_request_initialize is
+	 * used as a callback for object creation.
+	 */
+	buff_size = RTE_ALIGN_CEIL(sizeof(struct ml_request), t->cmn.dev_info.min_align_size) +
+		    RTE_ALIGN_CEIL(t->model[fid].inp_qsize, t->cmn.dev_info.min_align_size) +
+		    RTE_ALIGN_CEIL(t->model[fid].out_qsize, t->cmn.dev_info.min_align_size);
+	nb_buffers = RTE_MIN((uint64_t)ML_TEST_MAX_POOL_SIZE, opt->repetitions);
+
+	t->fid = fid;
+	sprintf(mp_name, "ml_io_pool_%d", fid);
+	t->model[fid].io_pool = rte_mempool_create(mp_name, nb_buffers, buff_size, 0, 0, NULL, NULL,
+						   ml_request_initialize, test, opt->socket_id, 0);
+	if (t->model[fid].io_pool == NULL) {
+		ml_err("Failed to create io pool : %s\n", mp_name);
+		ret = -ENOMEM;
+		goto error;
+	}
+
+	return 0;
+
+error:
+	if (mz != NULL)
+		rte_memzone_free(mz);
+
+	if (t->model[fid].io_pool != NULL) {
+		rte_mempool_free(t->model[fid].io_pool);
+		t->model[fid].io_pool = NULL;
+	}
+
+	return ret;
+}
+
+void
+ml_inference_iomem_destroy(struct ml_test *test, struct ml_options *opt, uint16_t fid)
+{
+	char mz_name[RTE_MEMZONE_NAMESIZE];
+	char mp_name[RTE_MEMPOOL_NAMESIZE];
+	const struct rte_memzone *mz;
+	struct rte_mempool *mp;
+
+	RTE_SET_USED(test);
+	RTE_SET_USED(opt);
+
+	/* release user data memzone */
+	sprintf(mz_name, "ml_user_data_%d", fid);
+	mz = rte_memzone_lookup(mz_name);
+	if (mz != NULL)
+		rte_memzone_free(mz);
+
+	/* destroy io pool */
+	sprintf(mp_name, "ml_io_pool_%d", fid);
+	mp = rte_mempool_lookup(mp_name);
+	if (mp != NULL)
+		rte_mempool_free(mp);
+}
+
+int
+ml_inference_mem_setup(struct ml_test *test, struct ml_options *opt)
+{
+	struct test_inference *t = ml_test_priv(test);
+
+	/* create op pool */
+	t->op_pool = rte_ml_op_pool_create("ml_test_op_pool", ML_TEST_MAX_POOL_SIZE, 0, 0,
+					   opt->socket_id);
+	if (t->op_pool == NULL) {
+		ml_err("Failed to create op pool : %s\n", "ml_test_op_pool");
+		return -ENOMEM;
+	}
+
+	return 0;
+}
+
+void
+ml_inference_mem_destroy(struct ml_test *test, struct ml_options *opt)
+{
+	struct test_inference *t = ml_test_priv(test);
+
+	RTE_SET_USED(opt);
+
+	/* release op pool */
+	if (t->op_pool != NULL)
+		rte_mempool_free(t->op_pool);
+}
+
+/* Callback for mempool object iteration. This call would dequantize output data. */
+static void
+ml_request_finish(struct rte_mempool *mp, void *opaque, void *obj, unsigned int obj_idx)
+{
+	struct test_inference *t = ml_test_priv((struct ml_test *)opaque);
+	struct ml_request *req = (struct ml_request *)obj;
+	struct ml_model *model = &t->model[req->fid];
+
+	RTE_SET_USED(mp);
+	RTE_SET_USED(obj_idx);
+
+	if (req->niters == 0)
+		return;
+
+	t->nb_used++;
+	rte_ml_io_dequantize(t->cmn.opt->dev_id, model->id, t->model[req->fid].info.batch_size,
+			     req->output, model->output);
+}
+
+int
+ml_inference_result(struct ml_test *test, struct ml_options *opt, uint16_t fid)
+{
+	struct test_inference *t = ml_test_priv(test);
+	uint64_t error_count = 0;
+	uint32_t i;
+
+	RTE_SET_USED(opt);
+
+	/* check for errors */
+	for (i = 0; i < RTE_MAX_LCORE; i++)
+		error_count += t->error_count[i];
+
+	rte_mempool_obj_iter(t->model[fid].io_pool, ml_request_finish, test);
+
+	if ((t->nb_used > 0) && (error_count == 0))
+		t->cmn.result = ML_TEST_SUCCESS;
+	else
+		t->cmn.result = ML_TEST_FAILED;
+
+	return t->cmn.result;
+}
+
+int
+ml_inference_launch_cores(struct ml_test *test, struct ml_options *opt, uint16_t start_fid,
+			  uint16_t end_fid)
+{
+	struct test_inference *t = ml_test_priv(test);
+	uint32_t lcore_id;
+	uint32_t id = 0;
+
+	RTE_LCORE_FOREACH_WORKER(lcore_id)
+	{
+		if (id == 2)
+			break;
+
+		t->args[lcore_id].nb_reqs = opt->repetitions;
+		t->args[lcore_id].start_fid = start_fid;
+		t->args[lcore_id].end_fid = end_fid;
+
+		if (id % 2 == 0)
+			rte_eal_remote_launch(t->enqueue, test, lcore_id);
+		else
+			rte_eal_remote_launch(t->dequeue, test, lcore_id);
+
+		id++;
+	}
+
+	return 0;
+}
diff --git a/app/test-mldev/test_inference_common.h b/app/test-mldev/test_inference_common.h
new file mode 100644
index 0000000000..abb20fc9fb
--- /dev/null
+++ b/app/test-mldev/test_inference_common.h
@@ -0,0 +1,61 @@
+/* SPDX-License-Identifier: BSD-3-Clause
+ * Copyright (c) 2022 Marvell.
+ */
+
+#ifndef _ML_TEST_INFERENCE_COMMON_
+#define _ML_TEST_INFERENCE_COMMON_
+
+#include <rte_common.h>
+#include <rte_mempool.h>
+#include <rte_mldev.h>
+
+#include "test_model_common.h"
+
+struct ml_request {
+	uint8_t *input;
+	uint8_t *output;
+	uint16_t fid;
+	uint64_t niters;
+};
+
+struct ml_core_args {
+	uint64_t nb_reqs;
+	uint16_t start_fid;
+	uint16_t end_fid;
+};
+
+struct test_inference {
+	/* common data */
+	struct test_common cmn;
+
+	/* test specific data */
+	struct ml_model model[ML_TEST_MAX_MODELS];
+	struct rte_mempool *op_pool;
+
+	uint64_t nb_used;
+	uint16_t fid;
+
+	int (*enqueue)(void *arg);
+	int (*dequeue)(void *arg);
+
+	struct ml_core_args args[RTE_MAX_LCORE];
+	uint64_t error_count[RTE_MAX_LCORE];
+} __rte_cache_aligned;
+
+bool test_inference_cap_check(struct ml_options *opt);
+int test_inference_opt_check(struct ml_options *opt);
+void test_inference_opt_dump(struct ml_options *opt);
+int test_inference_setup(struct ml_test *test, struct ml_options *opt);
+void test_inference_destroy(struct ml_test *test, struct ml_options *opt);
+
+int ml_inference_mldev_setup(struct ml_test *test, struct ml_options *opt);
+int ml_inference_mldev_destroy(struct ml_test *test, struct ml_options *opt);
+int ml_inference_iomem_setup(struct ml_test *test, struct ml_options *opt, uint16_t fid);
+void ml_inference_iomem_destroy(struct ml_test *test, struct ml_options *opt, uint16_t fid);
+int ml_inference_mem_setup(struct ml_test *test, struct ml_options *opt);
+void ml_inference_mem_destroy(struct ml_test *test, struct ml_options *opt);
+int ml_inference_result(struct ml_test *test, struct ml_options *opt, uint16_t fid);
+int ml_inference_launch_cores(struct ml_test *test, struct ml_options *opt, uint16_t start_fid,
+			      uint16_t end_fid);
+
+#endif /* _ML_TEST_INFERENCE_COMMON_ */
diff --git a/app/test-mldev/test_inference_ordered.c b/app/test-mldev/test_inference_ordered.c
new file mode 100644
index 0000000000..1cd91dc3d3
--- /dev/null
+++ b/app/test-mldev/test_inference_ordered.c
@@ -0,0 +1,115 @@
+/* SPDX-License-Identifier: BSD-3-Clause
+ * Copyright (c) 2022 Marvell.
+ */
+
+#include <rte_common.h>
+#include <rte_launch.h>
+
+#include "ml_common.h"
+#include "test_inference_common.h"
+
+static int
+test_inference_ordered_driver(struct ml_test *test, struct ml_options *opt)
+{
+	struct test_inference *t;
+	uint16_t fid = 0;
+	int ret = 0;
+
+	t = ml_test_priv(test);
+
+	ret = ml_inference_mldev_setup(test, opt);
+	if (ret != 0)
+		return ret;
+
+	ret = ml_inference_mem_setup(test, opt);
+	if (ret != 0)
+		return ret;
+
+next_model:
+	/* load model */
+	ret = ml_model_load(test, opt, &t->model[fid], fid);
+	if (ret != 0)
+		goto error;
+
+	/* start model */
+	ret = ml_model_start(test, opt, &t->model[fid], fid);
+	if (ret != 0)
+		goto error;
+
+	ret = ml_inference_iomem_setup(test, opt, fid);
+	if (ret != 0)
+		goto error;
+
+	/* launch inferences for one model using available queue pairs */
+	ret = ml_inference_launch_cores(test, opt, fid, fid);
+	if (ret != 0) {
+		ml_err("failed to launch cores");
+		goto error;
+	}
+
+	rte_eal_mp_wait_lcore();
+
+	ret = ml_inference_result(test, opt, fid);
+	if (ret != ML_TEST_SUCCESS)
+		goto error;
+
+	ml_inference_iomem_destroy(test, opt, fid);
+
+	/* stop model */
+	ret = ml_model_stop(test, opt, &t->model[fid], fid);
+	if (ret != 0)
+		goto error;
+
+	/* unload model */
+	ret = ml_model_unload(test, opt, &t->model[fid], fid);
+	if (ret != 0)
+		goto error;
+
+	fid++;
+	if (fid < opt->nb_filelist)
+		goto next_model;
+
+	ml_inference_mem_destroy(test, opt);
+
+	ret = ml_inference_mldev_destroy(test, opt);
+	if (ret != 0)
+		return ret;
+
+	t->cmn.result = ML_TEST_SUCCESS;
+
+	return 0;
+
+error:
+	ml_inference_iomem_destroy(test, opt, fid);
+	ml_inference_mem_destroy(test, opt);
+	ml_model_stop(test, opt, &t->model[fid], fid);
+	ml_model_unload(test, opt, &t->model[fid], fid);
+
+	t->cmn.result = ML_TEST_FAILED;
+
+	return ret;
+}
+
+static int
+test_inference_ordered_result(struct ml_test *test, struct ml_options *opt)
+{
+	struct test_inference *t;
+
+	RTE_SET_USED(opt);
+
+	t = ml_test_priv(test);
+
+	return t->cmn.result;
+}
+
+static const struct ml_test_ops inference_ordered = {
+	.cap_check = test_inference_cap_check,
+	.opt_check = test_inference_opt_check,
+	.opt_dump = test_inference_opt_dump,
+	.test_setup = test_inference_setup,
+	.test_destroy = test_inference_destroy,
+	.test_driver = test_inference_ordered_driver,
+	.test_result = test_inference_ordered_result,
+};
+
+ML_TEST_REGISTER(inference_ordered);
diff --git a/app/test-mldev/test_model_common.h b/app/test-mldev/test_model_common.h
index 74aec0a797..5ee975109d 100644
--- a/app/test-mldev/test_model_common.h
+++ b/app/test-mldev/test_model_common.h
@@ -20,6 +20,16 @@ struct ml_model {
 	uint16_t id;
 	struct rte_ml_model_info info;
 	enum model_state state;
+
+	uint64_t inp_dsize;
+	uint64_t inp_qsize;
+	uint64_t out_dsize;
+	uint64_t out_qsize;
+
+	uint8_t *input;
+	uint8_t *output;
+
+	struct rte_mempool *io_pool;
 };
 
 int ml_model_load(struct ml_test *test, struct ml_options *opt, struct ml_model *model,
diff --git a/doc/guides/tools/img/mldev_inference_ordered.svg b/doc/guides/tools/img/mldev_inference_ordered.svg
new file mode 100644
index 0000000000..12fa6acaec
--- /dev/null
+++ b/doc/guides/tools/img/mldev_inference_ordered.svg
@@ -0,0 +1,528 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- SPDX-License-Identifier: BSD-3-Clause -->
+<!-- Copyright (c) 2022 Marvell. -->
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   width="243mm"
+   height="144mm"
+   viewBox="0 0 243 144"
+   version="1.1"
+   id="svg5"
+   inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
+   sodipodi:docname="inference_ordered.svg"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:xlink="http://www.w3.org/1999/xlink"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:svg="http://www.w3.org/2000/svg">
+  <sodipodi:namedview
+     id="namedview7"
+     pagecolor="#ffffff"
+     bordercolor="#000000"
+     borderopacity="0.25"
+     inkscape:showpageshadow="2"
+     inkscape:pageopacity="0.0"
+     inkscape:pagecheckerboard="0"
+     inkscape:deskcolor="#d1d1d1"
+     inkscape:document-units="mm"
+     showgrid="false"
+     inkscape:zoom="0.74564394"
+     inkscape:cx="488.83922"
+     inkscape:cy="234.69647"
+     inkscape:window-width="1920"
+     inkscape:window-height="1017"
+     inkscape:window-x="1912"
+     inkscape:window-y="-8"
+     inkscape:window-maximized="1"
+     inkscape:current-layer="layer1" />
+  <defs
+     id="defs2">
+    <marker
+       style="overflow:visible"
+       id="RoundedArrow"
+       refX="5"
+       refY="0"
+       orient="auto-start-reverse"
+       inkscape:stockid="RoundedArrow"
+       markerWidth="6.1347523"
+       markerHeight="5.9304948"
+       viewBox="0 0 6.1347524 5.9304951"
+       inkscape:isstock="true"
+       inkscape:collect="always"
+       preserveAspectRatio="xMidYMid">
+      <path
+         transform="scale(0.7)"
+         d="m -0.21114562,-4.1055728 6.42229122,3.21114561 a 1,1 90 0 1 0,1.78885438 L -0.21114562,4.1055728 A 1.236068,1.236068 31.717474 0 1 -2,3 v -6 a 1.236068,1.236068 148.28253 0 1 1.78885438,-1.1055728 z"
+         style="fill:context-stroke;fill-rule:evenodd;stroke:none"
+         id="path1367" />
+    </marker>
+    <linearGradient
+       inkscape:collect="always"
+       id="linearGradient31002">
+      <stop
+         style="stop-color:#fff6d5;stop-opacity:1;"
+         offset="0"
+         id="stop30998" />
+      <stop
+         style="stop-color:#fff6d5;stop-opacity:0;"
+         offset="1"
+         id="stop31000" />
+    </linearGradient>
+    <marker
+       style="overflow:visible"
+       id="TriangleStart"
+       refX="4"
+       refY="0"
+       orient="auto-start-reverse"
+       inkscape:stockid="TriangleStart"
+       markerWidth="5.3244081"
+       markerHeight="6.155385"
+       viewBox="0 0 5.3244081 6.1553851"
+       inkscape:isstock="true"
+       inkscape:collect="always"
+       preserveAspectRatio="xMidYMid">
+      <path
+         transform="scale(0.5)"
+         style="fill:context-stroke;fill-rule:evenodd;stroke:context-stroke;stroke-width:1pt"
+         d="M 5.77,0 -2.88,5 V -5 Z"
+         id="path135" />
+    </marker>
+    <linearGradient
+       inkscape:collect="always"
+       xlink:href="#linearGradient31002"
+       id="linearGradient31004"
+       x1="19.620968"
+       y1="102.90323"
+       x2="279.1532"
+       y2="102.90323"
+       gradientUnits="userSpaceOnUse"
+       gradientTransform="matrix(0.89215122,0,0,0.73190287,13.449912,42.668706)" />
+  </defs>
+  <g
+     inkscape:label="Layer 1"
+     inkscape:groupmode="layer"
+     id="layer1">
+    <g
+       id="g1340"
+       transform="translate(-25.225796,-45.983871)">
+      <rect
+         style="fill:url(#linearGradient31004);fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0.404032;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect27876"
+         width="231.09595"
+         height="132.45081"
+         x="31.177822"
+         y="51.758469"
+         ry="3.5071263" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00d7fb;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1813"
+         width="38.408459"
+         height="45.86002"
+         x="34.901794"
+         y="99.14959"
+         ry="5.2246051"
+         inkscape:connector-avoid="true" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#5d36ff;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 73.310253,115.94935 36.498807,-11.6509"
+         id="path1906"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1813"
+         inkscape:connection-end="#rect1724-0" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#5d36ff;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 73.310253,122.0796 36.117817,1e-5"
+         id="path1908"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1813"
+         inkscape:connection-end="#rect1724-4-8" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#5d36ff;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="M 73.310253,128.20983 109.80905,139.8607"
+         id="path1910"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1813"
+         inkscape:connection-end="#rect1724-6-7" />
+      <path
+         style="display:inline;fill:#0000ff;fill-opacity:1;fill-rule:evenodd;stroke:#ff8500;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 141.71839,99.266314 19.42262,-10e-7"
+         id="path1912"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1724-0"
+         inkscape:connection-end="#rect1679" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#00fb00;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 197.14101,99.266313 19.42259,10e-7"
+         id="path1914"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1679"
+         inkscape:connection-end="#rect1724" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#ff8500;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 141.71839,122.07961 19.42262,-1e-5"
+         id="path1916"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1724-4-8"
+         inkscape:connection-end="#rect1679-4" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#00fb00;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 197.14101,122.0796 19.42259,1e-5"
+         id="path1918"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1679-4"
+         inkscape:connection-end="#rect1724-4" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#ff8500;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 141.71839,144.89282 19.42262,0"
+         id="path1920"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1724-6-7"
+         inkscape:connection-end="#rect1679-8" />
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#00fb00;stroke-width:0.5;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-end:url(#TriangleStart)"
+         d="m 197.14101,144.89282 19.42259,0"
+         id="path1922"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1679-8"
+         inkscape:connection-end="#rect1724-6" />
+      <text
+         xml:space="preserve"
+         style="font-size:8.46667px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-121.09793"
+         y="54.031597"
+         id="text4093"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan4091"
+           style="font-size:8.46667px;stroke-width:0.75"
+           x="-121.09793"
+           y="54.031597">Model X</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:6.35px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-118.63563"
+         y="179.13635"
+         id="text4097"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan4095"
+           style="font-size:6.35px;stroke-width:0.75"
+           x="-118.63563"
+           y="179.13635">Queue</tspan><tspan
+           sodipodi:role="line"
+           style="font-size:6.35px;stroke-width:0.75"
+           x="-126.57313"
+           y="179.13635"
+           id="tspan4099">Pair 1</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:6.35px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-141.44887"
+         y="179.13635"
+         id="text4097-5"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan4095-6"
+           style="font-size:6.35px;stroke-width:0.75"
+           x="-141.44887"
+           y="179.13635">Queue</tspan><tspan
+           sodipodi:role="line"
+           style="font-size:6.35px;stroke-width:0.75"
+           x="-149.38637"
+           y="179.13635"
+           id="tspan4099-4">Pair 2</tspan></text>
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.354849;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:0.709699, 0.709699;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1924"
+         width="44.145252"
+         height="72.532341"
+         x="157.06865"
+         y="85.813438"
+         ry="4.31247" />
+      <g
+         id="g1224">
+        <rect
+           style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+           id="rect1679"
+           width="36"
+           height="18"
+           x="161.14101"
+           y="90.266312"
+           ry="3" />
+        <rect
+           style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+           id="rect1679-8"
+           width="36"
+           height="18"
+           x="161.14101"
+           y="135.89282"
+           ry="3" />
+        <rect
+           style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+           id="rect1679-4"
+           width="36"
+           height="18"
+           x="161.14101"
+           y="113.07959"
+           ry="3" />
+      </g>
+      <text
+         xml:space="preserve"
+         style="font-size:6.35px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-95.820801"
+         y="179.13635"
+         id="text4097-8"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan4095-4"
+           style="font-size:6.35px;stroke-width:0.75"
+           x="-95.820801"
+           y="179.13635">Queue</tspan><tspan
+           sodipodi:role="line"
+           style="font-size:6.35px;stroke-width:0.75"
+           x="-103.7583"
+           y="179.13635"
+           id="tspan4099-5">Pair 0</tspan></text>
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.317648;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1811"
+         width="44.196934"
+         height="16.731901"
+         x="157.04254"
+         y="56.49292"
+         ry="2.761292" />
+      <text
+         xml:space="preserve"
+         style="font-size:3.5859px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.317649;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:0.952945, 0.317649;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-60.009941"
+         y="186.38451"
+         id="text4156"
+         transform="matrix(0,-1.040508,0.96106903,0,0,0)"><tspan
+           sodipodi:role="line"
+           id="tspan4154"
+           style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:Arial;-inkscape-font-specification:'Arial Bold';stroke-width:0.317648"
+           x="-60.009941"
+           y="186.38451">Machine Learning</tspan><tspan
+           sodipodi:role="line"
+           style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:Arial;-inkscape-font-specification:'Arial Bold';stroke-width:0.317648"
+           x="-64.492317"
+           y="186.38451"
+           id="tspan4158">Hardware Engine</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#0000ff;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-99.705231"
+         y="125.91087"
+         id="text3708"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan3706"
+           style="font-size:5.64444px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.75"
+           x="-99.705231"
+           y="125.91087">lcore 1</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#0000ff;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-145.3221"
+         y="125.50572"
+         id="text3708-8"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan3706-7"
+           style="font-size:5.64444px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.75"
+           x="-145.3221"
+           y="125.50572">lcore 5</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#0000ff;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-122.51577"
+         y="125.52089"
+         id="text3708-5"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan3706-87"
+           style="font-size:5.64444px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.75"
+           x="-122.51577"
+           y="125.52089">lcore 3</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-162.06549"
+         y="125.4589"
+         id="text4542"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan4540"
+           style="font-size:5.64444px;stroke-width:0.75"
+           x="-162.06549"
+           y="125.4589">Enqueue Workers</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#0000ff;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-99.705231"
+         y="232.67706"
+         id="text3708-9"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan3706-9"
+           style="font-size:5.64444px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.75"
+           x="-99.705231"
+           y="232.67706">lcore 2</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#0000ff;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-122.51025"
+         y="232.66466"
+         id="text3708-7"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan3706-8"
+           style="font-size:5.64444px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.75"
+           x="-122.51025"
+           y="232.66466">lcore 4</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#0000ff;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:2.25, 0.750001;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-145.33035"
+         y="232.65778"
+         id="text3708-78"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan3706-0"
+           style="font-size:5.64444px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.75"
+           x="-145.33035"
+           y="232.65778">lcore 6</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-size:5.64444px;font-family:Arial;-inkscape-font-specification:Arial;text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-162.06549"
+         y="232.59988"
+         id="text4542-3"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan4540-7"
+           style="font-size:5.64444px;stroke-width:0.75"
+           x="-162.06549"
+           y="232.59988">Dequeue Workers</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:6.35px;font-family:Arial;-inkscape-font-specification:'Arial Bold';text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:0.750001;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-177.01665"
+         y="220.07283"
+         id="text5181"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan5179"
+           style="font-size:6.35px;stroke-width:0.75"
+           x="-177.01665"
+           y="220.07283">test: inference_ordered</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-weight:bold;font-size:4.23333px;font-family:Arial;-inkscape-font-specification:'Arial Bold';text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:none;fill-rule:evenodd;stroke:#00d7fb;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-178.63324"
+         y="98.67057"
+         id="text15571"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan15569"
+           style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:4.23333px;font-family:Arial;-inkscape-font-specification:Arial;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.5;stroke-opacity:1"
+           x="-178.63324"
+           y="98.67057">nb_worker_threads =  2 * MIN(nb_queue_pairs, (lcore_count - 1) / 2)</tspan></text>
+      <text
+         xml:space="preserve"
+         style="font-weight:bold;font-size:4.23333px;font-family:Arial;-inkscape-font-specification:'Arial Bold';text-align:center;writing-mode:tb-rl;text-anchor:middle;fill:none;fill-rule:evenodd;stroke:#00d7fb;stroke-width:0.499999;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         x="-171.18085"
+         y="89.26754"
+         id="text15571-3"
+         transform="rotate(-90)"><tspan
+           sodipodi:role="line"
+           id="tspan15569-9"
+           style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:4.23333px;font-family:Arial;-inkscape-font-specification:Arial;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.5;stroke-opacity:1"
+           x="-171.18085"
+           y="89.26754">inferences_per_queue_pair = repetitions / nb_queue_pairs</tspan></text>
+      <path
+         style="display:inline;fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.75;stroke-linecap:butt;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1;marker-start:url(#RoundedArrow);marker-end:url(#RoundedArrow)"
+         d="m 179.14101,85.813438 0,-12.588618"
+         id="path31090"
+         inkscape:connector-type="polyline"
+         inkscape:connector-curvature="0"
+         inkscape:connection-start="#rect1924" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00fb00;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1724"
+         width="32.290321"
+         height="11.709678"
+         x="216.5636"
+         y="93.411476"
+         ry="2" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00fb00;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1724-4"
+         width="32.290321"
+         height="11.709678"
+         x="216.5636"
+         y="116.22478"
+         ry="2" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00fb00;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1724-6"
+         width="32.290321"
+         height="11.709678"
+         x="216.5636"
+         y="139.03798"
+         ry="2" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.354849;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:0.709699, 0.709699;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1924-6"
+         width="44.145252"
+         height="72.532341"
+         x="210.6364"
+         y="85.813477"
+         ry="4.31247" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#ff8500;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1724-0"
+         width="32.290321"
+         height="11.709678"
+         x="109.42807"
+         y="93.411476"
+         ry="2" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#ff8500;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1724-6-7"
+         width="32.290321"
+         height="11.709678"
+         x="109.42807"
+         y="139.03798"
+         ry="2"
+         inkscape:connector-avoid="true" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#ff8500;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1724-4-8"
+         width="32.290321"
+         height="11.709678"
+         x="109.42807"
+         y="116.22478"
+         ry="2"
+         inkscape:connector-avoid="true" />
+      <rect
+         style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#00a6fb;stroke-width:0.354849;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:0.709699, 0.709699;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
+         id="rect1924-6-3"
+         width="44.145252"
+         height="72.532341"
+         x="103.50092"
+         y="85.813477"
+         ry="4.31247" />
+    </g>
+  </g>
+</svg>
diff --git a/doc/guides/tools/testmldev.rst b/doc/guides/tools/testmldev.rst
index b8a2a16ca2..164fbca64f 100644
--- a/doc/guides/tools/testmldev.rst
+++ b/doc/guides/tools/testmldev.rst
@@ -44,8 +44,8 @@ The following are the command-line options supported by the test application.
 
 * ``--test <name>``
 
-        Name of the test to execute. ML tests are divided into two groups, Device and Model
-        tests. Test name should be one of the following supported tests.
+        Name of the test to execute. ML tests are divided into three groups, Device, Model
+        and Inference tests. Test name should be one of the following supported tests.
 
       **ML Device Tests** ::
 
@@ -55,6 +55,10 @@ The following are the command-line options supported by the test application.
 
          model_ops
 
+      **ML Inference Tests** ::
+
+         inference_ordered
+
 * ``--dev_id <n>``
 
         Set the device id of the ML device to be used for the test. Default value is `0`.
@@ -69,6 +73,23 @@ The following are the command-line options supported by the test application.
         ``model_list`` in comma separated form (i.e. ``--models model_A.bin,model_B.bin``).
         Maximum number of models supported by the test is ``8``.
 
+* ``--filelist <file_list>``
+
+        Set the list of model, input, output and reference files to be used for the tests.
+        The application expects ``file_list`` to be in comma separated form
+        (i.e. ``--filelist <model,input,output>[,reference]``).
+
+        Multiple filelist entries can be specified when running the tests with multiple models.
+        Both quantized and dequantized outputs are written to disk. The dequantized output file
+        has the name specified by the user through the ``--filelist`` option, and a ``.q`` suffix
+        is appended to the quantized output filename. Maximum number of filelist entries supported
+        by the test is ``8``.
+
+* ``--repetitions <n>``
+
+        Set the number of inference repetitions to be executed in the test for each model. Default
+        value is `1`.
+
 * ``--debug``
 
         Enable the tests to run in debug mode.
@@ -196,6 +217,69 @@ Command to run model_ops test:
         --test=model_ops --models model_1.bin,model_2.bin,model_3.bin, model_4.bin
 
 
+ML Inference Tests
+------------------
+
+Inference tests are a set of tests to validate end-to-end inference execution on an ML device.
+These tests execute the full sequence of operations required to run inferences with one or
+multiple models.
+
+Application Options
+~~~~~~~~~~~~~~~~~~~
+
+Supported command-line options for the inference tests are the following::
+
+        --debug
+        --test
+        --dev_id
+        --socket_id
+        --filelist
+        --repetitions
+
+
+The list of files to be used for the inference tests can be specified through the option
+``--filelist <file_list>`` as a comma separated list. A filelist entry is of the format
+``--filelist <model_file,input_file,output_file>[,reference_file]`` and is used to specify the
+list of files required to test with a single model. Multiple filelist entries are supported by
+the test, one entry per model. Maximum number of filelist entries supported by the test is `8`.
+
+.. Note::
+
+    * The ``--filelist <file_list>`` option is mandatory for running inference tests.
+    * Options not supported by the tests are ignored if specified.
+
+
+INFERENCE_ORDERED Test
+~~~~~~~~~~~~~~~~~~~~~~
+
+This is a functional test for validating end-to-end inference execution on the ML device. The
+test configures the ML device and queue pairs as per the queue-pair related options
+(``queue_pairs`` and ``queue_size``) specified by the user. Upon successful configuration of the
+device and queue pairs, the first model specified through the filelist is loaded to the device
+and inferences are enqueued to the ML device by a pool of worker threads. The total number of
+inferences enqueued for the model is equal to the number of repetitions specified. A dedicated
+pool of worker threads dequeues the inferences from the device. The model is unloaded upon
+completion of all its inferences. The test continues loading and executing inference requests
+for all models specified through the ``--filelist`` option in an ordered manner.
+
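+The per-model flow of this test can be summarised with the following simplified pseudo-sequence
+(a sketch derived from the description above, not literal application code)::
+
+    for each model in the filelist:
+        load the model to the device
+        enqueue <repetitions> inference requests (enqueue worker threads)
+        dequeue the completed inferences (dequeue worker threads)
+        unload the model
+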
+.. _figure_mldev_inference_ordered:
+
+.. figure:: img/mldev_inference_ordered.*
+
+   Execution of the inference_ordered test on a single model.
+
+
+Example
+^^^^^^^
+
+Example command to run the inference_ordered test:
+
+.. code-block:: console
+
+    sudo <build_dir>/app/dpdk-test-mldev -c 0xf -a <PCI_ID> -- \
+        --test=inference_ordered --filelist model.bin,input.bin,output.bin
+
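+A run with multiple models and repetitions can be expressed in the same way. The command below
+is illustrative; it assumes two sets of model, input and output files are available and that
+each ``--filelist`` occurrence adds one filelist entry:
+
+.. code-block:: console
+
+    sudo <build_dir>/app/dpdk-test-mldev -c 0xf -a <PCI_ID> -- \
+        --test=inference_ordered --repetitions 10 \
+        --filelist model_A.bin,input_A.bin,output_A.bin \
+        --filelist model_B.bin,input_B.bin,output_B.bin
+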
+
 Debug mode
 ----------
 
-- 
2.17.1

