[dpdk-dev] [PATCH 10/13] cryptodev: move IV parameters to crypto session

Pablo de Lara <pablo.de.lara.guarch@intel.com>
Sun May 28 23:05:19 CEST 2017


Since the IV parameters (offset and length) should not change
between operations in the same session, these parameters are
moved to the crypto transform structure, so that they can be
stored in the session.

Also, the IV is no longer tied only to cipher algorithms, as
other algorithms, such as GMAC or the wireless algorithms
(e.g. SNOW3G), use it as well.

Therefore, the IV is parsed from the first transform when
a PMD session is created.
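
For reference, a minimal sketch of the resulting usage pattern,
based on the cperf_ops.c changes in this patch (names such as
cipher_key, iv_data, op and dev_id are illustrative only):

	/* IV offset/length are now set once, in the transform,
	 * at session creation time.
	 */
	struct rte_crypto_sym_xform cipher_xform = {
		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
		.cipher = {
			.algo = RTE_CRYPTO_CIPHER_AES_CBC,
			.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
			.key = { .data = cipher_key, .length = 16 },
		},
		.iv = {
			.offset = sizeof(struct rte_crypto_op) +
					sizeof(struct rte_crypto_sym_op),
			.length = 16,
		},
	};

	sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);

	/* Per operation, only the IV bytes are written at the agreed
	 * offset; the per-op cipher.iv.offset/length fields are gone.
	 */
	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
			cipher_xform.iv.offset);
	memcpy(iv_ptr, iv_data, cipher_xform.iv.length);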

Signed-off-by: Pablo de Lara <pablo.de.lara.guarch@intel.com>
---
 app/test-crypto-perf/cperf_ops.c                   |  33 ++++--
 app/test-crypto-perf/cperf_ops.h                   |   3 +-
 app/test-crypto-perf/cperf_options.h               |   6 +-
 app/test-crypto-perf/cperf_options_parsing.c       |  13 ++-
 app/test-crypto-perf/cperf_test_latency.c          |   7 +-
 app/test-crypto-perf/cperf_test_throughput.c       |   6 +-
 app/test-crypto-perf/cperf_test_vector_parsing.c   |   6 +-
 app/test-crypto-perf/cperf_test_vectors.c          |   6 +-
 app/test-crypto-perf/cperf_test_verify.c           |   6 +-
 app/test-crypto-perf/main.c                        |   9 +-
 doc/guides/tools/cryptoperf.rst                    |   8 +-
 drivers/crypto/aesni_gcm/aesni_gcm_pmd.c           |  25 ++--
 drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c       |  12 +-
 drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h   |   5 +
 drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c         |   8 +-
 drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c     |  37 +++---
 drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h |   5 +
 drivers/crypto/armv8/rte_armv8_pmd.c               |  12 +-
 drivers/crypto/armv8/rte_armv8_pmd_ops.c           |  12 +-
 drivers/crypto/armv8/rte_armv8_pmd_private.h       |   8 +-
 drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c        |  31 ++---
 drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h          |  30 +++--
 drivers/crypto/kasumi/rte_kasumi_pmd.c             |  25 ++--
 drivers/crypto/kasumi/rte_kasumi_pmd_ops.c         |  11 +-
 drivers/crypto/kasumi/rte_kasumi_pmd_private.h     |   1 +
 drivers/crypto/null/null_crypto_pmd_ops.c          |   7 +-
 drivers/crypto/openssl/rte_openssl_pmd.c           |  14 ++-
 drivers/crypto/openssl/rte_openssl_pmd_ops.c       |  74 +++++++-----
 drivers/crypto/openssl/rte_openssl_pmd_private.h   |   5 +
 drivers/crypto/qat/qat_adf/qat_algs.h              |   4 +
 drivers/crypto/qat/qat_crypto.c                    |  22 ++--
 drivers/crypto/qat/qat_crypto_capabilities.h       | 129 +++++++++++----------
 drivers/crypto/snow3g/rte_snow3g_pmd.c             |  25 ++--
 drivers/crypto/snow3g/rte_snow3g_pmd_ops.c         |  11 +-
 drivers/crypto/snow3g/rte_snow3g_pmd_private.h     |   1 +
 drivers/crypto/zuc/rte_zuc_pmd.c                   |  16 +--
 drivers/crypto/zuc/rte_zuc_pmd_ops.c               |  11 +-
 drivers/crypto/zuc/rte_zuc_pmd_private.h           |   1 +
 lib/librte_cryptodev/rte_crypto_sym.h              |  98 ++++++++--------
 lib/librte_cryptodev/rte_cryptodev.c               |   8 +-
 lib/librte_cryptodev/rte_cryptodev.h               |  10 +-
 41 files changed, 436 insertions(+), 325 deletions(-)

diff --git a/app/test-crypto-perf/cperf_ops.c b/app/test-crypto-perf/cperf_ops.c
index 4846b68..9a997a4 100644
--- a/app/test-crypto-perf/cperf_ops.c
+++ b/app/test-crypto-perf/cperf_ops.c
@@ -105,13 +105,11 @@ cperf_set_ops_cipher(struct rte_crypto_op **ops,
 		sym_op->m_src = bufs_in[i];
 		sym_op->m_dst = bufs_out[i];
 
-		/* cipher parameters */
-		sym_op->cipher.iv.offset = iv_offset;
-		sym_op->cipher.iv.length = test_vector->iv.length;
 		memcpy(rte_crypto_op_ctod_offset(ops[i], uint8_t *, iv_offset),
 				test_vector->iv.data,
 				test_vector->iv.length);
 
+		/* cipher parameters */
 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
@@ -131,7 +129,7 @@ cperf_set_ops_auth(struct rte_crypto_op **ops,
 		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
 		const struct cperf_options *options,
 		const struct cperf_test_vector *test_vector,
-		uint16_t iv_offset __rte_unused)
+		uint16_t iv_offset)
 {
 	uint16_t i;
 
@@ -143,6 +141,10 @@ cperf_set_ops_auth(struct rte_crypto_op **ops,
 		sym_op->m_src = bufs_in[i];
 		sym_op->m_dst = bufs_out[i];
 
+		memcpy(rte_crypto_op_ctod_offset(ops[i], uint8_t *, iv_offset),
+				test_vector->iv.data,
+				test_vector->iv.length);
+
 		/* authentication parameters */
 		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
 			sym_op->auth.digest.data = test_vector->digest.data;
@@ -207,13 +209,11 @@ cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
 		sym_op->m_src = bufs_in[i];
 		sym_op->m_dst = bufs_out[i];
 
-		/* cipher parameters */
-		sym_op->cipher.iv.offset = iv_offset;
-		sym_op->cipher.iv.length = test_vector->iv.length;
 		memcpy(rte_crypto_op_ctod_offset(ops[i], uint8_t *, iv_offset),
 				test_vector->iv.data,
 				test_vector->iv.length);
 
+		/* cipher parameters */
 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
 				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
@@ -286,13 +286,11 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
 		sym_op->m_src = bufs_in[i];
 		sym_op->m_dst = bufs_out[i];
 
-		/* cipher parameters */
-		sym_op->cipher.iv.offset = iv_offset;
-		sym_op->cipher.iv.length = test_vector->iv.length;
 		memcpy(rte_crypto_op_ctod_offset(ops[i], uint8_t *, iv_offset),
 				test_vector->iv.data,
 				test_vector->iv.length);
 
+		/* cipher parameters */
 		sym_op->cipher.data.length = options->test_buffer_size;
 		sym_op->cipher.data.offset =
 				RTE_ALIGN_CEIL(options->auth_aad_sz, 16);
@@ -341,7 +339,8 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
 static struct rte_cryptodev_sym_session *
 cperf_create_session(uint8_t dev_id,
 	const struct cperf_options *options,
-	const struct cperf_test_vector *test_vector)
+	const struct cperf_test_vector *test_vector,
+	uint16_t iv_offset)
 {
 	struct rte_crypto_sym_xform cipher_xform;
 	struct rte_crypto_sym_xform auth_xform;
@@ -355,6 +354,7 @@ cperf_create_session(uint8_t dev_id,
 		cipher_xform.next = NULL;
 		cipher_xform.cipher.algo = options->cipher_algo;
 		cipher_xform.cipher.op = options->cipher_op;
+		cipher_xform.iv.offset = iv_offset;
 
 		/* cipher different than null */
 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
@@ -362,9 +362,12 @@ cperf_create_session(uint8_t dev_id,
 					test_vector->cipher_key.data;
 			cipher_xform.cipher.key.length =
 					test_vector->cipher_key.length;
+			cipher_xform.iv.length = test_vector->iv.length;
+
 		} else {
 			cipher_xform.cipher.key.data = NULL;
 			cipher_xform.cipher.key.length = 0;
+			cipher_xform.iv.length = 0;
 		}
 		/* create crypto session */
 		sess = rte_cryptodev_sym_session_create(dev_id,	&cipher_xform);
@@ -376,6 +379,7 @@ cperf_create_session(uint8_t dev_id,
 		auth_xform.next = NULL;
 		auth_xform.auth.algo = options->auth_algo;
 		auth_xform.auth.op = options->auth_op;
+		auth_xform.iv.offset = iv_offset;
 
 		/* auth different than null */
 		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
@@ -384,6 +388,7 @@ cperf_create_session(uint8_t dev_id,
 			auth_xform.auth.key.length =
 					test_vector->auth_key.length;
 			auth_xform.auth.key.data = test_vector->auth_key.data;
+			auth_xform.iv.length = test_vector->iv.length;
 		} else {
 			auth_xform.auth.digest_length = 0;
 			auth_xform.auth.key.length = 0;
@@ -405,6 +410,7 @@ cperf_create_session(uint8_t dev_id,
 		cipher_xform.next = NULL;
 		cipher_xform.cipher.algo = options->cipher_algo;
 		cipher_xform.cipher.op = options->cipher_op;
+		cipher_xform.iv.offset = iv_offset;
 
 		/* cipher different than null */
 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
@@ -412,9 +418,11 @@ cperf_create_session(uint8_t dev_id,
 					test_vector->cipher_key.data;
 			cipher_xform.cipher.key.length =
 					test_vector->cipher_key.length;
+			cipher_xform.iv.length = test_vector->iv.length;
 		} else {
 			cipher_xform.cipher.key.data = NULL;
 			cipher_xform.cipher.key.length = 0;
+			cipher_xform.iv.length = 0;
 		}
 
 		/*
@@ -424,10 +432,12 @@ cperf_create_session(uint8_t dev_id,
 		auth_xform.next = NULL;
 		auth_xform.auth.algo = options->auth_algo;
 		auth_xform.auth.op = options->auth_op;
+		auth_xform.iv.offset = iv_offset;
 
 		/* auth different than null */
 		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
 			auth_xform.auth.digest_length = options->auth_digest_sz;
+			auth_xform.iv.length = test_vector->iv.length;
 			/* auth options for aes gcm */
 			if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
 				options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
@@ -443,6 +453,7 @@ cperf_create_session(uint8_t dev_id,
 			auth_xform.auth.digest_length = 0;
 			auth_xform.auth.key.length = 0;
 			auth_xform.auth.key.data = NULL;
+			auth_xform.iv.length = 0;
 		}
 
 		/* create crypto session for aes gcm */
diff --git a/app/test-crypto-perf/cperf_ops.h b/app/test-crypto-perf/cperf_ops.h
index f7b431c..bb83cd5 100644
--- a/app/test-crypto-perf/cperf_ops.h
+++ b/app/test-crypto-perf/cperf_ops.h
@@ -42,7 +42,8 @@
 
 typedef struct rte_cryptodev_sym_session *(*cperf_sessions_create_t)(
 		uint8_t dev_id, const struct cperf_options *options,
-		const struct cperf_test_vector *test_vector);
+		const struct cperf_test_vector *test_vector,
+		uint16_t iv_offset);
 
 typedef int (*cperf_populate_ops_t)(struct rte_crypto_op **ops,
 		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
diff --git a/app/test-crypto-perf/cperf_options.h b/app/test-crypto-perf/cperf_options.h
index b928c58..43b3482 100644
--- a/app/test-crypto-perf/cperf_options.h
+++ b/app/test-crypto-perf/cperf_options.h
@@ -20,10 +20,11 @@
 #define CPERF_TEST_FILE		("test-file")
 #define CPERF_TEST_NAME		("test-name")
 
+#define CPERF_IV_SZ		("iv-sz")
+
 #define CPERF_CIPHER_ALGO	("cipher-algo")
 #define CPERF_CIPHER_OP		("cipher-op")
 #define CPERF_CIPHER_KEY_SZ	("cipher-key-sz")
-#define CPERF_CIPHER_IV_SZ	("cipher-iv-sz")
 
 #define CPERF_AUTH_ALGO		("auth-algo")
 #define CPERF_AUTH_OP		("auth-op")
@@ -66,11 +67,12 @@ struct cperf_options {
 	uint32_t silent:1;
 	uint32_t csv:1;
 
+	uint16_t iv_sz;
+
 	enum rte_crypto_cipher_algorithm cipher_algo;
 	enum rte_crypto_cipher_operation cipher_op;
 
 	uint16_t cipher_key_sz;
-	uint16_t cipher_iv_sz;
 
 	enum rte_crypto_auth_algorithm auth_algo;
 	enum rte_crypto_auth_operation auth_op;
diff --git a/app/test-crypto-perf/cperf_options_parsing.c b/app/test-crypto-perf/cperf_options_parsing.c
index d172671..f808103 100644
--- a/app/test-crypto-perf/cperf_options_parsing.c
+++ b/app/test-crypto-perf/cperf_options_parsing.c
@@ -488,9 +488,9 @@ parse_cipher_key_sz(struct cperf_options *opts, const char *arg)
 }
 
 static int
-parse_cipher_iv_sz(struct cperf_options *opts, const char *arg)
+parse_iv_sz(struct cperf_options *opts, const char *arg)
 {
-	return parse_uint16_t(&opts->cipher_iv_sz, arg);
+	return parse_uint16_t(&opts->iv_sz, arg);
 }
 
 static int
@@ -594,7 +594,7 @@ static struct option lgopts[] = {
 	{ CPERF_CIPHER_OP, required_argument, 0, 0 },
 
 	{ CPERF_CIPHER_KEY_SZ, required_argument, 0, 0 },
-	{ CPERF_CIPHER_IV_SZ, required_argument, 0, 0 },
+	{ CPERF_IV_SZ, required_argument, 0, 0 },
 
 	{ CPERF_AUTH_ALGO, required_argument, 0, 0 },
 	{ CPERF_AUTH_OP, required_argument, 0, 0 },
@@ -644,7 +644,7 @@ cperf_options_default(struct cperf_options *opts)
 	opts->cipher_algo = RTE_CRYPTO_CIPHER_AES_CBC;
 	opts->cipher_op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
 	opts->cipher_key_sz = 16;
-	opts->cipher_iv_sz = 16;
+	opts->iv_sz = 16;
 
 	opts->auth_algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
 	opts->auth_op = RTE_CRYPTO_AUTH_OP_GENERATE;
@@ -674,7 +674,7 @@ cperf_opts_parse_long(int opt_idx, struct cperf_options *opts)
 		{ CPERF_CIPHER_ALGO,	parse_cipher_algo },
 		{ CPERF_CIPHER_OP,	parse_cipher_op },
 		{ CPERF_CIPHER_KEY_SZ,	parse_cipher_key_sz },
-		{ CPERF_CIPHER_IV_SZ,	parse_cipher_iv_sz },
+		{ CPERF_IV_SZ,	parse_iv_sz },
 		{ CPERF_AUTH_ALGO,	parse_auth_algo },
 		{ CPERF_AUTH_OP,	parse_auth_op },
 		{ CPERF_AUTH_KEY_SZ,	parse_auth_key_sz },
@@ -904,6 +904,8 @@ cperf_options_dump(struct cperf_options *opts)
 	printf("# out of place: %s\n", opts->out_of_place ? "yes" : "no");
 
 	printf("#\n");
+	printf("# iv size: %u\n", opts->iv_sz);
+	printf("#\n");
 
 	if (opts->op_type == CPERF_AUTH_ONLY ||
 			opts->op_type == CPERF_CIPHER_THEN_AUTH ||
@@ -928,7 +930,6 @@ cperf_options_dump(struct cperf_options *opts)
 		printf("# cipher operation: %s\n",
 			rte_crypto_cipher_operation_strings[opts->cipher_op]);
 		printf("# cipher key size: %u\n", opts->cipher_key_sz);
-		printf("# cipher iv size: %u\n", opts->cipher_iv_sz);
 		printf("#\n");
 	}
 }
diff --git a/app/test-crypto-perf/cperf_test_latency.c b/app/test-crypto-perf/cperf_test_latency.c
index 780eef0..40a5b27 100644
--- a/app/test-crypto-perf/cperf_test_latency.c
+++ b/app/test-crypto-perf/cperf_test_latency.c
@@ -212,7 +212,12 @@ cperf_latency_test_constructor(uint8_t dev_id, uint16_t qp_id,
 	ctx->options = options;
 	ctx->test_vector = test_vector;
 
-	ctx->sess = op_fns->sess_create(dev_id, options, test_vector);
+	/* IV goes at the end of the crypto operation */
+	uint16_t iv_offset = sizeof(struct rte_crypto_op) +
+		sizeof(struct rte_crypto_sym_op) +
+		sizeof(struct cperf_op_result *);
+
+	ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset);
 	if (ctx->sess == NULL)
 		goto err;
 
diff --git a/app/test-crypto-perf/cperf_test_throughput.c b/app/test-crypto-perf/cperf_test_throughput.c
index 144b550..379f64d 100644
--- a/app/test-crypto-perf/cperf_test_throughput.c
+++ b/app/test-crypto-perf/cperf_test_throughput.c
@@ -195,7 +195,11 @@ cperf_throughput_test_constructor(uint8_t dev_id, uint16_t qp_id,
 	ctx->options = options;
 	ctx->test_vector = test_vector;
 
-	ctx->sess = op_fns->sess_create(dev_id, options, test_vector);
+	/* IV goes at the end of the crypto operation */
+	uint16_t iv_offset = sizeof(struct rte_crypto_op) +
+		sizeof(struct rte_crypto_sym_op);
+
+	ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset);
 	if (ctx->sess == NULL)
 		goto err;
 
diff --git a/app/test-crypto-perf/cperf_test_vector_parsing.c b/app/test-crypto-perf/cperf_test_vector_parsing.c
index 62d0c91..7c751ea 100644
--- a/app/test-crypto-perf/cperf_test_vector_parsing.c
+++ b/app/test-crypto-perf/cperf_test_vector_parsing.c
@@ -306,12 +306,12 @@ parse_entry(char *entry, struct cperf_test_vector *vector,
 		if (tc_found)
 			vector->iv.length = data_length;
 		else {
-			if (opts->cipher_iv_sz > data_length) {
+			if (opts->iv_sz > data_length) {
 				printf("Global iv shorter than "
-					"cipher_iv_sz\n");
+					"iv_sz\n");
 				return -1;
 			}
-			vector->iv.length = opts->cipher_iv_sz;
+			vector->iv.length = opts->iv_sz;
 		}
 
 	} else if (strstr(key_token, "ciphertext")) {
diff --git a/app/test-crypto-perf/cperf_test_vectors.c b/app/test-crypto-perf/cperf_test_vectors.c
index 36b3f6f..d995b3e 100644
--- a/app/test-crypto-perf/cperf_test_vectors.c
+++ b/app/test-crypto-perf/cperf_test_vectors.c
@@ -414,16 +414,16 @@ cperf_test_vector_get_dummy(struct cperf_options *options)
 			t_vec->cipher_key.length = options->cipher_key_sz;
 			t_vec->ciphertext.data = ciphertext;
 			t_vec->cipher_key.data = cipher_key;
-			t_vec->iv.data = rte_malloc(NULL, options->cipher_iv_sz,
+			t_vec->iv.data = rte_malloc(NULL, options->iv_sz,
 					16);
 			if (t_vec->iv.data == NULL) {
 				rte_free(t_vec);
 				return NULL;
 			}
-			memcpy(t_vec->iv.data, iv, options->cipher_iv_sz);
+			memcpy(t_vec->iv.data, iv, options->iv_sz);
 		}
 		t_vec->ciphertext.length = options->max_buffer_size;
-		t_vec->iv.length = options->cipher_iv_sz;
+		t_vec->iv.length = options->iv_sz;
 		t_vec->data.cipher_offset = 0;
 		t_vec->data.cipher_length = options->max_buffer_size;
 	}
diff --git a/app/test-crypto-perf/cperf_test_verify.c b/app/test-crypto-perf/cperf_test_verify.c
index a599d91..68e6a94 100644
--- a/app/test-crypto-perf/cperf_test_verify.c
+++ b/app/test-crypto-perf/cperf_test_verify.c
@@ -199,7 +199,11 @@ cperf_verify_test_constructor(uint8_t dev_id, uint16_t qp_id,
 	ctx->options = options;
 	ctx->test_vector = test_vector;
 
-	ctx->sess = op_fns->sess_create(dev_id, options, test_vector);
+	/* IV goes at the end of the crypto operation */
+	uint16_t iv_offset = sizeof(struct rte_crypto_op) +
+		sizeof(struct rte_crypto_sym_op);
+
+	ctx->sess = op_fns->sess_create(dev_id, options, test_vector, iv_offset);
 	if (ctx->sess == NULL)
 		goto err;
 
diff --git a/app/test-crypto-perf/main.c b/app/test-crypto-perf/main.c
index 9ec2a4b..fb31f5f 100644
--- a/app/test-crypto-perf/main.c
+++ b/app/test-crypto-perf/main.c
@@ -138,7 +138,8 @@ cperf_verify_devices_capabilities(struct cperf_options *opts,
 					capability,
 					opts->auth_key_sz,
 					opts->auth_digest_sz,
-					opts->auth_aad_sz);
+					opts->auth_aad_sz,
+					opts->iv_sz);
 			if (ret != 0)
 				return ret;
 		}
@@ -159,7 +160,7 @@ cperf_verify_devices_capabilities(struct cperf_options *opts,
 			ret = rte_cryptodev_sym_capability_check_cipher(
 					capability,
 					opts->cipher_key_sz,
-					opts->cipher_iv_sz);
+					opts->iv_sz);
 			if (ret != 0)
 				return ret;
 		}
@@ -187,7 +188,7 @@ cperf_check_test_vector(struct cperf_options *opts,
 				return -1;
 			if (test_vec->iv.data == NULL)
 				return -1;
-			if (test_vec->iv.length != opts->cipher_iv_sz)
+			if (test_vec->iv.length != opts->iv_sz)
 				return -1;
 			if (test_vec->cipher_key.data == NULL)
 				return -1;
@@ -228,7 +229,7 @@ cperf_check_test_vector(struct cperf_options *opts,
 				return -1;
 			if (test_vec->iv.data == NULL)
 				return -1;
-			if (test_vec->iv.length != opts->cipher_iv_sz)
+			if (test_vec->iv.length != opts->iv_sz)
 				return -1;
 			if (test_vec->cipher_key.data == NULL)
 				return -1;
diff --git a/doc/guides/tools/cryptoperf.rst b/doc/guides/tools/cryptoperf.rst
index 2d225d5..842f51e 100644
--- a/doc/guides/tools/cryptoperf.rst
+++ b/doc/guides/tools/cryptoperf.rst
@@ -246,9 +246,9 @@ The following are the application command-line options:
 
         Set the size of cipher key.
 
-* ``--cipher-iv-sz <n>``
+* ``--iv-sz <n>``
 
-        Set the size of cipher iv.
+        Set the size of IV.
 
 * ``--auth-algo <name>``
 
@@ -375,7 +375,7 @@ on two cores for cipher encryption aes-cbc, ten operations in silent mode::
 
    dpdk-test-crypto-perf -l 4-7 --vdev crypto_aesni_mb_pmd1
    --vdev crypto_aesni_mb_pmd2 -w 0000:00:00.0 -- --devtype crypto_aesni_mb
-   --cipher-algo aes-cbc --cipher-key-sz 16 --cipher-iv-sz 16
+   --cipher-algo aes-cbc --cipher-key-sz 16 --iv-sz 16
    --cipher-op encrypt --optype cipher-only --silent
    --ptest latency --total-ops 10
 
@@ -386,7 +386,7 @@ with packet verification::
 
    dpdk-test-crypto-perf -l 4-7 --vdev crypto_openssl -w 0000:00:00.0 --
    --devtype crypto_openssl --cipher-algo aes-gcm --cipher-key-sz 16
-   --cipher-iv-sz 16 --cipher-op encrypt --auth-algo aes-gcm --auth-key-sz 16
+   --iv-sz 16 --cipher-op encrypt --auth-algo aes-gcm --auth-key-sz 16
    --auth-digest-sz 16 --auth-aad-sz 16 --auth-op generate --optype aead
    --silent --ptest verify --total-ops 10
    --test-file test_aes_gcm.data
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
index 573e071..e7eb1f5 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
@@ -104,6 +104,10 @@ aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
 		return -EINVAL;
 	}
 
+	/* Set IV parameters */
+	sess->iv.offset = xform->iv.offset;
+	sess->iv.length = xform->iv.length;
+
 	/* Select Crypto operation */
 	if (cipher_xform->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT &&
 			auth_xform->auth.op == RTE_CRYPTO_AUTH_OP_GENERATE)
@@ -136,6 +140,16 @@ aesni_gcm_set_session_parameters(struct aesni_gcm_session *sess,
 		return -EINVAL;
 	}
 
+	/* IV check */
+	if (xform->iv.length != 16 && xform->iv.length != 12 &&
+			xform->iv.length != 0) {
+		GCM_LOG_ERR("iv");
+		return -EINVAL;
+	}
+
+	sess->iv.length = xform->iv.length;
+	sess->iv.offset = xform->iv.offset;
+
 	/* Digest check */
 	if (digest_length != 16 &&
 			digest_length != 12 &&
@@ -232,20 +246,13 @@ process_gcm_crypto_op(struct rte_crypto_op *op,
 
 	src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
 
-	/* sanity checks */
-	if (sym_op->cipher.iv.length != 16 && sym_op->cipher.iv.length != 12 &&
-			sym_op->cipher.iv.length != 0) {
-		GCM_LOG_ERR("iv");
-		return -1;
-	}
-
 	IV_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
-				sym_op->cipher.iv.offset);
+				session->iv.offset);
 	/*
 	 * GCM working in 12B IV mode => 16B pre-counter block we need
 	 * to set BE LSB to 1, driver expects that 16B is allocated
 	 */
-	if (sym_op->cipher.iv.length == 12) {
+	if (session->iv.length == 12) {
 		uint32_t *iv_padd = (uint32_t *)&(IV_ptr[12]);
 		*iv_padd = rte_bswap32(1);
 	}
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
index 7b68a20..6dd22e5 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_ops.c
@@ -43,6 +43,7 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_AES_GMAC,
 				.block_size = 16,
@@ -68,6 +69,7 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_AES_GCM,
 				.block_size = 16,
@@ -93,6 +95,11 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 12,
+				.max = 12,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
 				.block_size = 16,
@@ -100,11 +107,6 @@ static const struct rte_cryptodev_capabilities aesni_gcm_pmd_capabilities[] = {
 					.min = 16,
 					.max = 32,
 					.increment = 16
-				},
-				.iv_size = {
-					.min = 12,
-					.max = 12,
-					.increment = 0
 				}
 			}, }
 		}, }
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h
index b27ad40..50dea82 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd_private.h
@@ -90,6 +90,11 @@ enum aesni_gcm_key {
 
 /** AESNI GCM private session structure */
 struct aesni_gcm_session {
+	struct {
+		uint16_t length;
+		uint16_t offset;
+	} iv;
+	/**< IV parameters */
 	uint16_t digest_length;
 	/**< Digest length */
 	enum aesni_gcm_operation op;
diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
index 284e111..8e9f188 100644
--- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
+++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
@@ -299,6 +299,10 @@ aesni_mb_set_session_parameters(const struct aesni_mb_op_fns *mb_ops,
 		return -1;
 	}
 
+	/* Set IV parameters */
+	sess->iv.offset = xform->iv.offset;
+	sess->iv.length = xform->iv.length;
+
 	if (aesni_mb_set_session_auth_parameters(mb_ops, sess, auth_xform)) {
 		MB_LOG_ERR("Invalid/unsupported authentication parameters");
 		return -1;
@@ -471,8 +475,8 @@ set_mb_job_params(JOB_AES_HMAC *job, struct aesni_mb_qp *qp,
 
 	/* Set IV parameters */
 	job->iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-			op->sym->cipher.iv.offset);
-	job->iv_len_in_bytes = op->sym->cipher.iv.length;
+			session->iv.offset);
+	job->iv_len_in_bytes = session->iv.length;
 
 	/* Data  Parameter */
 	job->src = rte_pktmbuf_mtod(m_src, uint8_t *);
diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c
index d1bc28e..936ae7c 100644
--- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c
+++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_ops.c
@@ -44,6 +44,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
 				.block_size = 64,
@@ -65,6 +66,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 				.block_size = 64,
@@ -86,6 +88,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
 				.block_size = 64,
@@ -107,6 +110,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
 				.block_size = 64,
@@ -128,6 +132,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
 				.block_size = 128,
@@ -149,6 +154,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
 				.block_size = 128,
@@ -170,6 +176,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,
 				.block_size = 16,
@@ -191,6 +198,11 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
 				.block_size = 16,
@@ -199,11 +211,6 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 					.max = 32,
 					.increment = 8
 				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
-				}
 			}, }
 		}, }
 	},
@@ -211,6 +218,11 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_CTR,
 				.block_size = 16,
@@ -219,11 +231,6 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 					.max = 32,
 					.increment = 8
 				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
-				}
 			}, }
 		}, }
 	},
@@ -231,6 +238,11 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_DOCSISBPI,
 				.block_size = 16,
@@ -239,11 +251,6 @@ static const struct rte_cryptodev_capabilities aesni_mb_pmd_capabilities[] = {
 					.max = 16,
 					.increment = 0
 				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
-				}
 			}, }
 		}, }
 	},
diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h
index 0d82699..5c50d37 100644
--- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h
+++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd_private.h
@@ -167,6 +167,11 @@ struct aesni_mb_qp {
 /** AES-NI multi-buffer private session structure */
 struct aesni_mb_session {
 	JOB_CHAIN_ORDER chain_order;
+	struct {
+		uint16_t length;
+		uint16_t offset;
+	} iv;
+	/**< IV parameters */
 
 	/** Cipher Parameters */
 	struct {
diff --git a/drivers/crypto/armv8/rte_armv8_pmd.c b/drivers/crypto/armv8/rte_armv8_pmd.c
index 77d79df..afd3993 100644
--- a/drivers/crypto/armv8/rte_armv8_pmd.c
+++ b/drivers/crypto/armv8/rte_armv8_pmd.c
@@ -431,7 +431,7 @@ armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
 	case RTE_CRYPTO_CIPHER_AES_CBC:
 		sess->cipher.algo = calg;
 		/* IV len is always 16 bytes (block size) for AES CBC */
-		sess->cipher.iv_len = 16;
+		sess->iv.length = 16;
 		break;
 	default:
 		return -EINVAL;
@@ -525,6 +525,9 @@ armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
 		return -EINVAL;
 	}
 
+	/* Set IV offset */
+	sess->iv.offset = xform->iv.offset;
+
 	if (is_chained_op) {
 		ret = armv8_crypto_set_session_chained_parameters(sess,
 						cipher_xform, auth_xform);
@@ -651,13 +654,8 @@ process_armv8_chained_op
 				sess->auth.digest_length);
 	}
 
-	if (unlikely(op->sym->cipher.iv.length != sess->cipher.iv_len)) {
-		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
-		return;
-	}
-
 	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-					op->sym->cipher.iv.offset);
+					sess->iv.offset);
 	arg.cipher.key = sess->cipher.key.data;
 	/* Acquire combined mode function */
 	crypto_func = sess->crypto_func;
diff --git a/drivers/crypto/armv8/rte_armv8_pmd_ops.c b/drivers/crypto/armv8/rte_armv8_pmd_ops.c
index 4d9ccbf..7f60514 100644
--- a/drivers/crypto/armv8/rte_armv8_pmd_ops.c
+++ b/drivers/crypto/armv8/rte_armv8_pmd_ops.c
@@ -46,6 +46,7 @@ static const struct rte_cryptodev_capabilities
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 			{.sym = {
 				.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+				.iv_size = { 0 },
 				{.auth = {
 					.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 					.block_size = 64,
@@ -67,6 +68,7 @@ static const struct rte_cryptodev_capabilities
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 			{.sym = {
 				.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+				.iv_size = { 0 },
 				{.auth = {
 					.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
 					.block_size = 64,
@@ -88,6 +90,11 @@ static const struct rte_cryptodev_capabilities
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 			{.sym = {
 				.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				},
 				{.cipher = {
 					.algo = RTE_CRYPTO_CIPHER_AES_CBC,
 					.block_size = 16,
@@ -96,11 +103,6 @@ static const struct rte_cryptodev_capabilities
 						.max = 16,
 						.increment = 0
 					},
-					.iv_size = {
-						.min = 16,
-						.max = 16,
-						.increment = 0
-					}
 				}, }
 			}, }
 	},
diff --git a/drivers/crypto/armv8/rte_armv8_pmd_private.h b/drivers/crypto/armv8/rte_armv8_pmd_private.h
index ccd5fdc..bcd931d 100644
--- a/drivers/crypto/armv8/rte_armv8_pmd_private.h
+++ b/drivers/crypto/armv8/rte_armv8_pmd_private.h
@@ -153,14 +153,16 @@ struct armv8_crypto_session {
 	crypto_func_t crypto_func;
 	/**< cryptographic function to use for this session */
 
-	/** Cipher Parameters */
+	struct {
+		uint16_t length;
+		uint16_t offset;
+	} iv;
+	/**< IV parameters */
 	struct {
 		enum rte_crypto_cipher_operation direction;
 		/**< cipher operation direction */
 		enum rte_crypto_cipher_algorithm algo;
 		/**< cipher algorithm */
-		int iv_len;
-		/**< IV length */
 
 		struct {
 			uint8_t data[256];
diff --git a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
index c192141..40b74e1 100644
--- a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
+++ b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
@@ -88,7 +88,7 @@ build_authenc_fd(dpaa2_sec_session *sess,
 	uint8_t *old_icv;
 	uint32_t mem_len = (7 * sizeof(struct qbman_fle)) + icv_len;
 	uint8_t *IV_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
-			op->sym->cipher.iv.offset);
+			sess->iv.offset);
 
 	PMD_INIT_FUNC_TRACE();
 
@@ -138,7 +138,7 @@ build_authenc_fd(dpaa2_sec_session *sess,
 		   sess->digest_length,
 		   sym_op->cipher.data.offset,
 		   sym_op->cipher.data.length,
-		   sym_op->cipher.iv.length,
+		   sess->iv.length,
 		   sym_op->m_src->data_off);
 
 	/* Configure Output FLE with Scatter/Gather Entry */
@@ -163,7 +163,7 @@ build_authenc_fd(dpaa2_sec_session *sess,
 				DPAA2_VADDR_TO_IOVA(sym_op->auth.digest.data));
 		sge->length = sess->digest_length;
 		DPAA2_SET_FD_LEN(fd, (sym_op->auth.data.length +
-					sym_op->cipher.iv.length));
+					sess->iv.length));
 	}
 	DPAA2_SET_FLE_FIN(sge);
 
@@ -175,13 +175,13 @@ build_authenc_fd(dpaa2_sec_session *sess,
 	DPAA2_SET_FLE_SG_EXT(fle);
 	DPAA2_SET_FLE_FIN(fle);
 	fle->length = (sess->dir == DIR_ENC) ?
-			(sym_op->auth.data.length + sym_op->cipher.iv.length) :
-			(sym_op->auth.data.length + sym_op->cipher.iv.length +
+			(sym_op->auth.data.length + sess->iv.length) :
+			(sym_op->auth.data.length + sess->iv.length +
 			 sess->digest_length);
 
 	/* Configure Input SGE for Encap/Decap */
 	DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(IV_ptr));
-	sge->length = sym_op->cipher.iv.length;
+	sge->length = sess->iv.length;
 	sge++;
 
 	DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
@@ -198,7 +198,7 @@ build_authenc_fd(dpaa2_sec_session *sess,
 		sge->length = sess->digest_length;
 		DPAA2_SET_FD_LEN(fd, (sym_op->auth.data.length +
 				 sess->digest_length +
-				 sym_op->cipher.iv.length));
+				 sess->iv.length));
 	}
 	DPAA2_SET_FLE_FIN(sge);
 	if (auth_only_len) {
@@ -310,7 +310,7 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
 	struct sec_flow_context *flc;
 	struct ctxt_priv *priv = sess->ctxt;
 	uint8_t *IV_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
-			op->sym->cipher.iv.offset);
+			sess->iv.offset);
 
 	PMD_INIT_FUNC_TRACE();
 
@@ -347,21 +347,21 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
 	flc = &priv->flc_desc[0].flc;
 	DPAA2_SET_FD_ADDR(fd, DPAA2_VADDR_TO_IOVA(fle));
 	DPAA2_SET_FD_LEN(fd, sym_op->cipher.data.length +
-			 sym_op->cipher.iv.length);
+			 sess->iv.length);
 	DPAA2_SET_FD_COMPOUND_FMT(fd);
 	DPAA2_SET_FD_FLC(fd, DPAA2_VADDR_TO_IOVA(flc));
 
 	PMD_TX_LOG(DEBUG, "cipher_off: 0x%x/length %d,ivlen=%d data_off: 0x%x",
 		   sym_op->cipher.data.offset,
 		   sym_op->cipher.data.length,
-		   sym_op->cipher.iv.length,
+		   sess->iv.length,
 		   sym_op->m_src->data_off);
 
 	DPAA2_SET_FLE_ADDR(fle, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
 	DPAA2_SET_FLE_OFFSET(fle, sym_op->cipher.data.offset +
 			     sym_op->m_src->data_off);
 
-	fle->length = sym_op->cipher.data.length + sym_op->cipher.iv.length;
+	fle->length = sym_op->cipher.data.length + sess->iv.length;
 
 	PMD_TX_LOG(DEBUG, "1 - flc = %p, fle = %p FLEaddr = %x-%x, length %d",
 		   flc, fle, fle->addr_hi, fle->addr_lo, fle->length);
@@ -369,12 +369,12 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
 	fle++;
 
 	DPAA2_SET_FLE_ADDR(fle, DPAA2_VADDR_TO_IOVA(sge));
-	fle->length = sym_op->cipher.data.length + sym_op->cipher.iv.length;
+	fle->length = sym_op->cipher.data.length + sess->iv.length;
 
 	DPAA2_SET_FLE_SG_EXT(fle);
 
 	DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(IV_ptr));
-	sge->length = sym_op->cipher.iv.length;
+	sge->length = sess->iv.length;
 
 	sge++;
 	DPAA2_SET_FLE_ADDR(sge, DPAA2_MBUF_VADDR_TO_IOVA(sym_op->m_src));
@@ -1220,6 +1220,11 @@ dpaa2_sec_session_configure(struct rte_cryptodev *dev,
 		RTE_LOG(ERR, PMD, "invalid session struct");
 		return NULL;
 	}
+
+	/* Set IV parameters */
+	session->iv.offset = xform->iv.offset;
+	session->iv.length = xform->iv.length;
+
 	/* Cipher Only */
 	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER && xform->next == NULL) {
 		session->ctxt_type = DPAA2_SEC_CIPHER;
diff --git a/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h b/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h
index d4ca86c..b1d116b 100644
--- a/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h
+++ b/drivers/crypto/dpaa2_sec/dpaa2_sec_priv.h
@@ -187,6 +187,10 @@ typedef struct dpaa2_sec_session_entry {
 		uint8_t *data;	/**< pointer to key data */
 		size_t length;	/**< key length in bytes */
 	} auth_key;
+	struct {
+		uint16_t length; /**< IV length in bytes */
+		uint16_t offset; /**< IV offset in bytes */
+	} iv;
 	uint16_t digest_length;
 	uint8_t status;
 	union {
@@ -201,6 +205,7 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
 				.block_size = 64,
@@ -222,6 +227,7 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 				.block_size = 64,
@@ -243,6 +249,7 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
 				.block_size = 64,
@@ -264,6 +271,7 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
 				.block_size = 64,
@@ -285,6 +293,7 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
 				.block_size = 128,
@@ -306,6 +315,7 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
 				.block_size = 128,
@@ -327,6 +337,11 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
 				.block_size = 16,
@@ -334,11 +349,6 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 					.min = 16,
 					.max = 32,
 					.increment = 8
-				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
 				}
 			}, }
 		}, }
@@ -347,6 +357,11 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 8,
+				.max = 8,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_3DES_CBC,
 				.block_size = 8,
@@ -354,11 +369,6 @@ static const struct rte_cryptodev_capabilities dpaa2_sec_capabilities[] = {
 					.min = 16,
 					.max = 24,
 					.increment = 8
-				},
-				.iv_size = {
-					.min = 8,
-					.max = 8,
-					.increment = 0
 				}
 			}, }
 		}, }
diff --git a/drivers/crypto/kasumi/rte_kasumi_pmd.c b/drivers/crypto/kasumi/rte_kasumi_pmd.c
index 4905641..056682b 100644
--- a/drivers/crypto/kasumi/rte_kasumi_pmd.c
+++ b/drivers/crypto/kasumi/rte_kasumi_pmd.c
@@ -111,6 +111,13 @@ kasumi_set_session_parameters(struct kasumi_session *sess,
 		return -EINVAL;
 	}
 
+	/* Sanity checks. */
+	if (xform->iv.length != KASUMI_IV_LENGTH) {
+		KASUMI_LOG_ERR("Wrong IV length");
+		return -EINVAL;
+	}
+	sess->iv_offset = xform->iv.offset;
+
 	if (cipher_xform) {
 		/* Only KASUMI F8 supported */
 		if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_KASUMI_F8)
@@ -184,13 +191,6 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops,
 	uint32_t num_bytes[num_ops];
 
 	for (i = 0; i < num_ops; i++) {
-		/* Sanity checks. */
-		if (ops[i]->sym->cipher.iv.length != KASUMI_IV_LENGTH) {
-			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
-			KASUMI_LOG_ERR("iv");
-			break;
-		}
-
 		src[i] = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
 		dst[i] = ops[i]->sym->m_dst ?
@@ -199,7 +199,7 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops,
 			rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
 		IV_ptr = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
-				ops[i]->sym->cipher.iv.offset);
+				session->iv_offset);
 		IV[i] = *((uint64_t *)(IV_ptr));
 		num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
 
@@ -223,13 +223,6 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op,
 	uint64_t IV;
 	uint32_t length_in_bits, offset_in_bits;
 
-	/* Sanity checks. */
-	if (unlikely(op->sym->cipher.iv.length != KASUMI_IV_LENGTH)) {
-		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
-		KASUMI_LOG_ERR("iv");
-		return 0;
-	}
-
 	offset_in_bits = op->sym->cipher.data.offset;
 	src = rte_pktmbuf_mtod(op->sym->m_src, uint8_t *);
 	if (op->sym->m_dst == NULL) {
@@ -239,7 +232,7 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op,
 	}
 	dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
 	IV_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
-			op->sym->cipher.iv.offset);
+			session->iv_offset);
 	IV = *((uint64_t *)(IV_ptr));
 	length_in_bits = op->sym->cipher.data.length;
 
diff --git a/drivers/crypto/kasumi/rte_kasumi_pmd_ops.c b/drivers/crypto/kasumi/rte_kasumi_pmd_ops.c
index 62ebdbd..3f587f5 100644
--- a/drivers/crypto/kasumi/rte_kasumi_pmd_ops.c
+++ b/drivers/crypto/kasumi/rte_kasumi_pmd_ops.c
@@ -43,6 +43,7 @@ static const struct rte_cryptodev_capabilities kasumi_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_KASUMI_F9,
 				.block_size = 8,
@@ -68,6 +69,11 @@ static const struct rte_cryptodev_capabilities kasumi_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 8,
+				.max = 8,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_KASUMI_F8,
 				.block_size = 8,
@@ -75,11 +81,6 @@ static const struct rte_cryptodev_capabilities kasumi_pmd_capabilities[] = {
 					.min = 16,
 					.max = 16,
 					.increment = 0
-				},
-				.iv_size = {
-					.min = 8,
-					.max = 8,
-					.increment = 0
 				}
 			}, }
 		}, }
diff --git a/drivers/crypto/kasumi/rte_kasumi_pmd_private.h b/drivers/crypto/kasumi/rte_kasumi_pmd_private.h
index fb586ca..6a0d47a 100644
--- a/drivers/crypto/kasumi/rte_kasumi_pmd_private.h
+++ b/drivers/crypto/kasumi/rte_kasumi_pmd_private.h
@@ -92,6 +92,7 @@ struct kasumi_session {
 	sso_kasumi_key_sched_t pKeySched_hash;
 	enum kasumi_operation op;
 	enum rte_crypto_auth_operation auth_op;
+	uint16_t iv_offset;
 } __rte_cache_aligned;
 
 
diff --git a/drivers/crypto/null/null_crypto_pmd_ops.c b/drivers/crypto/null/null_crypto_pmd_ops.c
index 12c946c..3c88c50 100644
--- a/drivers/crypto/null/null_crypto_pmd_ops.c
+++ b/drivers/crypto/null/null_crypto_pmd_ops.c
@@ -43,6 +43,7 @@ static const struct rte_cryptodev_capabilities null_crypto_pmd_capabilities[] =
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_NULL,
 				.block_size = 1,
@@ -64,6 +65,7 @@ static const struct rte_cryptodev_capabilities null_crypto_pmd_capabilities[] =
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = { 0 },
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_NULL,
 				.block_size = 1,
@@ -72,11 +74,6 @@ static const struct rte_cryptodev_capabilities null_crypto_pmd_capabilities[] =
 					.max = 0,
 					.increment = 0
 				},
-				.iv_size = {
-					.min = 0,
-					.max = 0,
-					.increment = 0
-				}
 			}, },
 		}, }
 	},
diff --git a/drivers/crypto/openssl/rte_openssl_pmd.c b/drivers/crypto/openssl/rte_openssl_pmd.c
index c3e3cf2..2a3e6ac 100644
--- a/drivers/crypto/openssl/rte_openssl_pmd.c
+++ b/drivers/crypto/openssl/rte_openssl_pmd.c
@@ -398,6 +398,10 @@ openssl_set_session_parameters(struct openssl_session *sess,
 		return -EINVAL;
 	}
 
+	/* Set IV parameters */
+	sess->iv.offset = xform->iv.offset;
+	sess->iv.length = xform->iv.length;
+
 	/* cipher_xform must be check before auth_xform */
 	if (cipher_xform) {
 		if (openssl_set_session_cipher_parameters(
@@ -925,8 +929,8 @@ process_openssl_combined_op
 	}
 
 	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-			op->sym->cipher.iv.offset);
-	ivlen = op->sym->cipher.iv.length;
+			sess->iv.offset);
+	ivlen = sess->iv.length;
 	aad = op->sym->auth.aad.data;
 	aadlen = op->sym->auth.aad.length;
 
@@ -990,7 +994,7 @@ process_openssl_cipher_op
 			op->sym->cipher.data.offset);
 
 	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-			op->sym->cipher.iv.offset);
+			sess->iv.offset);
 
 	if (sess->cipher.mode == OPENSSL_CIPHER_LIB)
 		if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
@@ -1032,7 +1036,7 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op,
 			op->sym->cipher.data.offset);
 
 	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-			op->sym->cipher.iv.offset);
+			sess->iv.offset);
 
 	block_size = DES_BLOCK_SIZE;
 
@@ -1091,7 +1095,7 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op,
 						last_block_len, sess->cipher.bpi_ctx);
 				/* Prepare parameters for CBC mode op */
 				iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-						op->sym->cipher.iv.offset);
+						sess->iv.offset);
 				dst += last_block_len - srclen;
 				srclen -= last_block_len;
 			}
diff --git a/drivers/crypto/openssl/rte_openssl_pmd_ops.c b/drivers/crypto/openssl/rte_openssl_pmd_ops.c
index 22a6873..b730196 100644
--- a/drivers/crypto/openssl/rte_openssl_pmd_ops.c
+++ b/drivers/crypto/openssl/rte_openssl_pmd_ops.c
@@ -44,6 +44,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
 				.block_size = 64,
@@ -65,6 +66,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_MD5,
 				.block_size = 64,
@@ -86,6 +88,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 				.block_size = 64,
@@ -107,6 +110,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA1,
 				.block_size = 64,
@@ -128,6 +132,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
 				.block_size = 64,
@@ -149,6 +154,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA224,
 				.block_size = 64,
@@ -170,6 +176,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
 				.block_size = 64,
@@ -191,6 +198,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 			.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 			{.sym = {
 				.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+				.iv_size = { 0 },
 				{.auth = {
 					.algo = RTE_CRYPTO_AUTH_SHA256,
 					.block_size = 64,
@@ -212,6 +220,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
 				.block_size = 128,
@@ -233,6 +242,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA384,
 				.block_size = 128,
@@ -254,6 +264,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
 				.block_size = 128,
@@ -275,6 +286,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SHA512,
 				.block_size = 128,
@@ -296,6 +308,11 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
 				.block_size = 16,
@@ -303,11 +320,6 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 					.min = 16,
 					.max = 32,
 					.increment = 8
-				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
 				}
 			}, }
 		}, }
@@ -316,6 +328,11 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_CTR,
 				.block_size = 16,
@@ -323,11 +340,6 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 					.min = 16,
 					.max = 32,
 					.increment = 8
-				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
 				}
 			}, }
 		}, }
@@ -336,6 +348,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_AES_GCM,
 				.block_size = 16,
@@ -361,6 +374,11 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 12,
+				.max = 16,
+				.increment = 4
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_AES_GCM,
 				.block_size = 16,
@@ -368,11 +386,6 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 					.min = 16,
 					.max = 32,
 					.increment = 8
-				},
-				.iv_size = {
-					.min = 12,
-					.max = 16,
-					.increment = 4
 				}
 			}, }
 		}, }
@@ -381,6 +394,7 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_AES_GMAC,
 				.block_size = 16,
@@ -406,6 +420,11 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 8,
+				.max = 8,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_3DES_CBC,
 				.block_size = 8,
@@ -413,11 +432,6 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 					.min = 16,
 					.max = 24,
 					.increment = 8
-				},
-				.iv_size = {
-					.min = 8,
-					.max = 8,
-					.increment = 0
 				}
 			}, }
 		}, }
@@ -426,6 +440,11 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 8,
+				.max = 8,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_3DES_CTR,
 				.block_size = 8,
@@ -433,11 +452,6 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 					.min = 16,
 					.max = 24,
 					.increment = 8
-				},
-				.iv_size = {
-					.min = 8,
-					.max = 8,
-					.increment = 0
 				}
 			}, }
 		}, }
@@ -446,6 +460,11 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 8,
+				.max = 8,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_DES_DOCSISBPI,
 				.block_size = 8,
@@ -453,11 +472,6 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
 					.min = 8,
 					.max = 8,
 					.increment = 0
-				},
-				.iv_size = {
-					.min = 8,
-					.max = 8,
-					.increment = 0
 				}
 			}, }
 		}, }
diff --git a/drivers/crypto/openssl/rte_openssl_pmd_private.h b/drivers/crypto/openssl/rte_openssl_pmd_private.h
index 28a8e36..d7eae6c 100644
--- a/drivers/crypto/openssl/rte_openssl_pmd_private.h
+++ b/drivers/crypto/openssl/rte_openssl_pmd_private.h
@@ -108,6 +108,11 @@ struct openssl_session {
 	enum openssl_chain_order chain_order;
 	/**< chain order mode */
 
+	struct {
+		uint16_t length;
+		uint16_t offset;
+	} iv;
+	/**< IV parameters */
 	/** Cipher Parameters */
 	struct {
 		enum rte_crypto_cipher_operation direction;
diff --git a/drivers/crypto/qat/qat_adf/qat_algs.h b/drivers/crypto/qat/qat_adf/qat_algs.h
index 9acd68a..84bd35b 100644
--- a/drivers/crypto/qat/qat_adf/qat_algs.h
+++ b/drivers/crypto/qat/qat_adf/qat_algs.h
@@ -128,6 +128,10 @@ struct qat_session {
 	uint32_t *aad_len;
 	struct qat_crypto_instance *inst;
 	uint16_t digest_length;
+	struct {
+		uint16_t offset;
+		uint16_t length;
+	} iv;
 	rte_spinlock_t lock;	/* protects this struct */
 };
 
diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c
index f72d3e3..0fcf744 100644
--- a/drivers/crypto/qat/qat_crypto.c
+++ b/drivers/crypto/qat/qat_crypto.c
@@ -457,6 +457,10 @@ qat_crypto_sym_configure_session(struct rte_cryptodev *dev,
 	int qat_cmd_id;
 	PMD_INIT_FUNC_TRACE();
 
+	/* Set IV parameters */
+	session->iv.offset = xform->iv.offset;
+	session->iv.length = xform->iv.length;
+
 	/* Get requested QAT command id */
 	qat_cmd_id = qat_get_cmd_id(xform);
 	if (qat_cmd_id < 0 || qat_cmd_id >= ICP_QAT_FW_LA_CMD_DELIMITER) {
@@ -640,7 +644,7 @@ qat_bpicipher_preprocess(struct qat_session *ctx,
 		else
 			/* runt block, i.e. less than one full block */
 			iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-					sym_op->cipher.iv.offset);
+					ctx->iv.offset);
 
 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
 		rte_hexdump(stdout, "BPI: src before pre-process:", last_block,
@@ -696,7 +700,7 @@ qat_bpicipher_postprocess(struct qat_session *ctx,
 		else
 			/* runt block, i.e. less than one full block */
 			iv = rte_crypto_op_ctod_offset(op, uint8_t *,
-					sym_op->cipher.iv.offset);
+					ctx->iv.offset);
 
 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX
 		rte_hexdump(stdout, "BPI: src before post-process:", last_block,
@@ -939,7 +943,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 	}
 
 	IV_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
-					op->sym->cipher.iv.offset);
+					ctx->iv.offset);
 	if (do_cipher) {
 
 		if (ctx->qat_cipher_alg ==
@@ -979,19 +983,19 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 		 * use request descriptor embedded IV
 		 *
 		 */
-		if (op->sym->cipher.iv.length) {
-			if (op->sym->cipher.iv.length <=
+		if (ctx->iv.length) {
+			if (ctx->iv.length <=
 					sizeof(cipher_param->u.cipher_IV_array)) {
 				rte_memcpy(cipher_param->u.cipher_IV_array,
 						IV_ptr,
-						op->sym->cipher.iv.length);
+						ctx->iv.length);
 			} else {
 				ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
 						qat_req->comn_hdr.serv_specif_flags,
 						ICP_QAT_FW_CIPH_IV_64BIT_PTR);
 				cipher_param->u.s.cipher_IV_ptr =
 						rte_crypto_op_ctophys_offset(op,
-							op->sym->cipher.iv.offset);
+							ctx->iv.offset);
 			}
 		}
 		min_ofs = cipher_ofs;
@@ -1157,7 +1161,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 
 	if (ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_128 ||
 			ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_GALOIS_64) {
-		if (op->sym->cipher.iv.length == 12) {
+		if (ctx->iv.length == 12) {
 			/*
 			 * For GCM a 12 bit IV is allowed,
 			 * but we need to inform the f/w
@@ -1192,7 +1196,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 			rte_pktmbuf_mtod(op->sym->m_src, uint8_t*),
 			rte_pktmbuf_data_len(op->sym->m_src));
 	rte_hexdump(stdout, "iv:", IV_ptr,
-			op->sym->cipher.iv.length);
+			ctx->iv.length);
 	rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
 			ctx->digest_length);
 	rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
diff --git a/drivers/crypto/qat/qat_crypto_capabilities.h b/drivers/crypto/qat/qat_crypto_capabilities.h
index 1294f24..11f3687 100644
--- a/drivers/crypto/qat/qat_crypto_capabilities.h
+++ b/drivers/crypto/qat/qat_crypto_capabilities.h
@@ -39,6 +39,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,	\
 				.block_size = 64,			\
@@ -60,6 +61,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,	\
 				.block_size = 64,			\
@@ -81,6 +83,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,	\
 				.block_size = 64,			\
@@ -102,6 +105,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,	\
 				.block_size = 64,			\
@@ -123,6 +127,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,	\
 				.block_size = 128,			\
@@ -144,6 +149,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,	\
 				.block_size = 64,			\
@@ -165,6 +171,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_AES_XCBC_MAC,	\
 				.block_size = 16,			\
@@ -186,6 +193,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_AES_GCM,	\
 				.block_size = 16,			\
@@ -211,6 +219,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_AES_GMAC,	\
 				.block_size = 16,			\
@@ -236,6 +245,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,	\
 				.block_size = 16,			\
@@ -261,6 +271,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 12,				\
+				.max = 12,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_AES_GCM,	\
 				.block_size = 16,			\
@@ -268,11 +283,6 @@
 					.min = 16,			\
 					.max = 32,			\
 					.increment = 8			\
-				},					\
-				.iv_size = {				\
-					.min = 12,			\
-					.max = 12,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -281,6 +291,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 16,				\
+				.max = 16,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_AES_CBC,	\
 				.block_size = 16,			\
@@ -288,11 +303,6 @@
 					.min = 16,			\
 					.max = 32,			\
 					.increment = 8			\
-				},					\
-				.iv_size = {				\
-					.min = 16,			\
-					.max = 16,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -301,6 +311,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 16,				\
+				.max = 16,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_AES_DOCSISBPI,\
 				.block_size = 16,			\
@@ -308,11 +323,6 @@
 					.min = 16,			\
 					.max = 16,			\
 					.increment = 0			\
-				},					\
-				.iv_size = {				\
-					.min = 16,			\
-					.max = 16,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -321,6 +331,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 16,				\
+				.max = 16,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,	\
 				.block_size = 16,			\
@@ -328,11 +343,6 @@
 					.min = 16,			\
 					.max = 16,			\
 					.increment = 0			\
-				},					\
-				.iv_size = {				\
-					.min = 16,			\
-					.max = 16,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -341,6 +351,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 16,				\
+				.max = 16,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_AES_CTR,	\
 				.block_size = 16,			\
@@ -348,11 +363,6 @@
 					.min = 16,			\
 					.max = 32,			\
 					.increment = 8			\
-				},					\
-				.iv_size = {				\
-					.min = 16,			\
-					.max = 16,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -361,6 +371,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_NULL,		\
 				.block_size = 1,			\
@@ -382,6 +393,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = { 0 },				\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_NULL,		\
 				.block_size = 1,			\
@@ -389,11 +401,6 @@
 					.min = 0,			\
 					.max = 0,			\
 					.increment = 0			\
-				},					\
-				.iv_size = {				\
-					.min = 0,			\
-					.max = 0,			\
-					.increment = 0			\
 				}					\
 			}, },						\
 		}, }							\
@@ -402,6 +409,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 8,				\
+				.max = 8,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_KASUMI_F8,	\
 				.block_size = 8,			\
@@ -409,11 +421,6 @@
 					.min = 16,			\
 					.max = 16,			\
 					.increment = 0			\
-				},					\
-				.iv_size = {				\
-					.min = 8,			\
-					.max = 8,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -422,6 +429,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_KASUMI_F9,	\
 				.block_size = 8,			\
@@ -447,6 +455,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 8,				\
+				.max = 8,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_3DES_CBC,	\
 				.block_size = 8,			\
@@ -454,11 +467,6 @@
 					.min = 16,			\
 					.max = 24,			\
 					.increment = 8			\
-				},					\
-				.iv_size = {				\
-					.min = 8,			\
-					.max = 8,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -467,6 +475,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 8,				\
+				.max = 8,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_3DES_CTR,	\
 				.block_size = 8,			\
@@ -474,11 +487,6 @@
 					.min = 16,			\
 					.max = 24,			\
 					.increment = 8			\
-				},					\
-				.iv_size = {				\
-					.min = 8,			\
-					.max = 8,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -487,6 +495,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 8,				\
+				.max = 8,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_DES_CBC,	\
 				.block_size = 8,			\
@@ -494,11 +507,6 @@
 					.min = 8,			\
 					.max = 8,			\
 					.increment = 0			\
-				},					\
-				.iv_size = {				\
-					.min = 8,			\
-					.max = 8,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -507,6 +515,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 8,				\
+				.max = 8,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_DES_DOCSISBPI,\
 				.block_size = 8,			\
@@ -514,11 +527,6 @@
 					.min = 8,			\
 					.max = 8,			\
 					.increment = 0			\
-				},					\
-				.iv_size = {				\
-					.min = 8,			\
-					.max = 8,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -529,6 +537,11 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,	\
+			.iv_size = {					\
+				.min = 16,				\
+				.max = 16,				\
+				.increment = 0				\
+			},						\
 			{.cipher = {					\
 				.algo = RTE_CRYPTO_CIPHER_ZUC_EEA3,	\
 				.block_size = 16,			\
@@ -536,11 +549,6 @@
 					.min = 16,			\
 					.max = 16,			\
 					.increment = 0			\
-				},					\
-				.iv_size = {				\
-					.min = 16,			\
-					.max = 16,			\
-					.increment = 0			\
 				}					\
 			}, }						\
 		}, }							\
@@ -549,6 +557,7 @@
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
 		{.sym = {						\
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,	\
+			.iv_size = { 0 },				\
 			{.auth = {					\
 				.algo = RTE_CRYPTO_AUTH_ZUC_EIA3,	\
 				.block_size = 16,			\
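
With iv_size hoisted to the .sym level, both cipher and auth capability entries
can advertise an IV range without touching the per-algorithm sub-structs. A
sketch of a single entry in the new layout (the values are illustrative, not a
claim about any particular device):

#include <rte_cryptodev.h>

static const struct rte_cryptodev_capabilities example_capabilities[] = {
	{	/* AES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			/* IV range now sits next to xform_type */
			.iv_size = {
				.min = 16,
				.max = 16,
				.increment = 0
			},
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				}
			}, }
		}, }
	},
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
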
diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd.c b/drivers/crypto/snow3g/rte_snow3g_pmd.c
index 8ebe302..30b9172 100644
--- a/drivers/crypto/snow3g/rte_snow3g_pmd.c
+++ b/drivers/crypto/snow3g/rte_snow3g_pmd.c
@@ -111,6 +111,13 @@ snow3g_set_session_parameters(struct snow3g_session *sess,
 		return -EINVAL;
 	}
 
+	/* Sanity checks. */
+	if (xform->iv.length != SNOW3G_IV_LENGTH) {
+		SNOW3G_LOG_ERR("Wrong IV length");
+		return -EINVAL;
+	}
+	sess->iv_offset = xform->iv.offset;
+
 	if (cipher_xform) {
 		/* Only SNOW 3G UEA2 supported */
 		if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_SNOW3G_UEA2)
@@ -183,13 +190,6 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops,
 	uint32_t num_bytes[SNOW3G_MAX_BURST];
 
 	for (i = 0; i < num_ops; i++) {
-		/* Sanity checks. */
-		if (unlikely(ops[i]->sym->cipher.iv.length != SNOW3G_IV_LENGTH)) {
-			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
-			SNOW3G_LOG_ERR("iv");
-			break;
-		}
-
 		src[i] = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
 		dst[i] = ops[i]->sym->m_dst ?
@@ -198,7 +198,7 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops,
 			rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
 		IV[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
-				ops[i]->sym->cipher.iv.offset);
+				session->iv_offset);
 		num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
 
 		processed_ops++;
@@ -219,13 +219,6 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op,
 	uint8_t *IV;
 	uint32_t length_in_bits, offset_in_bits;
 
-	/* Sanity checks. */
-	if (unlikely(op->sym->cipher.iv.length != SNOW3G_IV_LENGTH)) {
-		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
-		SNOW3G_LOG_ERR("iv");
-		return 0;
-	}
-
 	offset_in_bits = op->sym->cipher.data.offset;
 	src = rte_pktmbuf_mtod(op->sym->m_src, uint8_t *);
 	if (op->sym->m_dst == NULL) {
@@ -235,7 +228,7 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op,
 	}
 	dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
 	IV = rte_crypto_op_ctod_offset(op, uint8_t *,
-				op->sym->cipher.iv.offset);
+				session->iv_offset);
 	length_in_bits = op->sym->cipher.data.length;
 
 	sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, IV,
diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c b/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c
index 7ce96be..a0f1488 100644
--- a/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c
+++ b/drivers/crypto/snow3g/rte_snow3g_pmd_ops.c
@@ -43,6 +43,7 @@ static const struct rte_cryptodev_capabilities snow3g_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_SNOW3G_UIA2,
 				.block_size = 16,
@@ -68,6 +69,11 @@ static const struct rte_cryptodev_capabilities snow3g_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
 				.block_size = 16,
@@ -75,11 +81,6 @@ static const struct rte_cryptodev_capabilities snow3g_pmd_capabilities[] = {
 					.min = 16,
 					.max = 16,
 					.increment = 0
-				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
 				}
 			}, }
 		}, }
diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd_private.h b/drivers/crypto/snow3g/rte_snow3g_pmd_private.h
index 03973b9..e8943a7 100644
--- a/drivers/crypto/snow3g/rte_snow3g_pmd_private.h
+++ b/drivers/crypto/snow3g/rte_snow3g_pmd_private.h
@@ -91,6 +91,7 @@ struct snow3g_session {
 	enum rte_crypto_auth_operation auth_op;
 	sso_snow3g_key_schedule_t pKeySched_cipher;
 	sso_snow3g_key_schedule_t pKeySched_hash;
+	uint16_t iv_offset;
 } __rte_cache_aligned;
 
 
diff --git a/drivers/crypto/zuc/rte_zuc_pmd.c b/drivers/crypto/zuc/rte_zuc_pmd.c
index df58ec4..266882b 100644
--- a/drivers/crypto/zuc/rte_zuc_pmd.c
+++ b/drivers/crypto/zuc/rte_zuc_pmd.c
@@ -110,6 +110,13 @@ zuc_set_session_parameters(struct zuc_session *sess,
 		return -EINVAL;
 	}
 
+	/* Sanity checks. */
+	if (xform->iv.length != ZUC_IV_KEY_LENGTH) {
+		ZUC_LOG_ERR("Wrong IV length");
+		return -EINVAL;
+	}
+	sess->iv_offset = xform->iv.offset;
+
 	if (cipher_xform) {
 		/* Only ZUC EEA3 supported */
 		if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_ZUC_EEA3)
@@ -183,13 +190,6 @@ process_zuc_cipher_op(struct rte_crypto_op **ops,
 	uint8_t *cipher_keys[ZUC_MAX_BURST];
 
 	for (i = 0; i < num_ops; i++) {
-		/* Sanity checks. */
-		if (unlikely(ops[i]->sym->cipher.iv.length != ZUC_IV_KEY_LENGTH)) {
-			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
-			ZUC_LOG_ERR("iv");
-			break;
-		}
-
 		if (((ops[i]->sym->cipher.data.length % BYTE_LEN) != 0)
 				|| ((ops[i]->sym->cipher.data.offset
 					% BYTE_LEN) != 0)) {
@@ -219,7 +219,7 @@ process_zuc_cipher_op(struct rte_crypto_op **ops,
 			rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
 		IV[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
-				ops[i]->sym->cipher.iv.offset);
+				session->iv_offset);
 		num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
 
 		cipher_keys[i] = session->pKey_cipher;
diff --git a/drivers/crypto/zuc/rte_zuc_pmd_ops.c b/drivers/crypto/zuc/rte_zuc_pmd_ops.c
index e793459..4804bd1 100644
--- a/drivers/crypto/zuc/rte_zuc_pmd_ops.c
+++ b/drivers/crypto/zuc/rte_zuc_pmd_ops.c
@@ -43,6 +43,7 @@ static const struct rte_cryptodev_capabilities zuc_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
+			.iv_size = { 0 },
 			{.auth = {
 				.algo = RTE_CRYPTO_AUTH_ZUC_EIA3,
 				.block_size = 16,
@@ -68,6 +69,11 @@ static const struct rte_cryptodev_capabilities zuc_pmd_capabilities[] = {
 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 		{.sym = {
 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			.iv_size = {
+				.min = 16,
+				.max = 16,
+				.increment = 0
+			},
 			{.cipher = {
 				.algo = RTE_CRYPTO_CIPHER_ZUC_EEA3,
 				.block_size = 16,
@@ -75,11 +81,6 @@ static const struct rte_cryptodev_capabilities zuc_pmd_capabilities[] = {
 					.min = 16,
 					.max = 16,
 					.increment = 0
-				},
-				.iv_size = {
-					.min = 16,
-					.max = 16,
-					.increment = 0
 				}
 			}, }
 		}, }
diff --git a/drivers/crypto/zuc/rte_zuc_pmd_private.h b/drivers/crypto/zuc/rte_zuc_pmd_private.h
index 030f120..cee1b5d 100644
--- a/drivers/crypto/zuc/rte_zuc_pmd_private.h
+++ b/drivers/crypto/zuc/rte_zuc_pmd_private.h
@@ -92,6 +92,7 @@ struct zuc_session {
 	enum rte_crypto_auth_operation auth_op;
 	uint8_t pKey_cipher[ZUC_IV_KEY_LENGTH];
 	uint8_t pKey_hash[ZUC_IV_KEY_LENGTH];
+	uint16_t iv_offset;
 } __rte_cache_aligned;
 
 
diff --git a/lib/librte_cryptodev/rte_crypto_sym.h b/lib/librte_cryptodev/rte_crypto_sym.h
index 4b921e8..9309a08 100644
--- a/lib/librte_cryptodev/rte_crypto_sym.h
+++ b/lib/librte_cryptodev/rte_crypto_sym.h
@@ -345,6 +345,55 @@ struct rte_crypto_sym_xform {
 		struct rte_crypto_cipher_xform cipher;
 		/**< Cipher xform */
 	};
+	struct {
+		uint16_t offset;
+		/**< Starting point for Initialisation Vector or Counter,
+		 * specified as number of bytes from start of crypto
+		 * operation.
+		 *
+		 * - For block ciphers in CBC or F8 mode, or for KASUMI
+		 * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
+		 * Initialisation Vector (IV) value.
+		 *
+		 * - For block ciphers in CTR mode, this is the counter.
+		 *
+		 * - For GCM mode, this is either the IV (if the length
+		 * is 96 bits) or J0 (for other sizes), where J0 is as
+		 * defined by NIST SP800-38D. Regardless of the IV
+		 * length, a full 16 bytes needs to be allocated.
+		 *
+		 * - For CCM mode, the first byte is reserved, and the
+		 * nonce should be written starting at &iv[1] (to allow
+		 * space for the implementation to write in the flags
+		 * in the first byte). Note that a full 16 bytes should
+		 * be allocated, even though the length field will
+		 * have a value less than this.
+		 *
+		 * - For AES-XTS, this is the 128bit tweak, i, from
+		 * IEEE Std 1619-2007.
+		 *
+		 * For optimum performance, the data pointed to SHOULD
+		 * be 8-byte aligned.
+		 */
+		uint16_t length;
+		/**< Length of valid IV data.
+		 *
+		 * - For block ciphers in CBC or F8 mode, or for KASUMI
+		 * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
+		 * length of the IV (which must be the same as the
+		 * block length of the cipher).
+		 *
+		 * - For block ciphers in CTR mode, this is the length
+		 * of the counter (which must be the same as the block
+		 * length of the cipher).
+		 *
+		 * - For GCM mode, this is either 12 (for 96-bit IVs)
+		 * or 16, in which case data points to J0.
+		 *
+		 * - For CCM mode, this is the length of the nonce,
+		 * which can be in the range 7 to 13 inclusive.
+		 */
+	} iv;	/**< Initialisation vector parameters */
 };
 
 struct rte_cryptodev_sym_session;
@@ -434,55 +483,6 @@ struct rte_crypto_sym_op {
 			  */
 		} data; /**< Data offsets and length for ciphering */
 
-		struct {
-			uint16_t offset;
-			/**< Starting point for Initialisation Vector or Counter,
-			 * specified as number of bytes from start of crypto
-			 * operation.
-			 *
-			 * - For block ciphers in CBC or F8 mode, or for KASUMI
-			 * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
-			 * Initialisation Vector (IV) value.
-			 *
-			 * - For block ciphers in CTR mode, this is the counter.
-			 *
-			 * - For GCM mode, this is either the IV (if the length
-			 * is 96 bits) or J0 (for other sizes), where J0 is as
-			 * defined by NIST SP800-38D. Regardless of the IV
-			 * length, a full 16 bytes needs to be allocated.
-			 *
-			 * - For CCM mode, the first byte is reserved, and the
-			 * nonce should be written starting at &iv[1] (to allow
-			 * space for the implementation to write in the flags
-			 * in the first byte). Note that a full 16 bytes should
-			 * be allocated, even though the length field will
-			 * have a value less than this.
-			 *
-			 * - For AES-XTS, this is the 128bit tweak, i, from
-			 * IEEE Std 1619-2007.
-			 *
-			 * For optimum performance, the data pointed to SHOULD
-			 * be 8-byte aligned.
-			 */
-			uint16_t length;
-			/**< Length of valid IV data.
-			 *
-			 * - For block ciphers in CBC or F8 mode, or for KASUMI
-			 * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
-			 * length of the IV (which must be the same as the
-			 * block length of the cipher).
-			 *
-			 * - For block ciphers in CTR mode, this is the length
-			 * of the counter (which must be the same as the block
-			 * length of the cipher).
-			 *
-			 * - For GCM mode, this is either 12 (for 96-bit IVs)
-			 * or 16, in which case data points to J0.
-			 *
-			 * - For CCM mode, this is the length of the nonce,
-			 * which can be in the range 7 to 13 inclusive.
-			 */
-		} iv;	/**< Initialisation vector parameters */
 	} cipher;
 
 	struct {
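
From the application side, the IV offset and length are now supplied once in
the transform used to create the session; each operation only carries the IV
bytes themselves at that offset. A minimal sketch, assuming an
application-chosen IV_OFFSET inside the op's private data area (IV_OFFSET,
IV_LENGTH and cipher_key are illustrative, not part of this patch):

#include <rte_crypto.h>
#include <rte_memcpy.h>

#define IV_OFFSET	(sizeof(struct rte_crypto_op) + \
			sizeof(struct rte_crypto_sym_op))
#define IV_LENGTH	16

static uint8_t cipher_key[16];

static struct rte_crypto_sym_xform cipher_xform = {
	.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
	.cipher = {
		.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
		.algo = RTE_CRYPTO_CIPHER_AES_CBC,
		.key = {
			.data = cipher_key,
			.length = sizeof(cipher_key)
		},
	},
	/* IV parameters are per session now, not per operation */
	.iv = {
		.offset = IV_OFFSET,
		.length = IV_LENGTH
	},
};

/* Per operation: only the IV data itself is written into the op. */
static void
set_op_iv(struct rte_crypto_op *op, const uint8_t *iv)
{
	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);

	rte_memcpy(iv_ptr, iv, IV_LENGTH);
}
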
diff --git a/lib/librte_cryptodev/rte_cryptodev.c b/lib/librte_cryptodev/rte_cryptodev.c
index b65cd9c..c9e4fa3 100644
--- a/lib/librte_cryptodev/rte_cryptodev.c
+++ b/lib/librte_cryptodev/rte_cryptodev.c
@@ -381,7 +381,7 @@ rte_cryptodev_sym_capability_check_cipher(
 	if (param_range_check(key_size, capability->cipher.key_size))
 		return -1;
 
-	if (param_range_check(iv_size, capability->cipher.iv_size))
+	if (param_range_check(iv_size, capability->iv_size))
 		return -1;
 
 	return 0;
@@ -390,7 +390,8 @@ rte_cryptodev_sym_capability_check_cipher(
 int
 rte_cryptodev_sym_capability_check_auth(
 		const struct rte_cryptodev_symmetric_capability *capability,
-		uint16_t key_size, uint16_t digest_size, uint16_t aad_size)
+		uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
+		uint16_t iv_size)
 {
 	if (param_range_check(key_size, capability->auth.key_size))
 		return -1;
@@ -401,6 +402,9 @@ rte_cryptodev_sym_capability_check_auth(
 	if (param_range_check(aad_size, capability->auth.aad_size))
 		return -1;
 
+	if (param_range_check(iv_size, capability->iv_size))
+		return -1;
+
 	return 0;
 }
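
Existing callers of rte_cryptodev_sym_capability_check_auth() must pass the
extra iv_size argument. A hedged example of a caller-side check under the new
signature (check_gmac_support and its parameters are illustrative):

#include <errno.h>
#include <rte_cryptodev.h>

static int
check_gmac_support(uint8_t dev_id, uint16_t key_size, uint16_t digest_size,
		uint16_t aad_size, uint16_t iv_size)
{
	const struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_AUTH,
		.algo.auth = RTE_CRYPTO_AUTH_AES_GMAC,
	};
	const struct rte_cryptodev_symmetric_capability *cap =
		rte_cryptodev_sym_capability_get(dev_id, &idx);

	if (cap == NULL)
		return -ENOTSUP;

	/* iv_size is the new, final argument after this patch */
	return rte_cryptodev_sym_capability_check_auth(cap, key_size,
			digest_size, aad_size, iv_size);
}
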
 
diff --git a/lib/librte_cryptodev/rte_cryptodev.h b/lib/librte_cryptodev/rte_cryptodev.h
index bd09176..82cfddd 100644
--- a/lib/librte_cryptodev/rte_cryptodev.h
+++ b/lib/librte_cryptodev/rte_cryptodev.h
@@ -168,6 +168,8 @@ struct rte_cryptodev_symmetric_capability {
 	enum rte_crypto_sym_xform_type xform_type;
 	/**< Transform type : Authentication / Cipher */
 	RTE_STD_C11
+	struct rte_crypto_param_range iv_size;
+	/**< Initialisation vector data size range */
 	union {
 		struct {
 			enum rte_crypto_auth_algorithm algo;
@@ -189,8 +191,6 @@ struct rte_cryptodev_symmetric_capability {
 			/**< algorithm block size */
 			struct rte_crypto_param_range key_size;
 			/**< cipher key size range */
-			struct rte_crypto_param_range iv_size;
-			/**< Initialisation vector data size range */
 		} cipher;
 		/**< Symmetric Cipher transform capabilities */
 	};
@@ -237,7 +237,7 @@ rte_cryptodev_sym_capability_get(uint8_t dev_id,
  *
  * @param	capability	Description of the symmetric crypto capability.
  * @param	key_size	Cipher key size.
- * @param	iv_size		Cipher initial vector size.
+ * @param	iv_size		Initial vector size.
  *
  * @return
  *   - Return 0 if the parameters are in range of the capability.
@@ -256,6 +256,7 @@ rte_cryptodev_sym_capability_check_cipher(
  * @param	key_size	Auth key size.
  * @param	digest_size	Auth digest size.
  * @param	aad_size	Auth aad size.
+ * @param	iv_size		Initial vector size.
  *
  * @return
  *   - Return 0 if the parameters are in range of the capability.
@@ -264,7 +265,8 @@ rte_cryptodev_sym_capability_check_cipher(
 int
 rte_cryptodev_sym_capability_check_auth(
 		const struct rte_cryptodev_symmetric_capability *capability,
-		uint16_t key_size, uint16_t digest_size, uint16_t aad_size);
+		uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
+		uint16_t iv_size);
 
 /**
  * Provide the cipher algorithm enum, given an algorithm string
-- 
2.7.4