[dpdk-dev] [PATCH v3 12/26] cryptodev: pass IV as offset

Pablo de Lara pablo.de.lara.guarch at intel.com
Thu Jun 29 13:35:07 CEST 2017


Since the IV is now copied after the crypto operation,
into its private data area, the IV can be passed with
just an offset and length.

Signed-off-by: Pablo de Lara <pablo.de.lara.guarch at intel.com>
Acked-by: Akhil Goyal <akhil.goyal at nxp.com>
---
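
As a quick reference, below is a minimal sketch of the resulting
application-side usage. It is not part of the patch: IV_OFFSET is an
application-chosen offset into the op private data (mirroring the
definition used by the examples updated below), and set_cipher_iv()
is a hypothetical helper.

    #include <rte_crypto.h>
    #include <rte_memcpy.h>

    /* IV placed right after the symmetric op, inside the crypto op */
    #define IV_OFFSET (sizeof(struct rte_crypto_op) + \
                    sizeof(struct rte_crypto_sym_op))

    static void
    set_cipher_iv(struct rte_crypto_op *op, const uint8_t *iv,
                    uint16_t iv_len)
    {
            /* New API: the op carries only offset and length */
            op->sym->cipher.iv.offset = IV_OFFSET;
            op->sym->cipher.iv.length = iv_len;

            /* Copy the IV bytes themselves into the op private data */
            rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *,
                            IV_OFFSET), iv, iv_len);
    }

PMDs rebuild the virtual (and, where needed, physical) address from
the offset, so the pointer and phys_addr fields can be dropped from
rte_crypto_sym_op.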
 app/test-crypto-perf/cperf_ops.c            |  49 +++++++------
 doc/guides/prog_guide/cryptodev_lib.rst     |   3 +-
 doc/guides/rel_notes/release_17_08.rst      |   2 +
 drivers/crypto/aesni_gcm/aesni_gcm_pmd.c    |  80 +++++++++++----------
 drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c  |   3 +-
 drivers/crypto/armv8/rte_armv8_pmd.c        |   3 +-
 drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c |   8 ++-
 drivers/crypto/kasumi/rte_kasumi_pmd.c      |  26 ++++---
 drivers/crypto/openssl/rte_openssl_pmd.c    |  12 ++--
 drivers/crypto/qat/qat_crypto.c             |  30 +++++---
 drivers/crypto/snow3g/rte_snow3g_pmd.c      |  14 ++--
 drivers/crypto/zuc/rte_zuc_pmd.c            |   7 +-
 examples/ipsec-secgw/esp.c                  |  14 +---
 examples/l2fwd-crypto/main.c                |   5 +-
 lib/librte_cryptodev/rte_crypto_sym.h       |   7 +-
 test/test/test_cryptodev.c                  | 107 +++++++++++-----------------
 test/test/test_cryptodev_blockcipher.c      |   8 +--
 test/test/test_cryptodev_perf.c             |  60 ++++++----------
 18 files changed, 211 insertions(+), 227 deletions(-)

diff --git a/app/test-crypto-perf/cperf_ops.c b/app/test-crypto-perf/cperf_ops.c
index 7404abc..10002cd 100644
--- a/app/test-crypto-perf/cperf_ops.c
+++ b/app/test-crypto-perf/cperf_ops.c
@@ -106,10 +106,7 @@ cperf_set_ops_cipher(struct rte_crypto_op **ops,
 		sym_op->m_dst = bufs_out[i];
 
 		/* cipher parameters */
-		sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i],
-							uint8_t *, iv_offset);
-		sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
-							iv_offset);
+		sym_op->cipher.iv.offset = iv_offset;
 		sym_op->cipher.iv.length = test_vector->iv.length;
 
 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
@@ -123,11 +120,14 @@ cperf_set_ops_cipher(struct rte_crypto_op **ops,
 	}
 
 	if (options->test == CPERF_TEST_TYPE_VERIFY) {
-		for (i = 0; i < nb_ops; i++)
-			memcpy(ops[i]->sym->cipher.iv.data,
-				test_vector->iv.data,
-				test_vector->iv.length);
-	}
+		for (i = 0; i < nb_ops; i++) {
+			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
+					uint8_t *, iv_offset);
+
+			memcpy(iv_ptr, test_vector->iv.data,
+					test_vector->iv.length);
+		}
+	}
 
 	return 0;
 }
@@ -217,10 +216,7 @@ cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
 		sym_op->m_dst = bufs_out[i];
 
 		/* cipher parameters */
-		sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i],
-							uint8_t *, iv_offset);
-		sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
-							iv_offset);
+		sym_op->cipher.iv.offset = iv_offset;
 		sym_op->cipher.iv.length = test_vector->iv.length;
 
 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
@@ -277,10 +273,13 @@ cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
 	}
 
 	if (options->test == CPERF_TEST_TYPE_VERIFY) {
-		for (i = 0; i < nb_ops; i++)
-			memcpy(ops[i]->sym->cipher.iv.data,
-				test_vector->iv.data,
-				test_vector->iv.length);
+		for (i = 0; i < nb_ops; i++) {
+			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
+					uint8_t *, iv_offset);
+
+			memcpy(iv_ptr, test_vector->iv.data,
+					test_vector->iv.length);
+		}
 	}
 
 	return 0;
@@ -305,10 +304,7 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
 		sym_op->m_dst = bufs_out[i];
 
 		/* cipher parameters */
-		sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ops[i],
-							uint8_t *, iv_offset);
-		sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
-							iv_offset);
+		sym_op->cipher.iv.offset = iv_offset;
 		sym_op->cipher.iv.length = test_vector->iv.length;
 
 		sym_op->cipher.data.length = options->test_buffer_size;
@@ -357,10 +353,13 @@ cperf_set_ops_aead(struct rte_crypto_op **ops,
 	}
 
 	if (options->test == CPERF_TEST_TYPE_VERIFY) {
-		for (i = 0; i < nb_ops; i++)
-			memcpy(ops[i]->sym->cipher.iv.data,
-				test_vector->iv.data,
-				test_vector->iv.length);
+		for (i = 0; i < nb_ops; i++) {
+			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
+					uint8_t *, iv_offset);
+
+			memcpy(iv_ptr, test_vector->iv.data,
+					test_vector->iv.length);
+		}
 	}
 
 	return 0;
diff --git a/doc/guides/prog_guide/cryptodev_lib.rst b/doc/guides/prog_guide/cryptodev_lib.rst
index c9a29f8..48c58a9 100644
--- a/doc/guides/prog_guide/cryptodev_lib.rst
+++ b/doc/guides/prog_guide/cryptodev_lib.rst
@@ -537,8 +537,7 @@ chain.
             } data;   /**< Data offsets and length for ciphering */
 
             struct {
-                uint8_t *data;
-                phys_addr_t phys_addr;
+                uint16_t offset;
                 uint16_t length;
             } iv;     /**< Initialisation vector parameters */
         } cipher;
diff --git a/doc/guides/rel_notes/release_17_08.rst b/doc/guides/rel_notes/release_17_08.rst
index 6acbf35..68e8022 100644
--- a/doc/guides/rel_notes/release_17_08.rst
+++ b/doc/guides/rel_notes/release_17_08.rst
@@ -157,6 +157,8 @@ API Changes
   * Removed the field ``opaque_data`` from ``rte_crypto_op``.
   * Pointer to ``rte_crypto_sym_op`` in ``rte_crypto_op`` has been replaced
     with a zero length array.
+  * Replaced pointer and physical address of IV in ``rte_crypto_sym_op`` with
+    offset from the start of the crypto operation.
 
 
 ABI Changes
diff --git a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
index a0154ff..217ea65 100644
--- a/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
+++ b/drivers/crypto/aesni_gcm/aesni_gcm_pmd.c
@@ -180,12 +180,14 @@ aesni_gcm_get_session(struct aesni_gcm_qp *qp, struct rte_crypto_op *op)
  *
  */
 static int
-process_gcm_crypto_op(struct rte_crypto_sym_op *op,
+process_gcm_crypto_op(struct rte_crypto_op *op,
 		struct aesni_gcm_session *session)
 {
 	uint8_t *src, *dst;
-	struct rte_mbuf *m_src = op->m_src;
-	uint32_t offset = op->cipher.data.offset;
+	uint8_t *iv_ptr;
+	struct rte_crypto_sym_op *sym_op = op->sym;
+	struct rte_mbuf *m_src = sym_op->m_src;
+	uint32_t offset = sym_op->cipher.data.offset;
 	uint32_t part_len, total_len, data_len;
 
 	RTE_ASSERT(m_src != NULL);
@@ -198,46 +200,48 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op,
 	}
 
 	data_len = m_src->data_len - offset;
-	part_len = (data_len < op->cipher.data.length) ? data_len :
-			op->cipher.data.length;
+	part_len = (data_len < sym_op->cipher.data.length) ? data_len :
+			sym_op->cipher.data.length;
 
 	/* Destination buffer is required when segmented source buffer */
-	RTE_ASSERT((part_len == op->cipher.data.length) ||
-			((part_len != op->cipher.data.length) &&
-					(op->m_dst != NULL)));
+	RTE_ASSERT((part_len == sym_op->cipher.data.length) ||
+			((part_len != sym_op->cipher.data.length) &&
+					(sym_op->m_dst != NULL)));
 	/* Segmented destination buffer is not supported */
-	RTE_ASSERT((op->m_dst == NULL) ||
-			((op->m_dst != NULL) &&
-					rte_pktmbuf_is_contiguous(op->m_dst)));
+	RTE_ASSERT((sym_op->m_dst == NULL) ||
+			((sym_op->m_dst != NULL) &&
+					rte_pktmbuf_is_contiguous(sym_op->m_dst)));
 
 
-	dst = op->m_dst ?
-			rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
-					op->cipher.data.offset) :
-			rte_pktmbuf_mtod_offset(op->m_src, uint8_t *,
-					op->cipher.data.offset);
+	dst = sym_op->m_dst ?
+			rte_pktmbuf_mtod_offset(sym_op->m_dst, uint8_t *,
+					sym_op->cipher.data.offset) :
+			rte_pktmbuf_mtod_offset(sym_op->m_src, uint8_t *,
+					sym_op->cipher.data.offset);
 
 	src = rte_pktmbuf_mtod_offset(m_src, uint8_t *, offset);
 
 	/* sanity checks */
-	if (op->cipher.iv.length != 16 && op->cipher.iv.length != 12 &&
-			op->cipher.iv.length != 0) {
+	if (sym_op->cipher.iv.length != 16 && sym_op->cipher.iv.length != 12 &&
+			sym_op->cipher.iv.length != 0) {
 		GCM_LOG_ERR("iv");
 		return -1;
 	}
 
+	iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+				sym_op->cipher.iv.offset);
 	/*
 	 * GCM working in 12B IV mode => 16B pre-counter block we need
 	 * to set BE LSB to 1, driver expects that 16B is allocated
 	 */
-	if (op->cipher.iv.length == 12) {
-		uint32_t *iv_padd = (uint32_t *)&op->cipher.iv.data[12];
+	if (sym_op->cipher.iv.length == 12) {
+		uint32_t *iv_padd = (uint32_t *)&(iv_ptr[12]);
 		*iv_padd = rte_bswap32(1);
 	}
 
-	if (op->auth.digest.length != 16 &&
-			op->auth.digest.length != 12 &&
-			op->auth.digest.length != 8) {
+	if (sym_op->auth.digest.length != 16 &&
+			sym_op->auth.digest.length != 12 &&
+			sym_op->auth.digest.length != 8) {
 		GCM_LOG_ERR("digest");
 		return -1;
 	}
@@ -245,13 +249,13 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op,
 	if (session->op == AESNI_GCM_OP_AUTHENTICATED_ENCRYPTION) {
 
 		aesni_gcm_enc[session->key].init(&session->gdata,
-				op->cipher.iv.data,
-				op->auth.aad.data,
-				(uint64_t)op->auth.aad.length);
+				iv_ptr,
+				sym_op->auth.aad.data,
+				(uint64_t)sym_op->auth.aad.length);
 
 		aesni_gcm_enc[session->key].update(&session->gdata, dst, src,
 				(uint64_t)part_len);
-		total_len = op->cipher.data.length - part_len;
+		total_len = sym_op->cipher.data.length - part_len;
 
 		while (total_len) {
 			dst += part_len;
@@ -270,12 +274,12 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op,
 		}
 
 		aesni_gcm_enc[session->key].finalize(&session->gdata,
-				op->auth.digest.data,
-				(uint64_t)op->auth.digest.length);
+				sym_op->auth.digest.data,
+				(uint64_t)sym_op->auth.digest.length);
 	} else { /* session->op == AESNI_GCM_OP_AUTHENTICATED_DECRYPTION */
-		uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(op->m_dst ?
-				op->m_dst : op->m_src,
-				op->auth.digest.length);
+		uint8_t *auth_tag = (uint8_t *)rte_pktmbuf_append(sym_op->m_dst ?
+				sym_op->m_dst : sym_op->m_src,
+				sym_op->auth.digest.length);
 
 		if (!auth_tag) {
 			GCM_LOG_ERR("auth_tag");
@@ -283,13 +287,13 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op,
 		}
 
 		aesni_gcm_dec[session->key].init(&session->gdata,
-				op->cipher.iv.data,
-				op->auth.aad.data,
-				(uint64_t)op->auth.aad.length);
+				iv_ptr,
+				sym_op->auth.aad.data,
+				(uint64_t)sym_op->auth.aad.length);
 
 		aesni_gcm_dec[session->key].update(&session->gdata, dst, src,
 				(uint64_t)part_len);
-		total_len = op->cipher.data.length - part_len;
+		total_len = sym_op->cipher.data.length - part_len;
 
 		while (total_len) {
 			dst += part_len;
@@ -309,7 +313,7 @@ process_gcm_crypto_op(struct rte_crypto_sym_op *op,
 
 		aesni_gcm_dec[session->key].finalize(&session->gdata,
 				auth_tag,
-				(uint64_t)op->auth.digest.length);
+				(uint64_t)sym_op->auth.digest.length);
 	}
 
 	return 0;
@@ -401,7 +405,7 @@ aesni_gcm_pmd_dequeue_burst(void *queue_pair,
 			break;
 		}
 
-		retval = process_gcm_crypto_op(ops[i]->sym, sess);
+		retval = process_gcm_crypto_op(ops[i], sess);
 		if (retval < 0) {
 			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
 			qp->qp_stats.dequeue_err_count++;
diff --git a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
index ccdb3a7..1f03582 100644
--- a/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
+++ b/drivers/crypto/aesni_mb/rte_aesni_mb_pmd.c
@@ -471,7 +471,8 @@ set_mb_job_params(JOB_AES_HMAC *job, struct aesni_mb_qp *qp,
 			get_truncated_digest_byte_length(job->hash_alg);
 
 	/* Set IV parameters */
-	job->iv = op->sym->cipher.iv.data;
+	job->iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+			op->sym->cipher.iv.offset);
 	job->iv_len_in_bytes = op->sym->cipher.iv.length;
 
 	/* Data  Parameter */
diff --git a/drivers/crypto/armv8/rte_armv8_pmd.c b/drivers/crypto/armv8/rte_armv8_pmd.c
index 4a79b61..693eccd 100644
--- a/drivers/crypto/armv8/rte_armv8_pmd.c
+++ b/drivers/crypto/armv8/rte_armv8_pmd.c
@@ -654,7 +654,8 @@ process_armv8_chained_op
 		return;
 	}
 
-	arg.cipher.iv = op->sym->cipher.iv.data;
+	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+					op->sym->cipher.iv.offset);
 	arg.cipher.key = sess->cipher.key.data;
 	/* Acquire combined mode function */
 	crypto_func = sess->crypto_func;
diff --git a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
index e154395..1605701 100644
--- a/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
+++ b/drivers/crypto/dpaa2_sec/dpaa2_sec_dpseci.c
@@ -87,6 +87,8 @@ build_authenc_fd(dpaa2_sec_session *sess,
 	int icv_len = sym_op->auth.digest.length;
 	uint8_t *old_icv;
 	uint32_t mem_len = (7 * sizeof(struct qbman_fle)) + icv_len;
+	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+			op->sym->cipher.iv.offset);
 
 	PMD_INIT_FUNC_TRACE();
 
@@ -178,7 +180,7 @@ build_authenc_fd(dpaa2_sec_session *sess,
 			 sym_op->auth.digest.length);
 
 	/* Configure Input SGE for Encap/Decap */
-	DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(sym_op->cipher.iv.data));
+	DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
 	sge->length = sym_op->cipher.iv.length;
 	sge++;
 
@@ -307,6 +309,8 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
 	uint32_t mem_len = (5 * sizeof(struct qbman_fle));
 	struct sec_flow_context *flc;
 	struct ctxt_priv *priv = sess->ctxt;
+	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+			op->sym->cipher.iv.offset);
 
 	PMD_INIT_FUNC_TRACE();
 
@@ -369,7 +373,7 @@ build_cipher_fd(dpaa2_sec_session *sess, struct rte_crypto_op *op,
 
 	DPAA2_SET_FLE_SG_EXT(fle);
 
-	DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(sym_op->cipher.iv.data));
+	DPAA2_SET_FLE_ADDR(sge, DPAA2_VADDR_TO_IOVA(iv_ptr));
 	sge->length = sym_op->cipher.iv.length;
 
 	sge++;
diff --git a/drivers/crypto/kasumi/rte_kasumi_pmd.c b/drivers/crypto/kasumi/rte_kasumi_pmd.c
index c539650..9a0b4a8 100644
--- a/drivers/crypto/kasumi/rte_kasumi_pmd.c
+++ b/drivers/crypto/kasumi/rte_kasumi_pmd.c
@@ -174,7 +174,8 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops,
 	unsigned i;
 	uint8_t processed_ops = 0;
 	uint8_t *src[num_ops], *dst[num_ops];
-	uint64_t IV[num_ops];
+	uint8_t *iv_ptr;
+	uint64_t iv[num_ops];
 	uint32_t num_bytes[num_ops];
 
 	for (i = 0; i < num_ops; i++) {
@@ -192,14 +193,16 @@ process_kasumi_cipher_op(struct rte_crypto_op **ops,
 				(ops[i]->sym->cipher.data.offset >> 3) :
 			rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
-		IV[i] = *((uint64_t *)(ops[i]->sym->cipher.iv.data));
+		iv_ptr = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
+				ops[i]->sym->cipher.iv.offset);
+		iv[i] = *((uint64_t *)(iv_ptr));
 		num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
 
 		processed_ops++;
 	}
 
 	if (processed_ops != 0)
-		sso_kasumi_f8_n_buffer(&session->pKeySched_cipher, IV,
+		sso_kasumi_f8_n_buffer(&session->pKeySched_cipher, iv,
 			src, dst, num_bytes, processed_ops);
 
 	return processed_ops;
@@ -211,7 +214,8 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op,
 		struct kasumi_session *session)
 {
 	uint8_t *src, *dst;
-	uint64_t IV;
+	uint8_t *iv_ptr;
+	uint64_t iv;
 	uint32_t length_in_bits, offset_in_bits;
 
 	/* Sanity checks. */
@@ -229,10 +233,12 @@ process_kasumi_cipher_op_bit(struct rte_crypto_op *op,
 		return 0;
 	}
 	dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
-	IV = *((uint64_t *)(op->sym->cipher.iv.data));
+	iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+			op->sym->cipher.iv.offset);
+	iv = *((uint64_t *)(iv_ptr));
 	length_in_bits = op->sym->cipher.data.length;
 
-	sso_kasumi_f8_1_buffer_bit(&session->pKeySched_cipher, IV,
+	sso_kasumi_f8_1_buffer_bit(&session->pKeySched_cipher, iv,
 			src, dst, length_in_bits, offset_in_bits);
 
 	return 1;
@@ -250,7 +256,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops,
 	uint32_t length_in_bits;
 	uint32_t num_bytes;
 	uint32_t shift_bits;
-	uint64_t IV;
+	uint64_t iv;
 	uint8_t direction;
 
 	for (i = 0; i < num_ops; i++) {
@@ -278,7 +284,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops,
 		src = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->auth.data.offset >> 3);
 		/* IV from AAD */
-		IV = *((uint64_t *)(ops[i]->sym->auth.aad.data));
+		iv = *((uint64_t *)(ops[i]->sym->auth.aad.data));
 		/* Direction from next bit after end of message */
 		num_bytes = (length_in_bits >> 3) + 1;
 		shift_bits = (BYTE_LEN - 1 - length_in_bits) % BYTE_LEN;
@@ -289,7 +295,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops,
 					ops[i]->sym->auth.digest.length);
 
 			sso_kasumi_f9_1_buffer_user(&session->pKeySched_hash,
-					IV, src,
+					iv, src,
 					length_in_bits,	dst, direction);
 			/* Verify digest. */
 			if (memcmp(dst, ops[i]->sym->auth.digest.data,
@@ -303,7 +309,7 @@ process_kasumi_hash_op(struct rte_crypto_op **ops,
 			dst = ops[i]->sym->auth.digest.data;
 
 			sso_kasumi_f9_1_buffer_user(&session->pKeySched_hash,
-					IV, src,
+					iv, src,
 					length_in_bits, dst, direction);
 		}
 		processed_ops++;
diff --git a/drivers/crypto/openssl/rte_openssl_pmd.c b/drivers/crypto/openssl/rte_openssl_pmd.c
index 9f4d9b7..6bfa06f 100644
--- a/drivers/crypto/openssl/rte_openssl_pmd.c
+++ b/drivers/crypto/openssl/rte_openssl_pmd.c
@@ -923,7 +923,8 @@ process_openssl_combined_op
 		return;
 	}
 
-	iv = op->sym->cipher.iv.data;
+	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+			op->sym->cipher.iv.offset);
 	ivlen = op->sym->cipher.iv.length;
 	aad = op->sym->auth.aad.data;
 	aadlen = op->sym->auth.aad.length;
@@ -987,7 +988,8 @@ process_openssl_cipher_op
 	dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
 			op->sym->cipher.data.offset);
 
-	iv = op->sym->cipher.iv.data;
+	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+			op->sym->cipher.iv.offset);
 
 	if (sess->cipher.mode == OPENSSL_CIPHER_LIB)
 		if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
@@ -1028,7 +1030,8 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op,
 	dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
 			op->sym->cipher.data.offset);
 
-	iv = op->sym->cipher.iv.data;
+	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+			op->sym->cipher.iv.offset);
 
 	block_size = DES_BLOCK_SIZE;
 
@@ -1086,7 +1089,8 @@ process_openssl_docsis_bpi_op(struct rte_crypto_op *op,
 						dst, iv,
 						last_block_len, sess->cipher.bpi_ctx);
 				/* Prepare parameters for CBC mode op */
-				iv = op->sym->cipher.iv.data;
+				iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+						op->sym->cipher.iv.offset);
 				dst += last_block_len - srclen;
 				srclen -= last_block_len;
 			}
diff --git a/drivers/crypto/qat/qat_crypto.c b/drivers/crypto/qat/qat_crypto.c
index 9b294e4..a4f356f 100644
--- a/drivers/crypto/qat/qat_crypto.c
+++ b/drivers/crypto/qat/qat_crypto.c
@@ -642,7 +642,8 @@ qat_bpicipher_preprocess(struct qat_session *ctx,
 			iv = last_block - block_len;
 		else
 			/* runt block, i.e. less than one full block */
-			iv = sym_op->cipher.iv.data;
+			iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+					sym_op->cipher.iv.offset);
 
 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
 		rte_hexdump(stdout, "BPI: src before pre-process:", last_block,
@@ -697,7 +698,8 @@ qat_bpicipher_postprocess(struct qat_session *ctx,
 			iv = dst - block_len;
 		else
 			/* runt block, i.e. less than one full block */
-			iv = sym_op->cipher.iv.data;
+			iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+					sym_op->cipher.iv.offset);
 
 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_RX
 		rte_hexdump(stdout, "BPI: src before post-process:", last_block,
@@ -898,6 +900,7 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 	uint32_t min_ofs = 0;
 	uint64_t src_buf_start = 0, dst_buf_start = 0;
 	uint8_t do_sgl = 0;
+	uint8_t *iv_ptr;
 
 
 #ifdef RTE_LIBRTE_PMD_QAT_DEBUG_TX
@@ -971,6 +974,8 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 			cipher_ofs = op->sym->cipher.data.offset;
 		}
 
+		iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *,
+					op->sym->cipher.iv.offset);
 		/* copy IV into request if it fits */
 		/*
 		 * If IV length is zero do not copy anything but still
@@ -981,14 +986,15 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 			if (op->sym->cipher.iv.length <=
 					sizeof(cipher_param->u.cipher_IV_array)) {
 				rte_memcpy(cipher_param->u.cipher_IV_array,
-						op->sym->cipher.iv.data,
+						iv_ptr,
 						op->sym->cipher.iv.length);
 			} else {
 				ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
 						qat_req->comn_hdr.serv_specif_flags,
 						ICP_QAT_FW_CIPH_IV_64BIT_PTR);
 				cipher_param->u.s.cipher_IV_ptr =
-						op->sym->cipher.iv.phys_addr;
+						rte_crypto_op_ctophys_offset(op,
+							op->sym->cipher.iv.offset);
 			}
 		}
 		min_ofs = cipher_ofs;
@@ -1179,12 +1185,16 @@ qat_write_hw_desc_entry(struct rte_crypto_op *op, uint8_t *out_msg,
 	rte_hexdump(stdout, "src_data:",
 			rte_pktmbuf_mtod(op->sym->m_src, uint8_t*),
 			rte_pktmbuf_data_len(op->sym->m_src));
-	rte_hexdump(stdout, "iv:", op->sym->cipher.iv.data,
-			op->sym->cipher.iv.length);
-	rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
-			op->sym->auth.digest.length);
-	rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
-			op->sym->auth.aad.length);
+	if (do_cipher)
+		rte_hexdump(stdout, "iv:", iv_ptr,
+				op->sym->cipher.iv.length);
+
+	if (do_auth) {
+		rte_hexdump(stdout, "digest:", op->sym->auth.digest.data,
+				op->sym->auth.digest.length);
+		rte_hexdump(stdout, "aad:", op->sym->auth.aad.data,
+				op->sym->auth.aad.length);
+	}
 #endif
 	return 0;
 }
diff --git a/drivers/crypto/snow3g/rte_snow3g_pmd.c b/drivers/crypto/snow3g/rte_snow3g_pmd.c
index 84757ac..3157d7b 100644
--- a/drivers/crypto/snow3g/rte_snow3g_pmd.c
+++ b/drivers/crypto/snow3g/rte_snow3g_pmd.c
@@ -174,7 +174,7 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops,
 	unsigned i;
 	uint8_t processed_ops = 0;
 	uint8_t *src[SNOW3G_MAX_BURST], *dst[SNOW3G_MAX_BURST];
-	uint8_t *IV[SNOW3G_MAX_BURST];
+	uint8_t *iv[SNOW3G_MAX_BURST];
 	uint32_t num_bytes[SNOW3G_MAX_BURST];
 
 	for (i = 0; i < num_ops; i++) {
@@ -192,13 +192,14 @@ process_snow3g_cipher_op(struct rte_crypto_op **ops,
 				(ops[i]->sym->cipher.data.offset >> 3) :
 			rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
-		IV[i] = ops[i]->sym->cipher.iv.data;
+		iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
+				ops[i]->sym->cipher.iv.offset);
 		num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
 
 		processed_ops++;
 	}
 
-	sso_snow3g_f8_n_buffer(&session->pKeySched_cipher, IV, src, dst,
+	sso_snow3g_f8_n_buffer(&session->pKeySched_cipher, iv, src, dst,
 			num_bytes, processed_ops);
 
 	return processed_ops;
@@ -210,7 +211,7 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op,
 		struct snow3g_session *session)
 {
 	uint8_t *src, *dst;
-	uint8_t *IV;
+	uint8_t *iv;
 	uint32_t length_in_bits, offset_in_bits;
 
 	/* Sanity checks. */
@@ -228,10 +229,11 @@ process_snow3g_cipher_op_bit(struct rte_crypto_op *op,
 		return 0;
 	}
 	dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
-	IV = op->sym->cipher.iv.data;
+	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+				op->sym->cipher.iv.offset);
 	length_in_bits = op->sym->cipher.data.length;
 
-	sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, IV,
+	sso_snow3g_f8_1_buffer_bit(&session->pKeySched_cipher, iv,
 			src, dst, length_in_bits, offset_in_bits);
 
 	return 1;
diff --git a/drivers/crypto/zuc/rte_zuc_pmd.c b/drivers/crypto/zuc/rte_zuc_pmd.c
index 63236ac..b91b305 100644
--- a/drivers/crypto/zuc/rte_zuc_pmd.c
+++ b/drivers/crypto/zuc/rte_zuc_pmd.c
@@ -173,7 +173,7 @@ process_zuc_cipher_op(struct rte_crypto_op **ops,
 	unsigned i;
 	uint8_t processed_ops = 0;
 	uint8_t *src[ZUC_MAX_BURST], *dst[ZUC_MAX_BURST];
-	uint8_t *IV[ZUC_MAX_BURST];
+	uint8_t *iv[ZUC_MAX_BURST];
 	uint32_t num_bytes[ZUC_MAX_BURST];
 	uint8_t *cipher_keys[ZUC_MAX_BURST];
 
@@ -213,7 +213,8 @@ process_zuc_cipher_op(struct rte_crypto_op **ops,
 				(ops[i]->sym->cipher.data.offset >> 3) :
 			rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
 				(ops[i]->sym->cipher.data.offset >> 3);
-		IV[i] = ops[i]->sym->cipher.iv.data;
+		iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
+				ops[i]->sym->cipher.iv.offset);
 		num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;
 
 		cipher_keys[i] = session->pKey_cipher;
@@ -221,7 +222,7 @@ process_zuc_cipher_op(struct rte_crypto_op **ops,
 		processed_ops++;
 	}
 
-	sso_zuc_eea3_n_buffer(cipher_keys, IV, src, dst,
+	sso_zuc_eea3_n_buffer(cipher_keys, iv, src, dst,
 			num_bytes, processed_ops);
 
 	return processed_ops;
diff --git a/examples/ipsec-secgw/esp.c b/examples/ipsec-secgw/esp.c
index 5bf2d7d..738a800 100644
--- a/examples/ipsec-secgw/esp.c
+++ b/examples/ipsec-secgw/esp.c
@@ -104,9 +104,7 @@ esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa,
 	case RTE_CRYPTO_CIPHER_AES_CBC:
 		/* Copy IV at the end of crypto operation */
 		rte_memcpy(iv_ptr, iv, sa->iv_len);
-		sym_cop->cipher.iv.data = iv_ptr;
-		sym_cop->cipher.iv.phys_addr =
-				rte_crypto_op_ctophys_offset(cop, IV_OFFSET);
+		sym_cop->cipher.iv.offset = IV_OFFSET;
 		sym_cop->cipher.iv.length = sa->iv_len;
 		break;
 	case RTE_CRYPTO_CIPHER_AES_CTR:
@@ -115,9 +113,7 @@ esp_inbound(struct rte_mbuf *m, struct ipsec_sa *sa,
 		icb->salt = sa->salt;
 		memcpy(&icb->iv, iv, 8);
 		icb->cnt = rte_cpu_to_be_32(1);
-		sym_cop->cipher.iv.data = iv_ptr;
-		sym_cop->cipher.iv.phys_addr =
-				rte_crypto_op_ctophys_offset(cop, IV_OFFSET);
+		sym_cop->cipher.iv.offset = IV_OFFSET;
 		sym_cop->cipher.iv.length = 16;
 		break;
 	default:
@@ -348,15 +344,11 @@ esp_outbound(struct rte_mbuf *m, struct ipsec_sa *sa,
 	padding[pad_len - 2] = pad_len - 2;
 	padding[pad_len - 1] = nlp;
 
-	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(cop,
-				uint8_t *, IV_OFFSET);
 	struct cnt_blk *icb = get_cnt_blk(m);
 	icb->salt = sa->salt;
 	icb->iv = sa->seq;
 	icb->cnt = rte_cpu_to_be_32(1);
-	sym_cop->cipher.iv.data = iv_ptr;
-	sym_cop->cipher.iv.phys_addr =
-			rte_crypto_op_ctophys_offset(cop, IV_OFFSET);
+	sym_cop->cipher.iv.offset = IV_OFFSET;
 	sym_cop->cipher.iv.length = 16;
 
 	uint8_t *aad;
diff --git a/examples/l2fwd-crypto/main.c b/examples/l2fwd-crypto/main.c
index 1380bc6..ffd9731 100644
--- a/examples/l2fwd-crypto/main.c
+++ b/examples/l2fwd-crypto/main.c
@@ -489,9 +489,7 @@ l2fwd_simple_crypto_enqueue(struct rte_mbuf *m,
 		/* Copy IV at the end of the crypto operation */
 		rte_memcpy(iv_ptr, cparams->iv.data, cparams->iv.length);
 
-		op->sym->cipher.iv.data = iv_ptr;
-		op->sym->cipher.iv.phys_addr =
-				rte_crypto_op_ctophys_offset(op, IV_OFFSET);
+		op->sym->cipher.iv.offset = IV_OFFSET;
 		op->sym->cipher.iv.length = cparams->iv.length;
 
 		/* For wireless algorithms, offset/length must be in bits */
@@ -700,7 +698,6 @@ l2fwd_main_loop(struct l2fwd_crypto_options *options)
 		if (port_cparams[i].do_cipher) {
 			port_cparams[i].iv.data = options->iv.data;
 			port_cparams[i].iv.length = options->iv.length;
-			port_cparams[i].iv.phys_addr = options->iv.phys_addr;
 			if (!options->iv_param)
 				generate_random_key(port_cparams[i].iv.data,
 						port_cparams[i].iv.length);
diff --git a/lib/librte_cryptodev/rte_crypto_sym.h b/lib/librte_cryptodev/rte_crypto_sym.h
index 39ad1e3..b35c45a 100644
--- a/lib/librte_cryptodev/rte_crypto_sym.h
+++ b/lib/librte_cryptodev/rte_crypto_sym.h
@@ -464,8 +464,10 @@ struct rte_crypto_sym_op {
 		} data; /**< Data offsets and length for ciphering */
 
 		struct {
-			uint8_t *data;
-			/**< Initialisation Vector or Counter.
+			uint16_t offset;
+			/**< Starting point for Initialisation Vector or Counter,
+			 * specified as number of bytes from start of crypto
+			 * operation.
 			 *
 			 * - For block ciphers in CBC or F8 mode, or for KASUMI
 			 * in F8 mode, or for SNOW 3G in UEA2 mode, this is the
@@ -491,7 +493,6 @@ struct rte_crypto_sym_op {
 			 * For optimum performance, the data pointed to SHOULD
 			 * be 8-byte aligned.
 			 */
-			phys_addr_t phys_addr;
 			uint16_t length;
 			/**< Length of valid IV data.
 			 *
diff --git a/test/test/test_cryptodev.c b/test/test/test_cryptodev.c
index 0037e88..fbcaaee 100644
--- a/test/test/test_cryptodev.c
+++ b/test/test/test_cryptodev.c
@@ -1311,13 +1311,11 @@ test_AES_CBC_HMAC_SHA1_encrypt_digest(void)
 	sym_op->auth.data.length = QUOTE_512_BYTES;
 
 	/* Set crypto operation cipher parameters */
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
 
-	rte_memcpy(sym_op->cipher.iv.data, aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			aes_cbc_iv, CIPHER_IV_LENGTH_AES_CBC);
 
 	sym_op->cipher.data.offset = 0;
 	sym_op->cipher.data.length = QUOTE_512_BYTES;
@@ -1464,13 +1462,11 @@ test_AES_CBC_HMAC_SHA512_decrypt_perform(struct rte_cryptodev_sym_session *sess,
 	sym_op->auth.data.offset = 0;
 	sym_op->auth.data.length = QUOTE_512_BYTES;
 
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
 
-	rte_memcpy(sym_op->cipher.iv.data, iv, CIPHER_IV_LENGTH_AES_CBC);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			iv, CIPHER_IV_LENGTH_AES_CBC);
 
 	sym_op->cipher.data.offset = 0;
 	sym_op->cipher.data.length = QUOTE_512_BYTES;
@@ -1860,13 +1856,11 @@ create_wireless_algo_cipher_operation(const uint8_t *iv, uint8_t iv_len,
 	sym_op->m_src = ut_params->ibuf;
 
 	/* iv */
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = iv_len;
 
-	rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			iv, iv_len);
 	sym_op->cipher.data.length = cipher_len;
 	sym_op->cipher.data.offset = cipher_offset;
 	return 0;
@@ -1896,13 +1890,11 @@ create_wireless_algo_cipher_operation_oop(const uint8_t *iv, uint8_t iv_len,
 	sym_op->m_dst = ut_params->obuf;
 
 	/* iv */
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = iv_len;
 
-	rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			iv, iv_len);
 	sym_op->cipher.data.length = cipher_len;
 	sym_op->cipher.data.offset = cipher_offset;
 	return 0;
@@ -2219,13 +2211,11 @@ create_wireless_cipher_hash_operation(const struct wireless_test_data *tdata,
 	TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len);
 
 	/* iv */
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = iv_len;
 
-	rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			iv, iv_len);
 	sym_op->cipher.data.length = cipher_len;
 	sym_op->cipher.data.offset = cipher_offset + auth_offset;
 	sym_op->auth.data.length = auth_len;
@@ -2316,13 +2306,11 @@ create_wireless_algo_cipher_hash_operation(const uint8_t *auth_tag,
 	TEST_HEXDUMP(stdout, "aad:", sym_op->auth.aad.data, aad_len);
 
 	/* iv */
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = iv_len;
 
-	rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			iv, iv_len);
 	sym_op->cipher.data.length = cipher_len;
 	sym_op->cipher.data.offset = cipher_offset + auth_offset;
 	sym_op->auth.data.length = auth_len;
@@ -2401,14 +2389,11 @@ create_wireless_algo_auth_cipher_operation(const unsigned auth_tag_len,
 			sym_op->auth.aad.data, aad_len);
 
 	/* iv */
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = iv_len;
 
-	rte_memcpy(sym_op->cipher.iv.data, iv, iv_len);
-
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			iv, iv_len);
 	sym_op->cipher.data.length = cipher_len;
 	sym_op->cipher.data.offset = auth_offset + cipher_offset;
 
@@ -4854,14 +4839,13 @@ create_gcm_operation(enum rte_crypto_cipher_operation op,
 		sym_op->auth.aad.length);
 
 	/* Append IV at the end of the crypto operation*/
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
+			uint8_t *, IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = tdata->iv.len;
 
-	rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, tdata->iv.len);
-	TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data,
+	rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len);
+	TEST_HEXDUMP(stdout, "iv:", iv_ptr,
 		sym_op->cipher.iv.length);
 
 	/* Append plaintext/ciphertext */
@@ -6429,15 +6413,15 @@ create_gmac_operation(enum rte_crypto_auth_operation op,
 				sym_op->auth.digest.length);
 	}
 
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
+			uint8_t *, IV_OFFSET);
+
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = tdata->iv.len;
 
-	rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, tdata->iv.len);
+	rte_memcpy(iv_ptr, tdata->iv.data, tdata->iv.len);
 
-	TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data, tdata->iv.len);
+	TEST_HEXDUMP(stdout, "iv:", iv_ptr, tdata->iv.len);
 
 	sym_op->cipher.data.length = 0;
 	sym_op->cipher.data.offset = 0;
@@ -6975,13 +6959,11 @@ create_auth_GMAC_operation(struct crypto_testsuite_params *ts_params,
 			sym_op->auth.digest.data,
 			sym_op->auth.digest.length);
 
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = reference->iv.len;
 
-	rte_memcpy(sym_op->cipher.iv.data, reference->iv.data, reference->iv.len);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			reference->iv.data, reference->iv.len);
 
 	sym_op->cipher.data.length = 0;
 	sym_op->cipher.data.offset = 0;
@@ -7034,13 +7016,11 @@ create_cipher_auth_operation(struct crypto_testsuite_params *ts_params,
 			sym_op->auth.digest.data,
 			sym_op->auth.digest.length);
 
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = reference->iv.len;
 
-	rte_memcpy(sym_op->cipher.iv.data, reference->iv.data, reference->iv.len);
+	rte_memcpy(rte_crypto_op_ctod_offset(ut_params->op, uint8_t *, IV_OFFSET),
+			reference->iv.data, reference->iv.len);
 
 	sym_op->cipher.data.length = reference->ciphertext.len;
 	sym_op->cipher.data.offset = 0;
@@ -7284,13 +7264,12 @@ create_gcm_operation_SGL(enum rte_crypto_cipher_operation op,
 				sym_op->auth.digest.length);
 	}
 
-	sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(ut_params->op,
-						uint8_t *, IV_OFFSET);
-	sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(ut_params->op,
-						IV_OFFSET);
+	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ut_params->op,
+			uint8_t *, IV_OFFSET);
+	sym_op->cipher.iv.offset = IV_OFFSET;
 	sym_op->cipher.iv.length = iv_len;
 
-	rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data, iv_len);
+	rte_memcpy(iv_ptr, tdata->iv.data, iv_len);
 
 	sym_op->auth.aad.data = (uint8_t *)rte_pktmbuf_prepend(
 			ut_params->ibuf, aad_len);
@@ -7303,7 +7282,7 @@ create_gcm_operation_SGL(enum rte_crypto_cipher_operation op,
 	memset(sym_op->auth.aad.data, 0, aad_len);
 	rte_memcpy(sym_op->auth.aad.data, tdata->aad.data, aad_len);
 
-	TEST_HEXDUMP(stdout, "iv:", sym_op->cipher.iv.data, iv_len);
+	TEST_HEXDUMP(stdout, "iv:", iv_ptr, iv_len);
 	TEST_HEXDUMP(stdout, "aad:",
 			sym_op->auth.aad.data, aad_len);
 
diff --git a/test/test/test_cryptodev_blockcipher.c b/test/test/test_cryptodev_blockcipher.c
index 2a0c364..312405b 100644
--- a/test/test/test_cryptodev_blockcipher.c
+++ b/test/test/test_cryptodev_blockcipher.c
@@ -290,12 +290,10 @@ test_blockcipher_one_case(const struct blockcipher_test_case *t,
 
 		sym_op->cipher.data.offset = 0;
 		sym_op->cipher.data.length = tdata->ciphertext.len;
-		sym_op->cipher.iv.data = rte_crypto_op_ctod_offset(op,
-						uint8_t *, IV_OFFSET);
-		sym_op->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-						IV_OFFSET);
+		sym_op->cipher.iv.offset = IV_OFFSET;
 		sym_op->cipher.iv.length = tdata->iv.len;
-		rte_memcpy(sym_op->cipher.iv.data, tdata->iv.data,
+		rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+				tdata->iv.data,
 				tdata->iv.len);
 	}
 
diff --git a/test/test/test_cryptodev_perf.c b/test/test/test_cryptodev_perf.c
index b08451d..86bdc6e 100644
--- a/test/test/test_cryptodev_perf.c
+++ b/test/test/test_cryptodev_perf.c
@@ -1981,15 +1981,11 @@ test_perf_crypto_qp_vary_burst_size(uint16_t dev_num)
 		op->sym->auth.data.offset = 0;
 		op->sym->auth.data.length = data_params[0].length;
 
-
-		op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
-				uint8_t *, IV_OFFSET);
-		op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-				IV_OFFSET);
+		op->sym->cipher.iv.offset = IV_OFFSET;
 		op->sym->cipher.iv.length = CIPHER_IV_LENGTH_AES_CBC;
 
-		rte_memcpy(op->sym->cipher.iv.data, aes_cbc_128_iv,
-				CIPHER_IV_LENGTH_AES_CBC);
+		rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+				aes_cbc_128_iv, CIPHER_IV_LENGTH_AES_CBC);
 
 		op->sym->cipher.data.offset = 0;
 		op->sym->cipher.data.length = data_params[0].length;
@@ -2898,13 +2894,10 @@ test_perf_set_crypto_op_aes(struct rte_crypto_op *op, struct rte_mbuf *m,
 
 
 	/* Cipher Parameters */
-	op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
-			uint8_t *, IV_OFFSET);
-	op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-			IV_OFFSET);
+	op->sym->cipher.iv.offset = IV_OFFSET;
 	op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
-
-	rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH);
+	rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+			aes_iv, AES_CIPHER_IV_LENGTH);
 
 	op->sym->cipher.data.offset = 0;
 	op->sym->cipher.data.length = data_len;
@@ -2934,12 +2927,10 @@ test_perf_set_crypto_op_aes_gcm(struct rte_crypto_op *op, struct rte_mbuf *m,
 	op->sym->auth.aad.length = AES_GCM_AAD_LENGTH;
 
 	/* Cipher Parameters */
-	op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
-			uint8_t *, IV_OFFSET);
-	op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-			IV_OFFSET);
+	op->sym->cipher.iv.offset = IV_OFFSET;
 	op->sym->cipher.iv.length = AES_CIPHER_IV_LENGTH;
-	rte_memcpy(op->sym->cipher.iv.data, aes_iv, AES_CIPHER_IV_LENGTH);
+	rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+			aes_iv, AES_CIPHER_IV_LENGTH);
 
 	/* Data lengths/offsets Parameters */
 	op->sym->auth.data.offset = 0;
@@ -2980,9 +2971,7 @@ test_perf_set_crypto_op_snow3g(struct rte_crypto_op *op, struct rte_mbuf *m,
 	op->sym->auth.aad.length = SNOW3G_CIPHER_IV_LENGTH;
 
 	/* Cipher Parameters */
-	op->sym->cipher.iv.data = iv_ptr;
-	op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-			IV_OFFSET);
+	op->sym->cipher.iv.offset = IV_OFFSET;
 	op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
 
 	/* Data lengths/offsets Parameters */
@@ -3009,12 +2998,10 @@ test_perf_set_crypto_op_snow3g_cipher(struct rte_crypto_op *op,
 	}
 
 	/* Cipher Parameters */
-	op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
-			uint8_t *, IV_OFFSET);
-	op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-			IV_OFFSET);
+	op->sym->cipher.iv.offset = IV_OFFSET;
 	op->sym->cipher.iv.length = SNOW3G_CIPHER_IV_LENGTH;
-	rte_memcpy(op->sym->cipher.iv.data, snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
+	rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+			snow3g_iv, SNOW3G_CIPHER_IV_LENGTH);
 
 	op->sym->cipher.data.offset = 0;
 	op->sym->cipher.data.length = data_len << 3;
@@ -3082,13 +3069,10 @@ test_perf_set_crypto_op_3des(struct rte_crypto_op *op, struct rte_mbuf *m,
 	op->sym->auth.digest.length = digest_len;
 
 	/* Cipher Parameters */
-	op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
-			uint8_t *, IV_OFFSET);
-	op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-			IV_OFFSET);
+	op->sym->cipher.iv.offset = IV_OFFSET;
 	op->sym->cipher.iv.length = TRIPLE_DES_CIPHER_IV_LENGTH;
-	rte_memcpy(op->sym->cipher.iv.data, triple_des_iv,
-			TRIPLE_DES_CIPHER_IV_LENGTH);
+	rte_memcpy(rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET),
+			triple_des_iv, TRIPLE_DES_CIPHER_IV_LENGTH);
 
 	/* Data lengths/offsets Parameters */
 	op->sym->auth.data.offset = 0;
@@ -4183,6 +4167,9 @@ perf_gcm_set_crypto_op(struct rte_crypto_op *op, struct rte_mbuf *m,
 		struct crypto_params *m_hlp,
 		struct perf_test_params *params)
 {
+	uint8_t *iv_ptr = rte_crypto_op_ctod_offset(op,
+			uint8_t *, IV_OFFSET);
+
 	if (rte_crypto_op_attach_sym_session(op, sess) != 0) {
 		rte_crypto_op_free(op);
 		return NULL;
@@ -4203,14 +4190,11 @@ perf_gcm_set_crypto_op(struct rte_crypto_op *op, struct rte_mbuf *m,
 	rte_memcpy(op->sym->auth.aad.data, params->symmetric_op->aad_data,
 		       params->symmetric_op->aad_len);
 
-	op->sym->cipher.iv.data = rte_crypto_op_ctod_offset(op,
-			uint8_t *, IV_OFFSET);
-	op->sym->cipher.iv.phys_addr = rte_crypto_op_ctophys_offset(op,
-			IV_OFFSET);
-	rte_memcpy(op->sym->cipher.iv.data, params->symmetric_op->iv_data,
+	op->sym->cipher.iv.offset = IV_OFFSET;
+	rte_memcpy(iv_ptr, params->symmetric_op->iv_data,
 		       params->symmetric_op->iv_len);
 	if (params->symmetric_op->iv_len == 12)
-		op->sym->cipher.iv.data[15] = 1;
+		iv_ptr[15] = 1;
 
 	op->sym->cipher.iv.length = params->symmetric_op->iv_len;
 
-- 
2.9.4


