[PATCH 31/40] crypto/qat: reduce rsa struct to only necessary fields
From: Arek Kusztal <arkadiuszx.kusztal at intel.com>
Date: Fri May 20 07:54:36 CEST 2022
- reduced the RSA struct to only the necessary fields: the separate
message, sign and cipher buffers become a single input/output pair.
This commit reflects these changes to the asymmetric crypto API.
Signed-off-by: Arek Kusztal <arkadiuszx.kusztal at intel.com>
---
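For context, a minimal before/after sketch of the struct change this
patch tracks. The input/output field names are taken from the diff
below; the surrounding members and comments are assumptions for
illustration, not the verbatim rte_crypto_asym.h definition:

    /* Before (assumed shape): one rte_crypto_param per role. */
    struct rte_crypto_rsa_op_param {
        enum rte_crypto_asym_op_type op_type;
        rte_crypto_param message;   /* plaintext or data to sign */
        rte_crypto_param cipher;    /* ciphertext */
        rte_crypto_param sign;      /* signature */
        /* padding fields omitted */
    };

    /* After: a single input/output pair whose meaning follows op_type. */
    struct rte_crypto_rsa_op_param {
        enum rte_crypto_asym_op_type op_type;
        rte_crypto_param input;     /* message, ciphertext or signature */
        rte_crypto_param output;    /* result of the operation */
        /* padding fields omitted */
    };
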
drivers/crypto/qat/qat_asym.c | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
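
A hedged caller-side sketch of how the reduced struct would be filled;
the buffer names and sizes are made up for the example, and op
allocation, session setup and enqueue are omitted:

    struct rte_crypto_asym_op *asym_op = op->asym;
    uint8_t plaintext[256], ciphertext[256];

    /* RSA public-key encrypt: plaintext in, ciphertext out. */
    asym_op->rsa.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT;
    asym_op->rsa.padding.type = RTE_CRYPTO_RSA_PADDING_NONE;
    asym_op->rsa.input.data = plaintext;
    asym_op->rsa.input.length = sizeof(plaintext);
    asym_op->rsa.output.data = ciphertext;
    asym_op->rsa.output.length = sizeof(ciphertext);

    /* RSA sign reuses the same two fields: the message goes in
     * .input and the PMD writes the signature to .output. */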
diff --git a/drivers/crypto/qat/qat_asym.c b/drivers/crypto/qat/qat_asym.c
index 5dd355d007..cb2b47acbb 100644
--- a/drivers/crypto/qat/qat_asym.c
+++ b/drivers/crypto/qat/qat_asym.c
@@ -347,7 +347,7 @@ rsa_set_pub_input(struct rte_crypto_asym_op *asym_op,
if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
switch (asym_op->rsa.padding.type) {
case RTE_CRYPTO_RSA_PADDING_NONE:
- SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
+ SET_PKE_LN(cookie->input_array, asym_op->rsa.input,
alg_bytesize, 0);
break;
default:
@@ -360,7 +360,7 @@ rsa_set_pub_input(struct rte_crypto_asym_op *asym_op,
} else {
switch (asym_op->rsa.padding.type) {
case RTE_CRYPTO_RSA_PADDING_NONE:
- SET_PKE_LN(cookie->input_array, asym_op->rsa.sign,
+ SET_PKE_LN(cookie->input_array, asym_op->rsa.input,
alg_bytesize, 0);
break;
default:
@@ -456,7 +456,7 @@ rsa_set_priv_input(struct rte_crypto_asym_op *asym_op,
RTE_CRYPTO_ASYM_OP_DECRYPT) {
switch (asym_op->rsa.padding.type) {
case RTE_CRYPTO_RSA_PADDING_NONE:
- SET_PKE_LN(cookie->input_array, asym_op->rsa.cipher,
+ SET_PKE_LN(cookie->input_array, asym_op->rsa.input,
alg_bytesize, 0);
HEXDUMP("RSA ciphertext", cookie->input_array[0],
alg_bytesize);
@@ -471,7 +471,7 @@ rsa_set_priv_input(struct rte_crypto_asym_op *asym_op,
RTE_CRYPTO_ASYM_OP_SIGN) {
switch (asym_op->rsa.padding.type) {
case RTE_CRYPTO_RSA_PADDING_NONE:
- SET_PKE_LN(cookie->input_array, asym_op->rsa.message,
+ SET_PKE_LN(cookie->input_array, asym_op->rsa.input,
alg_bytesize, 0);
HEXDUMP("RSA text to be signed", cookie->input_array[0],
alg_bytesize);
@@ -519,7 +519,7 @@ rsa_collect(struct rte_crypto_asym_op *asym_op,
if (asym_op->rsa.op_type ==
RTE_CRYPTO_ASYM_OP_ENCRYPT) {
- uint8_t *rsa_result = asym_op->rsa.cipher.data;
+ uint8_t *rsa_result = asym_op->rsa.output.data;
rte_memcpy(rsa_result,
cookie->output_array[0],
@@ -527,7 +527,7 @@ rsa_collect(struct rte_crypto_asym_op *asym_op,
HEXDUMP("RSA Encrypted data", cookie->output_array[0],
alg_bytesize);
} else {
- uint8_t *rsa_result = asym_op->rsa.cipher.data;
+ uint8_t *rsa_result = asym_op->rsa.output.data;
switch (asym_op->rsa.padding.type) {
case RTE_CRYPTO_RSA_PADDING_NONE:
@@ -545,7 +545,7 @@ rsa_collect(struct rte_crypto_asym_op *asym_op,
}
} else {
if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
- uint8_t *rsa_result = asym_op->rsa.message.data;
+ uint8_t *rsa_result = asym_op->rsa.output.data;
switch (asym_op->rsa.padding.type) {
case RTE_CRYPTO_RSA_PADDING_NONE:
@@ -561,7 +561,7 @@ rsa_collect(struct rte_crypto_asym_op *asym_op,
return RTE_CRYPTO_OP_STATUS_ERROR;
}
} else {
- uint8_t *rsa_result = asym_op->rsa.sign.data;
+ uint8_t *rsa_result = asym_op->rsa.output.data;
rte_memcpy(rsa_result,
cookie->output_array[0],
--
2.13.6