[dpdk-dev] [RFC PATCH v2 2/3] cryptodev: asymmetric algorithm capability definitions
Umesh Kartha
Umesh.Kartha at caviumnetworks.com
Thu May 11 14:35:31 CEST 2017
Added asymmetric algorithm capability structures, operation error codes,
application helper functions. Added asymmetric algorithm/operation
variants, capability query APIs.
Signed-off-by: Umesh Kartha <Umesh.Kartha at caviumnetworks.com>
---
lib/librte_cryptodev/rte_crypto.h | 135 ++++++++++-
lib/librte_cryptodev/rte_cryptodev.c | 430 +++++++++++++++++++++++++++++++++++
lib/librte_cryptodev/rte_cryptodev.h | 334 +++++++++++++++++++++++++++
3 files changed, 896 insertions(+), 3 deletions(-)
diff --git lib/librte_cryptodev/rte_crypto.h lib/librte_cryptodev/rte_crypto.h
index 9019518..a8720bf 100644
--- lib/librte_cryptodev/rte_crypto.h
+++ lib/librte_cryptodev/rte_crypto.h
@@ -51,6 +51,7 @@
#include <rte_common.h>
#include "rte_crypto_sym.h"
+#include "rte_crypto_asym.h"
/** Crypto operation types */
enum rte_crypto_op_type {
@@ -58,6 +59,8 @@ enum rte_crypto_op_type {
/**< Undefined operation type */
RTE_CRYPTO_OP_TYPE_SYMMETRIC,
/**< Symmetric operation */
+ RTE_CRYPTO_OP_TYPE_ASYMMETRIC,
+ /**< Asymmetric operation */
};
/** Status of crypto operation */
@@ -75,6 +78,29 @@ enum rte_crypto_op_status {
* Symmetric operation failed due to invalid session arguments, or if
* in session-less mode, failed to allocate private operation material.
*/
+ RTE_CRYPTO_OP_STATUS_RSA_DATA_TOO_LARGE,
+ /**< Length of data to be encrypted/signed is too large */
+ RTE_CRYPTO_OP_STATUS_PKCS_DECRYPT_FAILED,
+ /**<
+ * PKCS decrypt operation failed due to bad padding.
+ */
+ RTE_CRYPTO_OP_STATUS_RSA_VERIFY_FAILED,
+ /**<
+ * PKCS RSA signature verification failed.
+ */
+ RTE_CRYPTO_OP_STATUS_ECDSA_INVALID_SIGNATURE,
+ /**<
+ * ECDSA signature generation failed due to either ECDSA_SIGN->r or
+ * ECDSA_SIGN->s component being invalid.
+ */
+ RTE_CRYPTO_OP_STATUS_ECDSA_VERIFY_FAILED,
+ /**<
+ * ECDSA signature verification failed.
+ */
+ RTE_CRYPTO_OP_STATUS_ECC_POINT_AT_INFINITY,
+ /**<
+ * ECC Operation failed due to point at infinity
+ */
RTE_CRYPTO_OP_STATUS_INVALID_ARGS,
/**< Operation failed due to invalid arguments in request */
RTE_CRYPTO_OP_STATUS_ERROR,
@@ -116,6 +142,8 @@ struct rte_crypto_op {
union {
struct rte_crypto_sym_op *sym;
/**< Symmetric operation parameters */
+ struct rte_crypto_asym_op *asym;
+ /**< Asymmetric operation parameters */
}; /**< operation specific parameters */
} __rte_cache_aligned;
@@ -141,6 +169,14 @@ struct rte_crypto_op {
__rte_crypto_sym_op_reset(op->sym);
break;
+ case RTE_CRYPTO_OP_TYPE_ASYMMETRIC:
+ /** Asymmetric operation structure starts after the end of the
+	 * rte_crypto_op structure.
+ */
+ op->asym = (struct rte_crypto_asym_op *)(op + 1);
+ op->type = type;
+
+ __rte_crypto_asym_op_reset(op->asym);
default:
break;
}
@@ -303,13 +339,25 @@ struct rte_crypto_op_pool_private {
__rte_crypto_op_get_priv_data(struct rte_crypto_op *op, uint32_t size)
{
uint32_t priv_size;
+ int type = op->type;
if (likely(op->mempool != NULL)) {
priv_size = __rte_crypto_op_get_priv_data_size(op->mempool);
- if (likely(priv_size >= size))
- return (void *)((uint8_t *)(op + 1) +
+ if (likely(priv_size >= size)) {
+ switch (type) {
+ case RTE_CRYPTO_OP_TYPE_SYMMETRIC:
+ return (void *)((uint8_t *)(op + 1) +
sizeof(struct rte_crypto_sym_op));
+ break;
+ case RTE_CRYPTO_OP_TYPE_ASYMMETRIC:
+ return (void *)((uint8_t *)(op + 1) +
+ sizeof(struct rte_crypto_asym_op));
+ break;
+ default:
+ break;
+ }
+ }
}
return NULL;
@@ -320,7 +368,7 @@ struct rte_crypto_op_pool_private {
* If operation has been allocate from a rte_mempool, then the operation will
* be returned to the mempool.
*
- * @param op symmetric crypto operation
+ * @param op crypto operation
*/
static inline void
rte_crypto_op_free(struct rte_crypto_op *op)
@@ -410,6 +458,87 @@ struct rte_crypto_op_pool_private {
return __rte_crypto_sym_op_attach_sym_session(op->sym, sess);
}
+/**
+ * Allocate an asymmetric crypto operation in the private data of an mbuf.
+ *
+ * @param m mbuf which is associated with the crypto operation, the
+ * operation will be allocated in the private data of that
+ * mbuf.
+ *
+ * @returns
+ * - On success returns a pointer to the crypto operation.
+ * - On failure returns NULL.
+ */
+static inline struct rte_crypto_op *
+rte_crypto_asym_op_alloc_from_mbuf_priv_data(struct rte_mbuf *m)
+{
+ if (unlikely(m == NULL))
+ return NULL;
+
+ /*
+ * check that the mbuf's private data size is sufficient to contain a
+ * crypto operation
+ */
+ if (unlikely(m->priv_size < (sizeof(struct rte_crypto_op) +
+ sizeof(struct rte_crypto_asym_op))))
+ return NULL;
+
+ /* private data starts immediately after the mbuf header in the mbuf. */
+ struct rte_crypto_op *op = (struct rte_crypto_op *)(m + 1);
+
+ __rte_crypto_op_reset(op, RTE_CRYPTO_OP_TYPE_ASYMMETRIC);
+
+ op->mempool = NULL;
+ op->asym->m_src = m;
+
+ return op;
+}
+
+/**
+ * Allocate space for asymmetric crypto xforms in the private data space of the
+ * crypto operation. This also defaults the crypto xform type and configures
+ * the chaining of the xforms in the crypto operation
+ *
+ * @return
+ * - On success returns pointer to first crypto xform in crypto operations chain
+ * - On failure returns NULL
+ */
+static inline struct rte_crypto_asym_xform *
+rte_crypto_op_asym_xforms_alloc(struct rte_crypto_op *op, uint8_t nb_xforms)
+{
+ void *priv_data;
+ uint32_t size;
+
+ if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_ASYMMETRIC))
+ return NULL;
+
+ size = sizeof(struct rte_crypto_asym_xform) * nb_xforms;
+
+ priv_data = __rte_crypto_op_get_priv_data(op, size);
+ if (priv_data == NULL)
+ return NULL;
+
+ return __rte_crypto_asym_op_asym_xforms_alloc(op->asym, priv_data,
+ nb_xforms);
+}
+
+
+/**
+ * Attach a session to a crypto operation
+ *
+ * @param op crypto operation, must be of type asymmetric
+ * @param sess cryptodev session
+ */
+static inline int
+rte_crypto_op_attach_asym_session(struct rte_crypto_op *op,
+ struct rte_cryptodev_asym_session *sess)
+{
+ if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_ASYMMETRIC))
+ return -1;
+
+ return __rte_crypto_asym_op_attach_asym_session(op->asym, sess);
+}
+
#ifdef __cplusplus
}
#endif
diff --git lib/librte_cryptodev/rte_cryptodev.c lib/librte_cryptodev/rte_cryptodev.c
index b65cd9c..abcdeb0 100644
--- lib/librte_cryptodev/rte_cryptodev.c
+++ lib/librte_cryptodev/rte_cryptodev.c
@@ -224,6 +224,385 @@ struct rte_cryptodev_callback {
}
/**
+ * Asymmetric crypto transform operation strings identifiers.
+ */
+
+const char *
+rte_crypto_asym_algorithm_strings[] = {
+ [RTE_CRYPTO_ASYM_XFORM_RSA] = "rsa",
+ [RTE_CRYPTO_ASYM_XFORM_MODEX] = "modexp",
+ [RTE_CRYPTO_ASYM_XFORM_DH] = "dh",
+ [RTE_CRYPTO_ASYM_XFORM_ECDH] = "ecdh",
+ [RTE_CRYPTO_ASYM_XFORM_DSA] = "dsa",
+ [RTE_CRYPTO_ASYM_XFORM_ECDSA] = "ecdsa",
+ [RTE_CRYPTO_ASYM_XFORM_MODINV] = "modinv",
+ [RTE_CRYPTO_ASYM_XFORM_FECC] = "fecc"
+};
+
+/**
+ * RSA crypto transform operation strings identifiers.
+ */
+const char *
+rte_crypto_rsa_operation_strings[] = {
+ [RTE_CRYPTO_RSA_OP_PUBLIC_ENCRYPT] = "public-encrypt",
+ [RTE_CRYPTO_RSA_OP_PRIVATE_DECRYPT] = "private-decrypt",
+ [RTE_CRYPTO_RSA_OP_SIGN] = "sign",
+ [RTE_CRYPTO_RSA_OP_VERIFY] = "verify"
+};
+
+/**
+ * DH crypto transform operation strings identifiers.
+ */
+const char *
+rte_crypto_dh_operation_strings[] = {
+ [RTE_CRYPTO_DH_OP_KEY_GENERATION] = "key-generate",
+ [RTE_CRYPTO_DH_OP_KEY_COMPUTATION] = "key-compute"
+};
+
+/**
+ * ECDH crypto transform operation strings identifiers.
+ */
+const char *
+rte_crypto_ecdh_operation_strings[] = {
+ [RTE_CRYPTO_ECDH_OP_KEY_GENERATION] = "key-generate",
+ [RTE_CRYPTO_ECDH_OP_KEY_CHECK] = "key-check",
+ [RTE_CRYPTO_ECDH_OP_KEY_COMPUTATION] = "key-compute"
+};
+
+/**
+ * DSA crypto transform operation strings identifiers.
+ */
+const char *
+rte_crypto_dsa_operation_strings[] = {
+ [RTE_CRYPTO_DSA_OP_SIGN] = "sign",
+ [RTE_CRYPTO_DSA_OP_VERIFY] = "verify"
+};
+
+/**
+ * ECDSA crypto transform operation strings identifiers.
+ */
+const char *
+rte_crypto_ecdsa_operation_strings[] = {
+ [RTE_CRYPTO_ECDSA_OP_SIGN] = "sign",
+ [RTE_CRYPTO_ECDSA_OP_VERIFY] = "verify"
+};
+
+/**
+ * F-ECC crypto transform operation strings identifiers.
+ */
+const char *
+rte_crypto_fecc_operation_strings[] = {
+ [RTE_CRYPTO_FECC_OP_POINT_ADD] = "point-add",
+ [RTE_CRYPTO_FECC_OP_POINT_DBL] = "point-double",
+ [RTE_CRYPTO_FECC_OP_POINT_MULTIPLY] = "point-multiply"
+};
+
+/**
+ * RSA crypto padding scheme strings identifiers.
+ */
+const char *
+rte_crypto_rsa_padding_scheme_strings[] = {
+ [RTE_CRYPTO_RSA_PADDING_BT1] = "bt1",
+ [RTE_CRYPTO_RSA_PADDING_BT2] = "bt2",
+ [RTE_CRYPTO_RSA_PADDING_OAEP] = "oaep",
+ [RTE_CRYPTO_RSA_PADDING_PSS] = "pss"
+};
+
+/**
+ * ECC prime field curve string identifiers.
+ */
+const char *
+rte_crypto_prime_curve_id_strings[] = {
+	[RTE_CRYPTO_EC_CURVE_secp112r1] = "secp112r1",
+	[RTE_CRYPTO_EC_CURVE_secp112r2] = "secp112r2",
+	[RTE_CRYPTO_EC_CURVE_secp128r1] = "secp128r1",
+	[RTE_CRYPTO_EC_CURVE_secp128r2] = "secp128r2",
+	[RTE_CRYPTO_EC_CURVE_secp160k1] = "secp160k1",
+	[RTE_CRYPTO_EC_CURVE_secp160r1] = "secp160r1",
+	[RTE_CRYPTO_EC_CURVE_secp160r2] = "secp160r2",
+	[RTE_CRYPTO_EC_CURVE_secp192k1] = "secp192k1",
+	[RTE_CRYPTO_EC_CURVE_secp224k1] = "secp224k1",
+	[RTE_CRYPTO_EC_CURVE_secp224r1] = "secp224r1",
+	[RTE_CRYPTO_EC_CURVE_secp256k1] = "secp256k1",
+	[RTE_CRYPTO_EC_CURVE_secp384r1] = "secp384r1",
+	[RTE_CRYPTO_EC_CURVE_secp521r1] = "secp521r1",
+	[RTE_CRYPTO_EC_CURVE_prime192v1] = "prime192v1",
+	[RTE_CRYPTO_EC_CURVE_prime192v2] = "prime192v2",
+	[RTE_CRYPTO_EC_CURVE_prime192v3] = "prime192v3",
+	[RTE_CRYPTO_EC_CURVE_prime239v1] = "prime239v1",
+	[RTE_CRYPTO_EC_CURVE_prime239v2] = "prime239v2",
+	[RTE_CRYPTO_EC_CURVE_prime239v3] = "prime239v3",
+	[RTE_CRYPTO_EC_CURVE_prime256v1] = "prime256v1",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls6] =
+			"wap_wsg_idm_ecid_wtls6",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls7] =
+			"wap_wsg_idm_ecid_wtls7",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls8] =
+			"wap_wsg_idm_ecid_wtls8",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls9] =
+			"wap_wsg_idm_ecid_wtls9",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls12] =
+			"wap_wsg_idm_ecid_wtls12",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP160r1] = "brainpoolP160r1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP160t1] = "brainpoolP160t1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP192r1] = "brainpoolP192r1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP192t1] = "brainpoolP192t1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP224r1] = "brainpoolP224r1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP224t1] = "brainpoolP224t1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP256r1] = "brainpoolP256r1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP256t1] = "brainpoolP256t1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP320r1] = "brainpoolP320r1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP320t1] = "brainpoolP320t1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP384r1] = "brainpoolP384r1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP384t1] = "brainpoolP384t1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP512r1] = "brainpoolP512r1",
+	[RTE_CRYPTO_EC_CURVE_brainpoolP512t1] = "brainpoolP512t1",
+	[RTE_CRYPTO_EC_CURVE_x25519] = "curve25519"
+};
+
+/**
+ * ECC binary field curve string identifiers.
+ */
+const char *
+rte_crypto_binary_curve_id_strings[] = {
+	[RTE_CRYPTO_EC_CURVE_sect113r1] = "sect113r1",
+	[RTE_CRYPTO_EC_CURVE_sect113r2] = "sect113r2",
+	[RTE_CRYPTO_EC_CURVE_sect131r1] = "sect131r1",
+	[RTE_CRYPTO_EC_CURVE_sect131r2] = "sect131r2",
+	[RTE_CRYPTO_EC_CURVE_sect163k1] = "sect163k1",
+	[RTE_CRYPTO_EC_CURVE_sect163r1] = "sect163r1",
+	[RTE_CRYPTO_EC_CURVE_sect163r2] = "sect163r2",
+	[RTE_CRYPTO_EC_CURVE_sect193r1] = "sect193r1",
+	[RTE_CRYPTO_EC_CURVE_sect193r2] = "sect193r2",
+	[RTE_CRYPTO_EC_CURVE_sect233k1] = "sect233k1",
+	[RTE_CRYPTO_EC_CURVE_sect233r1] = "sect233r1",
+	[RTE_CRYPTO_EC_CURVE_sect239k1] = "sect239k1",
+	[RTE_CRYPTO_EC_CURVE_sect283k1] = "sect283k1",
+	[RTE_CRYPTO_EC_CURVE_sect283r1] = "sect283r1",
+	[RTE_CRYPTO_EC_CURVE_sect409k1] = "sect409k1",
+	[RTE_CRYPTO_EC_CURVE_sect409r1] = "sect409r1",
+	[RTE_CRYPTO_EC_CURVE_sect571k1] = "sect571k1",
+	[RTE_CRYPTO_EC_CURVE_sect571r1] = "sect571r1",
+	[RTE_CRYPTO_EC_CURVE_c2pnb163v1] = "c2pnb163v1",
+	[RTE_CRYPTO_EC_CURVE_c2pnb163v2] = "c2pnb163v2",
+	[RTE_CRYPTO_EC_CURVE_c2pnb163v3] = "c2pnb163v3",
+	[RTE_CRYPTO_EC_CURVE_c2pnb176v1] = "c2pnb176v1",
+	[RTE_CRYPTO_EC_CURVE_c2tnb191v1] = "c2tnb191v1",
+	[RTE_CRYPTO_EC_CURVE_c2tnb191v2] = "c2tnb191v2",
+	[RTE_CRYPTO_EC_CURVE_c2tnb191v3] = "c2tnb191v3",
+	[RTE_CRYPTO_EC_CURVE_c2pnb208w1] = "c2pnb208w1",
+	[RTE_CRYPTO_EC_CURVE_c2tnb239v1] = "c2tnb239v1",
+	[RTE_CRYPTO_EC_CURVE_c2tnb239v2] = "c2tnb239v2",
+	[RTE_CRYPTO_EC_CURVE_c2tnb239v3] = "c2tnb239v3",
+	[RTE_CRYPTO_EC_CURVE_c2pnb272w1] = "c2pnb272w1",
+	[RTE_CRYPTO_EC_CURVE_c2pnb304w1] = "c2pnb304w1",
+	[RTE_CRYPTO_EC_CURVE_c2tnb359v1] = "c2tnb359v1",
+	[RTE_CRYPTO_EC_CURVE_c2pnb368w1] = "c2pnb368w1",
+	[RTE_CRYPTO_EC_CURVE_c2tnb431r1] = "c2tnb431r1",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls1] =
+			"wap_wsg_idm_ecid_wtls1",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls3] =
+			"wap_wsg_idm_ecid_wtls3",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls4] =
+			"wap_wsg_idm_ecid_wtls4",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls5] =
+			"wap_wsg_idm_ecid_wtls5",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls10] =
+			"wap_wsg_idm_ecid_wtls10",
+	[RTE_CRYPTO_EC_CURVE_wap_wsg_idm_ecid_wtls11] =
+			"wap_wsg_idm_ecid_wtls11"
+};
+
+int
+rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum,
+ const char *algo_string)
+{
+ unsigned int i;
+
+ for (i = 1; i < RTE_DIM(rte_crypto_cipher_algorithm_strings); i++) {
+ if (strcmp(algo_string, rte_crypto_cipher_algorithm_strings[i])
+ == 0) {
+ *algo_enum = (enum rte_crypto_cipher_algorithm) i;
+ return 0;
+ }
+ }
+
+ /* Invalid string */
+ return -1;
+}
+
+int
+rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum,
+ const char *algo_string)
+{
+ unsigned int i;
+
+ for (i = 1; i < RTE_DIM(rte_crypto_auth_algorithm_strings); i++) {
+ if (strcmp(algo_string, rte_crypto_auth_algorithm_strings[i])
+ == 0) {
+ *algo_enum = (enum rte_crypto_auth_algorithm) i;
+ return 0;
+ }
+ }
+
+ /* Invalid string */
+ return -1;
+}
+
+int
+rte_cryptodev_get_asym_algo_enum(enum rte_crypto_asym_xform_type *algo_enum,
+		const char *algo_string)
+{
+	unsigned int i;
+
+	for (i = 1; i < RTE_DIM(rte_crypto_asym_algorithm_strings); i++) {
+		if (strcmp(algo_string, rte_crypto_asym_algorithm_strings[i])
+				== 0) {
+			/* Cast to the declared xform *type* enum */
+			*algo_enum = (enum rte_crypto_asym_xform_type) i;
+			return 0;
+		}
+	}
+	/* Invalid string */
+	return -1;
+}
+
+int
+rte_cryptodev_get_rsa_op_enum(enum rte_crypto_rsa_optype *op_enum,
+ const char *op_string)
+{
+ unsigned int i;
+
+ for (i = 1; i < RTE_DIM(rte_crypto_rsa_operation_strings); i++) {
+ if (strcmp(op_string, rte_crypto_rsa_operation_strings[i])
+ == 0) {
+ *op_enum = (enum rte_crypto_rsa_optype) i;
+ return 0;
+ }
+ }
+
+ /* Invalid string */
+ return -1;
+}
+
+int
+rte_cryptodev_get_dh_op_enum(enum rte_crypto_dh_optype *op_enum,
+ const char *op_string)
+{
+ unsigned int i;
+
+ for (i = 1; i < RTE_DIM(rte_crypto_dh_operation_strings); i++) {
+ if (strcmp(op_string, rte_crypto_dh_operation_strings[i])
+ == 0) {
+ *op_enum = (enum rte_crypto_dh_optype) i;
+ return 0;
+ }
+ }
+
+ /* Invalid string */
+ return -1;
+}
+
+int
+rte_cryptodev_get_ecdh_op_enum(enum rte_crypto_ecdh_optype *op_enum,
+ const char *op_string)
+{
+ unsigned int i;
+
+ for (i = 1; i < RTE_DIM(rte_crypto_ecdh_operation_strings); i++) {
+ if (strcmp(op_string, rte_crypto_ecdh_operation_strings[i])
+ == 0) {
+ *op_enum = (enum rte_crypto_ecdh_optype) i;
+ return 0;
+ }
+ }
+
+ /* Invalid string */
+ return -1;
+}
+
+int
+rte_cryptodev_get_dsa_op_enum(enum rte_crypto_dsa_optype *op_enum,
+ const char *op_string)
+{
+ unsigned int i;
+
+ for (i = 1; i < RTE_DIM(rte_crypto_dsa_operation_strings); i++) {
+ if (strcmp(op_string, rte_crypto_dsa_operation_strings[i])
+ == 0) {
+ *op_enum = (enum rte_crypto_dsa_optype) i;
+ return 0;
+ }
+ }
+
+ /* Invalid string */
+ return -1;
+}
+
+int
+rte_cryptodev_get_ecdsa_op_enum(enum rte_crypto_ecdsa_optype *op_enum,
+		const char *op_string)
+{
+	unsigned int i;
+
+	/* Iterate over the ECDSA table, not the RSA one */
+	for (i = 1; i < RTE_DIM(rte_crypto_ecdsa_operation_strings); i++) {
+		if (strcmp(op_string, rte_crypto_ecdsa_operation_strings[i])
+				== 0) {
+			*op_enum = (enum rte_crypto_ecdsa_optype) i;
+			return 0;
+		}
+	}
+	/* Invalid string */
+	return -1;
+}
+
+int
+rte_cryptodev_get_rsa_padding_enum(enum rte_crypto_rsa_padding_type *pad_enum,
+		const char *pad_string)
+{
+	unsigned int i;
+
+	for (i = 1; i < RTE_DIM(rte_crypto_rsa_padding_scheme_strings); i++) {
+		if (strcmp(pad_string, rte_crypto_rsa_padding_scheme_strings[i])
+				== 0) {
+			*pad_enum = (enum rte_crypto_rsa_padding_type) i;
+			return 0;
+		}
+	}
+
+	/* Invalid string */
+	return -1;
+}
+
+int
+rte_cryptodev_get_ec_curve_enum(struct rte_crypto_ec_curve_id *curve_id,
+		enum rte_crypto_ec_curve_type *curve_type,
+		const char *curve_string)
+{
+	unsigned int i;
+
+	/* Try the prime field curves first */
+	for (i = 1; i < RTE_DIM(rte_crypto_prime_curve_id_strings); i++) {
+		if (strcmp(curve_string,
+				rte_crypto_prime_curve_id_strings[i]) == 0) {
+			curve_id->pcurve = (enum rte_crypto_ec_prime_curve) i;
+			*curve_type = RTE_CRYPTO_EC_CURVE_TYPE_PRIME_FIELD;
+			return 0;
+		}
+	}
+
+	/* Fall back to the binary field curves */
+	for (i = 1; i < RTE_DIM(rte_crypto_binary_curve_id_strings); i++) {
+		if (strcmp(curve_string,
+				rte_crypto_binary_curve_id_strings[i]) == 0) {
+			curve_id->bcurve = (enum rte_crypto_ec_binary_curve) i;
+			*curve_type = RTE_CRYPTO_EC_CURVE_TYPE_BINARY_FIELD;
+			return 0;
+		}
+	}
+	/* Invalid string */
+	return -1;
+}
+/**
* The crypto auth operation strings identifiers.
* It could be used in application command line.
*/
@@ -369,6 +748,27 @@ struct rte_cryptodev_callback {
}
+const struct rte_cryptodev_asymmetric_capability *
+rte_cryptodev_asym_capability_get(uint8_t dev_id,
+		const struct rte_cryptodev_asym_capability_idx *idx)
+{
+	const struct rte_cryptodev_capabilities *capability;
+	struct rte_cryptodev_info dev_info;
+	int i = 0;
+
+	rte_cryptodev_info_get(dev_id, &dev_info);
+
+	/* Scan until the END_OF_CAPABILITIES sentinel entry */
+	while ((capability = &dev_info.capabilities[i++])->op !=
+			RTE_CRYPTO_OP_TYPE_UNDEFINED) {
+		if (capability->op != RTE_CRYPTO_OP_TYPE_ASYMMETRIC)
+			continue;
+
+		if (capability->asym.xform_type == idx->type)
+			return &capability->asym;
+	}
+	return NULL;
+}
+
#define param_range_check(x, y) \
(((x < y.min) || (x > y.max)) || \
(y.increment != 0 && (x % y.increment) != 0))
@@ -404,6 +804,36 @@ struct rte_cryptodev_callback {
return 0;
}
+int
+rte_cryptodev_asym_capability_check_modlen(
+	const struct rte_cryptodev_asymmetric_capability *capability,
+	uint16_t modlen)
+{
+	/* capability is a pointer; range field is named mod_len */
+	if (param_range_check(modlen, capability->mod_len))
+		return -1;
+
+	return 0;
+}
+
+#define curve_support_check(bitfield, curve) \
+	((bitfield) & (1ULL << (curve)))
+
+int rte_cryptodev_asym_capability_check_curve(
+		const struct rte_cryptodev_asymmetric_capability *capability,
+		enum rte_crypto_ec_curve_type curve_type,
+		struct rte_crypto_ec_curve_id curve_id)
+{
+	uint64_t curve_field =
+		(curve_type == RTE_CRYPTO_EC_CURVE_TYPE_PRIME_FIELD) ?
+		capability->curves.prime_bits : capability->curves.binary_bits;
+	/* Select the enum index matching the chosen field type */
+	int curve = (curve_type == RTE_CRYPTO_EC_CURVE_TYPE_PRIME_FIELD) ?
+		(int)curve_id.pcurve : (int)curve_id.bcurve;
+	if (curve_support_check(curve_field, curve))
+		return 0;
+
+	return -1;
+}
+
const char *
rte_cryptodev_get_feature_name(uint64_t flag)
diff --git lib/librte_cryptodev/rte_cryptodev.h lib/librte_cryptodev/rte_cryptodev.h
index 88aeb87..f5f5a73 100644
--- lib/librte_cryptodev/rte_cryptodev.h
+++ lib/librte_cryptodev/rte_cryptodev.h
@@ -167,6 +167,161 @@ struct rte_cryptodev_symmetric_capability {
};
};
+/**
+ * Asymmetric Crypto Capability
+ *
+ * Capability for asymmetric crypto capabilities are divided as elliptic
+ * curve operations and non-elliptic curve operations. Capability for
+ * elliptic curve operations are dependent on the support for the curve
+ * used for the operation. Capability for non-elliptic curve operations
+ * are dependent on the length of prime modulus used for the operation.
+ *
+ * For non-elliptic curve operations (RSA/DSA/MODEXP/MODINV/DH):
+ * Capability is the param range of prime modulus
+ *
+ * For elliptic curve operations (ECDH/ECDSA/F-ECC) :
+ * Capability is the support for the operations on the curve.
+ *
+ *
+ * NOTE: The list of curves mentioned in the following structure are the
+ * curves supported by OpenSSL libcrypto presently. The curves were
+ * divided into prime or binary so as to use bitfield to determine the
+ * support for corresponding curve.
+ *
+ */
+struct rte_cryptodev_asymmetric_capability {
+ enum rte_crypto_asym_xform_type xform_type;
+ /**< Transform type: RSA/MODEXP/DH/ECDH/DSA/ECDSA/FECC/MODINV */
+ RTE_STD_C11
+ union {
+ struct rte_crypto_param_range mod_len;
+ /**< Range of modulus length supported for
+ * RSA
+ * MODEXP
+ * MODINV
+ * DH
+ */
+ struct {
+ RTE_STD_C11
+ union {
+ /**
+			 * List of prime curves represented as a bit field.
+ */
+ struct{
+ uint64_t secp112r1 :1;
+ uint64_t secp112r2 :1;
+ uint64_t secp128r1 :1;
+ uint64_t secp128r2 :1;
+ uint64_t secp160k1 :1;
+ uint64_t secp160r1 :1;
+ uint64_t secp160r2 :1;
+ uint64_t secp192k1 :1;
+ uint64_t secp224k1 :1;
+ uint64_t secp224r1 :1;
+ uint64_t secp256k1 :1;
+ uint64_t secp384r1 :1;
+ uint64_t secp521r1 :1;
+ uint64_t prime192v1 :1;
+ uint64_t prime192v2 :1;
+ uint64_t prime192v3 :1;
+ uint64_t prime239v1 :1;
+ uint64_t prime239v2 :1;
+ uint64_t prime239v3 :1;
+ uint64_t prime256v1 :1;
+ uint64_t wap_wsg_idm_ecid_wtls6 :1;
+ uint64_t wap_wsg_idm_ecid_wtls7 :1;
+ uint64_t wap_wsg_idm_ecid_wtls8 :1;
+ uint64_t wap_wsg_idm_ecid_wtls9 :1;
+ uint64_t wap_wsg_idm_ecid_wtls12 :1;
+ uint64_t brainpoolP160r1 :1;
+ uint64_t brainpoolP160t1 :1;
+ uint64_t brainpoolP192r1 :1;
+ uint64_t brainpoolP192t1 :1;
+ uint64_t brainpoolP224r1 :1;
+ uint64_t brainpoolP224t1 :1;
+ uint64_t brainpoolP256r1 :1;
+ uint64_t brainpoolP256t1 :1;
+ uint64_t brainpoolP320r1 :1;
+ uint64_t brainpoolP320t1 :1;
+ uint64_t brainpoolP384r1 :1;
+ uint64_t brainpoolP384t1 :1;
+ uint64_t brainpoolP512r1 :1;
+ uint64_t brainpoolP512t1 :1;
+ uint64_t x25519 :1;
+ uint64_t unused :24;
+ } prime_curve;
+ /**<
+ * Supported prime curves for
+ * ECDH
+ * ECDSA
+ * FECC
+ */
+ uint64_t prime_bits;
+ };
+
+ RTE_STD_C11
+ union {
+ /**
+			 * List of binary curves represented as a bit field.
+ */
+ struct {
+ uint64_t sect113r1 :1;
+ uint64_t sect113r2 :1;
+ uint64_t sect131r1 :1;
+ uint64_t sect131r2 :1;
+ uint64_t sect163k1 :1;
+ uint64_t sect163r1 :1;
+ uint64_t sect163r2 :1;
+ uint64_t sect193r1 :1;
+ uint64_t sect193r2 :1;
+ uint64_t sect233k1 :1;
+ uint64_t sect233r1 :1;
+ uint64_t sect239k1 :1;
+ uint64_t sect283k1 :1;
+ uint64_t sect283r1 :1;
+ uint64_t sect409k1 :1;
+ uint64_t sect409r1 :1;
+ uint64_t sect571k1 :1;
+ uint64_t sect571r1 :1;
+ uint64_t c2pnb163v1 :1;
+ uint64_t c2pnb163v2 :1;
+ uint64_t c2pnb163v3 :1;
+ uint64_t c2pnb176v1 :1;
+ uint64_t c2tnb191v1 :1;
+ uint64_t c2tnb191v2 :1;
+ uint64_t c2tnb191v3 :1;
+ uint64_t c2pnb208w1 :1;
+ uint64_t c2tnb239v1 :1;
+ uint64_t c2tnb239v2 :1;
+ uint64_t c2tnb239v3 :1;
+ uint64_t c2pnb272w1 :1;
+ uint64_t c2pnb304w1 :1;
+ uint64_t c2tnb359v1 :1;
+ uint64_t c2pnb368w1 :1;
+ uint64_t c2tnb431r1 :1;
+ uint64_t wap_wsg_idm_ecid_wtls1 :1;
+ uint64_t wap_wsg_idm_ecid_wtls3 :1;
+ uint64_t wap_wsg_idm_ecid_wtls4 :1;
+ uint64_t wap_wsg_idm_ecid_wtls5 :1;
+ uint64_t wap_wsg_idm_ecid_wtls10 :1;
+ uint64_t wap_wsg_idm_ecid_wtls11 :1;
+ uint64_t unused :24;
+ } binary_curve;
+ /**<
+ * Supported binary curves for
+ * ECDH
+ * ECDSA
+ * FECC
+ */
+ uint64_t binary_bits;
+ /**<
+ * Bitfield representing all binary curves.
+ */
+ };
+ } curves;
+ };
+};
+
/** Structure used to capture a capability of a crypto device */
struct rte_cryptodev_capabilities {
enum rte_crypto_op_type op;
@@ -176,6 +331,8 @@ struct rte_cryptodev_capabilities {
union {
struct rte_cryptodev_symmetric_capability sym;
/**< Symmetric operation capability parameters */
+ struct rte_cryptodev_asymmetric_capability asym;
+ /**< Asymmetric operation capability parameters */
};
};
@@ -188,6 +345,13 @@ struct rte_cryptodev_sym_capability_idx {
} algo;
};
+/** Structure used to describe crypto algorithms
+ * Only algorithm is required to define the capabilites associated
+ * with the particular asymmetric operation.
+ */
+struct rte_cryptodev_asym_capability_idx {
+ enum rte_crypto_asym_xform_type type;
+};
/**
* Provide capabilities available for defined device and algorithm
*
@@ -203,6 +367,20 @@ struct rte_cryptodev_sym_capability_idx {
const struct rte_cryptodev_sym_capability_idx *idx);
/**
+ * Provide capabilities available for defined device and algorithm
+ *
+ * @param dev_id The identifier of the device.
+ * @param algo Description of crypto algorithms.
+ *
+ * @return
+ * - Return description of the asymmetric crypto capability if exist.
+ * - Return NULL if the capability not exist.
+ */
+const struct rte_cryptodev_asymmetric_capability *
+rte_cryptodev_asym_capability_get(uint8_t dev_id,
+ const struct rte_cryptodev_asym_capability_idx *idx);
+
+/**
* Check if key size and initial vector are supported
* in crypto cipher capability
*
@@ -238,6 +416,37 @@ struct rte_cryptodev_sym_capability_idx {
uint16_t key_size, uint16_t digest_size, uint16_t aad_size);
/**
+ * Check if modulus length is supported for asymmetric crypto operation over
+ * a finite field.
+ *
+ * @param capability Description of the asymmetric crypto capability.
+ * @param modlen Modulus length
+ *
+ * @return
+ * - Return 0 if the parameters are in range of the capability.
+ * - Return -1 if the parameters are out of range of the capability.
+ */
+int
+rte_cryptodev_asym_capability_check_modlen(
+ const struct rte_cryptodev_asymmetric_capability *capability,
+ uint16_t modlen);
+
+/**
+ * Check if curve provided is supported for ECC operations
+ * @param capability	Description of the asymmetric crypto capability.
+ * @param curve_type	Type of the curve (Binary or Prime)
+ * @param curve_id	Curve ID of the curve to be checked for support.
+ *
+ * @return
+ * - Return 0 if the curve provided is supported in the capability.
+ * - Return -1 if the curve provided is unsupported in the capability.
+ */
+int rte_cryptodev_asym_capability_check_curve(
+		const struct rte_cryptodev_asymmetric_capability *capability,
+		enum rte_crypto_ec_curve_type curve_type,
+		struct rte_crypto_ec_curve_id curve_id);
+
+/**
* Provide the cipher algorithm enum, given an algorithm string
*
* @param algo_enum A pointer to the cipher algorithm
@@ -267,6 +476,131 @@ struct rte_cryptodev_sym_capability_idx {
rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum,
const char *algo_string);
+/**
+ * Provide ECC curve enum and curve type from given curve name string.
+ *
+ * @param curve_id A pointer to the ECC curve ID enum to be
+ * filled.
+ * @param curve_type A pointer to the ECC curve type enum to be
+ * filled.
+ * @param curve_string Curve name string.
+ *
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int rte_cryptodev_get_ec_curve_enum(struct rte_crypto_ec_curve_id *curve_id,
+ enum rte_crypto_ec_curve_type *curve_type,
+ const char *curve_string);
+
+/**
+ * Provide asymmetric algorithm xform type for the given string.
+ *
+ * @param algo_enum Pointer to asymmetric xform enum to be filled.
+ *
+ * @param algo_string Asymmetric algorithm string.
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int
+rte_cryptodev_get_asym_algo_enum(enum rte_crypto_asym_xform_type *algo_enum,
+ const char *algo_string);
+
+/**
+ * Provide RSA operation type enum for the given string.
+ *
+ * @param op_enum	Pointer to RSA operation type to be filled.
+ *
+ * @param op_string	RSA operation type string.
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int
+rte_cryptodev_get_rsa_op_enum(enum rte_crypto_rsa_optype *op_enum,
+		const char *op_string);
+
+/**
+ * Provide DH operation type enum for the given string.
+ *
+ * @param op_enum	Pointer to DH operation type enum to be filled.
+ *
+ * @param op_string	DH operation type string.
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int
+rte_cryptodev_get_dh_op_enum(enum rte_crypto_dh_optype *op_enum,
+		const char *op_string);
+
+
+/**
+ * Provide ECDH operation type enum for the given string.
+ *
+ * @param op_enum Pointer to ECDH operation type enum to be filled.
+ *
+ * @param op_string ECDH operation type string.
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int
+rte_cryptodev_get_ecdh_op_enum(enum rte_crypto_ecdh_optype *op_enum,
+ const char *op_string);
+
+/**
+ * Provide DSA operation type enum for the given string.
+ *
+ * @param op_enum Pointer to DSA operation type enum to be filled.
+ *
+ * @param op_string DSA operation type string.
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int
+rte_cryptodev_get_dsa_op_enum(enum rte_crypto_dsa_optype *op_enum,
+ const char *op_string);
+
+
+/**
+ * Provide ECDSA operation type enum for the given string.
+ *
+ * @param op_enum Pointer to ECDSA operation type enum to be filled.
+ *
+ * @param op_string ECDSA operation type string.
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int
+rte_cryptodev_get_ecdsa_op_enum(enum rte_crypto_ecdsa_optype *op_enum,
+ const char *op_string);
+
+/**
+ * Provide RSA padding scheme enum for the given string.
+ *
+ * @param pad_enum Pointer to RSA padding scheme enum to be filled.
+ *
+ * @param pad_string Padding scheme as string
+ *
+ * @return
+ * - Return -1 if string is not valid.
+ * - Return 0 if the string is valid.
+ */
+int
+rte_cryptodev_get_rsa_padding_enum(enum rte_crypto_rsa_padding_type *pad_enum,
+ const char *pad_string);
+
/** Macro used at end of crypto PMD list */
#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
{ RTE_CRYPTO_OP_TYPE_UNDEFINED }
--
1.8.3.1
More information about the dev
mailing list