DPDK patches and discussions
* [dpdk-dev] [PATCH v3 1/2] crypto/qat: add chacha poly implementation
@ 2020-04-17 17:30 Arek Kusztal
  2020-04-17 17:30 ` [dpdk-dev] [PATCH v3 2/2] test/cryptodev: add chacha poly test cases to cryptodev Arek Kusztal
From: Arek Kusztal @ 2020-04-17 17:30 UTC
  To: dev; +Cc: akhil.goyal, fiona.trahe, Arek Kusztal

This patchset adds a Chacha20-Poly1305 AEAD implementation to the Intel
QuickAssist Technology (QAT) PMD.

Signed-off-by: Arek Kusztal <arkadiuszx.kusztal@intel.com>
---
v3:
- rebased against 20.05
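
As a usage illustration (not part of the patch itself): a minimal sketch of
how an application might create a Chacha20-Poly1305 session through the
cryptodev API once this capability is exposed. The sizes mirror the new
capability entry below (32-byte key, 12-byte IV, 16-byte digest); the helper
name, dev_id/sess_mp/sess_priv_mp parameters and the IV_OFFSET convention are
placeholders, and the two-step session create/init API shown is the one used
around DPDK 20.05.

#include <rte_cryptodev.h>
#include <rte_crypto_sym.h>

/* By convention the IV is placed in the op private area after the sym op */
#define IV_OFFSET (sizeof(struct rte_crypto_op) + \
		sizeof(struct rte_crypto_sym_op))

static struct rte_cryptodev_sym_session *
create_chachapoly_session(uint8_t dev_id, struct rte_mempool *sess_mp,
		struct rte_mempool *sess_priv_mp, const uint8_t *key)
{
	/* Single AEAD xform; key must point to 32 bytes of key material */
	struct rte_crypto_sym_xform xform = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.aead = {
			.op = RTE_CRYPTO_AEAD_OP_ENCRYPT,
			.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
			.key = { .data = key, .length = 32 },
			.iv = { .offset = IV_OFFSET, .length = 12 },
			.digest_length = 16,
			.aad_length = 0,
		},
	};
	struct rte_cryptodev_sym_session *sess =
			rte_cryptodev_sym_session_create(sess_mp);

	if (sess == NULL)
		return NULL;
	/* 20.05-era API: bind the device-private session data separately */
	if (rte_cryptodev_sym_session_init(dev_id, sess, &xform,
			sess_priv_mp) != 0) {
		rte_cryptodev_sym_session_free(sess);
		return NULL;
	}
	return sess;
}
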
 doc/guides/cryptodevs/features/qat.ini    |  13 +--
 doc/guides/cryptodevs/qat.rst             |   1 +
 doc/guides/rel_notes/release_20_05.rst    |   4 +
 drivers/common/qat/qat_adf/icp_qat_hw.h   |  10 ++-
 drivers/crypto/qat/qat_sym_capabilities.h |  32 +++++++
 drivers/crypto/qat/qat_sym_pmd.c          |  11 ++-
 drivers/crypto/qat/qat_sym_session.c      | 140 +++++++++++++++---------------
 7 files changed, 135 insertions(+), 76 deletions(-)

diff --git a/doc/guides/cryptodevs/features/qat.ini b/doc/guides/cryptodevs/features/qat.ini
index 6e350eb..32591b5 100644
--- a/doc/guides/cryptodevs/features/qat.ini
+++ b/doc/guides/cryptodevs/features/qat.ini
@@ -60,12 +60,13 @@ AES CMAC (128) = Y
 ; Supported AEAD algorithms of the 'qat' crypto driver.
 ;
 [AEAD]
-AES GCM (128) = Y
-AES GCM (192) = Y
-AES GCM (256) = Y
-AES CCM (128) = Y
-AES CCM (192) = Y
-AES CCM (256) = Y
+AES GCM (128)     = Y
+AES GCM (192)     = Y
+AES GCM (256)     = Y
+AES CCM (128)     = Y
+AES CCM (192)     = Y
+AES CCM (256)     = Y
+CHACHA20-POLY1305 = Y
 
 ;
 ; Supported Asymmetric algorithms of the 'qat' crypto driver.
diff --git a/doc/guides/cryptodevs/qat.rst b/doc/guides/cryptodevs/qat.rst
index c79e686..c756065 100644
--- a/doc/guides/cryptodevs/qat.rst
+++ b/doc/guides/cryptodevs/qat.rst
@@ -70,6 +70,7 @@ Supported AEAD algorithms:
 
 * ``RTE_CRYPTO_AEAD_AES_GCM``
 * ``RTE_CRYPTO_AEAD_AES_CCM``
+* ``RTE_CRYPTO_AEAD_CHACHA20_POLY1305``
 
 
 Supported Chains
diff --git a/doc/guides/rel_notes/release_20_05.rst b/doc/guides/rel_notes/release_20_05.rst
index afc943b..0d925be 100644
--- a/doc/guides/rel_notes/release_20_05.rst
+++ b/doc/guides/rel_notes/release_20_05.rst
@@ -85,6 +85,10 @@ New Features
 
   Chacha20-Poly1305 AEAD algorithm can now be supported in Cryptodev.
 
+* **Updated the Intel QuickAssist Technology (QAT) symmetric crypto PMD.**
+
+  Added Chacha20-Poly1305 AEAD algorithm.
+
 
 Removed Items
 -------------
diff --git a/drivers/common/qat/qat_adf/icp_qat_hw.h b/drivers/common/qat/qat_adf/icp_qat_hw.h
index cef6486..fdc0f19 100644
--- a/drivers/common/qat/qat_adf/icp_qat_hw.h
+++ b/drivers/common/qat/qat_adf/icp_qat_hw.h
@@ -204,7 +204,9 @@ enum icp_qat_hw_cipher_algo {
 	ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
 	ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
 	ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
-	ICP_QAT_HW_CIPHER_DELIMITER = 10
+	ICP_QAT_HW_CIPHER_ALGO_SM4 = 10,
+	ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305 = 11,
+	ICP_QAT_HW_CIPHER_DELIMITER = 12
 };
 
 enum icp_qat_hw_cipher_mode {
@@ -306,6 +308,12 @@ enum icp_qat_hw_cipher_convert {
 #define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
 #define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
 #define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
+#define ICP_QAT_HW_CHACHAPOLY_KEY_SZ 32
+#define ICP_QAT_HW_CHACHAPOLY_IV_SZ 12
+#define ICP_QAT_HW_CHACHAPOLY_BLK_SZ 64
+#define ICP_QAT_HW_SPC_CTR_SZ 16
+#define ICP_QAT_HW_CHACHAPOLY_ICV_SZ 16
+#define ICP_QAT_HW_CHACHAPOLY_AAD_MAX_LOG 14
 
 #define ICP_QAT_HW_CIPHER_MAX_KEY_SZ ICP_QAT_HW_AES_256_F8_KEY_SZ
 
diff --git a/drivers/crypto/qat/qat_sym_capabilities.h b/drivers/crypto/qat/qat_sym_capabilities.h
index 27e57aa..bd95459 100644
--- a/drivers/crypto/qat/qat_sym_capabilities.h
+++ b/drivers/crypto/qat/qat_sym_capabilities.h
@@ -594,4 +594,36 @@
 		}, }							\
 	}
 
+#define QAT_EXTRA_GEN3_SYM_CAPABILITIES					\
+	{	/* Chacha20-Poly1305 */					\
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,			\
+		{.sym = {						\
+			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD,	\
+			{.aead = {					\
+				.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305, \
+				.block_size = 64,			\
+				.key_size = {				\
+					.min = 32,			\
+					.max = 32,			\
+					.increment = 0			\
+				},					\
+				.digest_size = {			\
+					.min = 16,			\
+					.max = 16,			\
+					.increment = 0			\
+				},					\
+				.aad_size = {				\
+					.min = 0,			\
+					.max = 240,			\
+					.increment = 1			\
+				},					\
+				.iv_size = {				\
+					.min = 12,			\
+					.max = 12,			\
+					.increment = 0			\
+				},					\
+			}, }						\
+		}, }							\
+	}
+
 #endif /* _QAT_SYM_CAPABILITIES_H_ */
diff --git a/drivers/crypto/qat/qat_sym_pmd.c b/drivers/crypto/qat/qat_sym_pmd.c
index e887c88..88e0fab 100644
--- a/drivers/crypto/qat/qat_sym_pmd.c
+++ b/drivers/crypto/qat/qat_sym_pmd.c
@@ -29,6 +29,13 @@ static const struct rte_cryptodev_capabilities qat_gen2_sym_capabilities[] = {
 	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
 };
 
+static const struct rte_cryptodev_capabilities qat_gen3_sym_capabilities[] = {
+	QAT_BASE_GEN1_SYM_CAPABILITIES,
+	QAT_EXTRA_GEN2_SYM_CAPABILITIES,
+	QAT_EXTRA_GEN3_SYM_CAPABILITIES,
+	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
+};
+
 static int qat_sym_qp_release(struct rte_cryptodev *dev,
 	uint16_t queue_pair_id);
 
@@ -320,9 +327,11 @@ qat_sym_dev_create(struct qat_pci_device *qat_pci_dev,
 		internals->qat_dev_capabilities = qat_gen1_sym_capabilities;
 		break;
 	case QAT_GEN2:
-	case QAT_GEN3:
 		internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
 		break;
+	case QAT_GEN3:
+		internals->qat_dev_capabilities = qat_gen3_sym_capabilities;
+		break;
 	default:
 		internals->qat_dev_capabilities = qat_gen2_sym_capabilities;
 		QAT_LOG(DEBUG,
diff --git a/drivers/crypto/qat/qat_sym_session.c b/drivers/crypto/qat/qat_sym_session.c
index fd2cc38..7bdc1a5 100644
--- a/drivers/crypto/qat/qat_sym_session.c
+++ b/drivers/crypto/qat/qat_sym_session.c
@@ -586,69 +586,68 @@ qat_sym_session_set_parameters(struct rte_cryptodev *dev,
 }
 
 static int
-qat_sym_session_handle_single_pass(struct qat_sym_dev_private *internals,
-		struct qat_sym_session *session,
+qat_sym_session_handle_single_pass(struct qat_sym_session *session,
 		struct rte_crypto_aead_xform *aead_xform)
 {
-	enum qat_device_gen qat_dev_gen = internals->qat_dev->qat_dev_gen;
+	struct icp_qat_fw_la_cipher_req_params *cipher_param =
+			(void *) &session->fw_req.serv_specif_rqpars;
 
-	if (qat_dev_gen == QAT_GEN3 &&
-			aead_xform->iv.length == QAT_AES_GCM_SPC_IV_SIZE) {
-		/* Use faster Single-Pass GCM */
-		struct icp_qat_fw_la_cipher_req_params *cipher_param =
-				(void *) &session->fw_req.serv_specif_rqpars;
-
-		session->is_single_pass = 1;
-		session->min_qat_dev_gen = QAT_GEN3;
-		session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
+	session->is_single_pass = 1;
+	session->min_qat_dev_gen = QAT_GEN3;
+	session->qat_cmd = ICP_QAT_FW_LA_CMD_CIPHER;
+	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
 		session->qat_mode = ICP_QAT_HW_CIPHER_AEAD_MODE;
-		session->cipher_iv.offset = aead_xform->iv.offset;
-		session->cipher_iv.length = aead_xform->iv.length;
-		if (qat_sym_session_aead_create_cd_cipher(session,
-				aead_xform->key.data, aead_xform->key.length))
-			return -EINVAL;
-		session->aad_len = aead_xform->aad_length;
-		session->digest_length = aead_xform->digest_length;
-		if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
-			session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
-			session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
-			ICP_QAT_FW_LA_RET_AUTH_SET(
-				session->fw_req.comn_hdr.serv_specif_flags,
-				ICP_QAT_FW_LA_RET_AUTH_RES);
-		} else {
-			session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
-			session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
-			ICP_QAT_FW_LA_CMP_AUTH_SET(
-				session->fw_req.comn_hdr.serv_specif_flags,
-				ICP_QAT_FW_LA_CMP_AUTH_RES);
-		}
-		ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
-				session->fw_req.comn_hdr.serv_specif_flags,
-				ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
-		ICP_QAT_FW_LA_PROTO_SET(
-				session->fw_req.comn_hdr.serv_specif_flags,
-				ICP_QAT_FW_LA_NO_PROTO);
 		ICP_QAT_FW_LA_GCM_IV_LEN_FLAG_SET(
-				session->fw_req.comn_hdr.serv_specif_flags,
-				ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
-		session->fw_req.comn_hdr.service_cmd_id =
-				ICP_QAT_FW_LA_CMD_CIPHER;
-		session->cd.cipher.cipher_config.val =
-				ICP_QAT_HW_CIPHER_CONFIG_BUILD(
-					ICP_QAT_HW_CIPHER_AEAD_MODE,
-					session->qat_cipher_alg,
-					ICP_QAT_HW_CIPHER_NO_CONVERT,
-					session->qat_dir);
-		QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
-				aead_xform->digest_length,
-				QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
-				QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
-		session->cd.cipher.cipher_config.reserved =
-				ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
-					aead_xform->aad_length);
-		cipher_param->spc_aad_sz = aead_xform->aad_length;
-		cipher_param->spc_auth_res_sz = aead_xform->digest_length;
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_GCM_IV_LEN_12_OCTETS);
+	} else {
+		/* Chacha-Poly is a special case that uses QAT CTR mode */
+		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
 	}
+	session->cipher_iv.offset = aead_xform->iv.offset;
+	session->cipher_iv.length = aead_xform->iv.length;
+	if (qat_sym_session_aead_create_cd_cipher(session,
+			aead_xform->key.data, aead_xform->key.length))
+		return -EINVAL;
+	session->aad_len = aead_xform->aad_length;
+	session->digest_length = aead_xform->digest_length;
+	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
+		session->qat_dir = ICP_QAT_HW_CIPHER_ENCRYPT;
+		session->auth_op = ICP_QAT_HW_AUTH_GENERATE;
+		ICP_QAT_FW_LA_RET_AUTH_SET(
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_RET_AUTH_RES);
+	} else {
+		session->qat_dir = ICP_QAT_HW_CIPHER_DECRYPT;
+		session->auth_op = ICP_QAT_HW_AUTH_VERIFY;
+		ICP_QAT_FW_LA_CMP_AUTH_SET(
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_CMP_AUTH_RES);
+	}
+	ICP_QAT_FW_LA_SINGLE_PASS_PROTO_FLAG_SET(
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_SINGLE_PASS_PROTO);
+	ICP_QAT_FW_LA_PROTO_SET(
+			session->fw_req.comn_hdr.serv_specif_flags,
+			ICP_QAT_FW_LA_NO_PROTO);
+	session->fw_req.comn_hdr.service_cmd_id =
+			ICP_QAT_FW_LA_CMD_CIPHER;
+	session->cd.cipher.cipher_config.val =
+			ICP_QAT_HW_CIPHER_CONFIG_BUILD(
+				ICP_QAT_HW_CIPHER_AEAD_MODE,
+				session->qat_cipher_alg,
+				ICP_QAT_HW_CIPHER_NO_CONVERT,
+				session->qat_dir);
+	QAT_FIELD_SET(session->cd.cipher.cipher_config.val,
+			aead_xform->digest_length,
+			QAT_CIPHER_AEAD_HASH_CMP_LEN_BITPOS,
+			QAT_CIPHER_AEAD_HASH_CMP_LEN_MASK);
+	session->cd.cipher.cipher_config.reserved =
+			ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
+				aead_xform->aad_length);
+	cipher_param->spc_aad_sz = aead_xform->aad_length;
+	cipher_param->spc_auth_res_sz = aead_xform->digest_length;
+
 	return 0;
 }
 
@@ -803,6 +802,10 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 {
 	struct rte_crypto_aead_xform *aead_xform = &xform->aead;
 	enum rte_crypto_auth_operation crypto_operation;
+	struct qat_sym_dev_private *internals =
+			dev->data->dev_private;
+	enum qat_device_gen qat_dev_gen =
+			internals->qat_dev->qat_dev_gen;
 
 	/*
 	 * Store AEAD IV parameters as cipher IV,
@@ -811,6 +814,7 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 	session->cipher_iv.offset = xform->aead.iv.offset;
 	session->cipher_iv.length = xform->aead.iv.length;
 
+	session->is_single_pass = 0;
 	switch (aead_xform->algo) {
 	case RTE_CRYPTO_AEAD_AES_GCM:
 		if (qat_sym_validate_aes_key(aead_xform->key.length,
@@ -820,6 +824,10 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 		}
 		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_GALOIS_128;
+		if (qat_dev_gen > QAT_GEN2 && aead_xform->iv.length ==
+				QAT_AES_GCM_SPC_IV_SIZE)
+			return qat_sym_session_handle_single_pass(session,
+					aead_xform);
 		if (session->cipher_iv.length == 0)
 			session->cipher_iv.length = AES_GCM_J0_LEN;
 
@@ -833,23 +841,19 @@ qat_sym_session_configure_aead(struct rte_cryptodev *dev,
 		session->qat_mode = ICP_QAT_HW_CIPHER_CTR_MODE;
 		session->qat_hash_alg = ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC;
 		break;
+	case RTE_CRYPTO_AEAD_CHACHA20_POLY1305:
+		if (aead_xform->key.length != ICP_QAT_HW_CHACHAPOLY_KEY_SZ)
+			return -EINVAL;
+		session->qat_cipher_alg =
+				ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305;
+		return qat_sym_session_handle_single_pass(session,
+						aead_xform);
 	default:
 		QAT_LOG(ERR, "Crypto: Undefined AEAD specified %u\n",
 				aead_xform->algo);
 		return -EINVAL;
 	}
 
-	session->is_single_pass = 0;
-	if (aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) {
-		/* Use faster Single-Pass GCM if possible */
-		int res = qat_sym_session_handle_single_pass(
-				dev->data->dev_private, session, aead_xform);
-		if (res < 0)
-			return res;
-		if (session->is_single_pass)
-			return 0;
-	}
-
 	if ((aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
 			aead_xform->algo == RTE_CRYPTO_AEAD_AES_GCM) ||
 			(aead_xform->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
-- 
2.1.0
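
A further illustration (again, not part of the patch): a sketch of how an
application could probe at runtime whether a QAT device reports the new
Chacha20-Poly1305 capability; per the qat_sym_pmd.c change above, only
QAT_GEN3 devices will. dev_id is a placeholder, the query helpers are the
generic cryptodev ones, and the sizes passed to the check mirror the new
capability entry.

#include <rte_cryptodev.h>

static int
qat_supports_chachapoly(uint8_t dev_id)
{
	const struct rte_cryptodev_symmetric_capability *cap;
	struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_AEAD,
		.algo.aead = RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
	};

	cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
	if (cap == NULL)
		return 0;
	/* key 32, digest 16, aad 0 (range 0..240), iv 12 per the capability */
	return rte_cryptodev_sym_capability_check_aead(cap, 32, 16, 0, 12) == 0;
}
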

