* [PATCH 1/3] cryptodev: add ec points to sm2 op @ 2024-01-29 18:59 Arkadiusz Kusztal 2024-01-29 18:59 ` [PATCH 2/3] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal ` (3 more replies) 0 siblings, 4 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-01-29 18:59 UTC (permalink / raw) To: dev; +Cc: gakhil, ciara.power, Arkadiusz Kusztal In the case when PMD cannot support full process of the SM2, but elliptic curve computation only, additional fields are needed to handle such a case. Points C1, kP therefore were added to the SM2 crypto operation struct. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- lib/cryptodev/rte_crypto_asym.h | 119 +++++++++++++++++++------------- 1 file changed, 71 insertions(+), 48 deletions(-) diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h index 39d3da3952..55620d2d3a 100644 --- a/lib/cryptodev/rte_crypto_asym.h +++ b/lib/cryptodev/rte_crypto_asym.h @@ -599,40 +599,6 @@ struct rte_crypto_ecpm_op_param { /**< Scalar to multiply the input point */ }; -/** - * Asymmetric crypto transform data - * - * Structure describing asym xforms. - */ -struct rte_crypto_asym_xform { - struct rte_crypto_asym_xform *next; - /**< Pointer to next xform to set up xform chain.*/ - enum rte_crypto_asym_xform_type xform_type; - /**< Asymmetric crypto transform */ - - union { - struct rte_crypto_rsa_xform rsa; - /**< RSA xform parameters */ - - struct rte_crypto_modex_xform modex; - /**< Modular Exponentiation xform parameters */ - - struct rte_crypto_modinv_xform modinv; - /**< Modular Multiplicative Inverse xform parameters */ - - struct rte_crypto_dh_xform dh; - /**< DH xform parameters */ - - struct rte_crypto_dsa_xform dsa; - /**< DSA xform parameters */ - - struct rte_crypto_ec_xform ec; - /**< EC xform parameters, used by elliptic curve based - * operations. - */ - }; -}; - /** * SM2 operation params. */ @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { * will be overwritten by the PMD with the decrypted length. */ - rte_crypto_param cipher; - /**< - * Pointer to input data - * - to be decrypted for SM2 private decrypt. - * - * Pointer to output data - * - for SM2 public encrypt. - * In this case the underlying array should have been allocated - * with enough memory to hold ciphertext output (at least X bytes - * for prime field curve of N bytes and for message M bytes, - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will - * be overwritten by the PMD with the encrypted length. - */ + union { + rte_crypto_param cipher; + /**< + * Pointer to input data + * - to be decrypted for SM2 private decrypt. + * + * Pointer to output data + * - for SM2 public encrypt. + * In this case the underlying array should have been allocated + * with enough memory to hold ciphertext output (at least X bytes + * for prime field curve of N bytes and for message M bytes, + * where X = (C1 || C2 || C3) and computed based on SM2 RFC as + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will + * be overwritten by the PMD with the encrypted length. + */ + struct { + struct rte_crypto_ec_point C1; + /**< + * This field is used only when PMD does not support full + * process of the SM2 encryption/decryption, but elliptic + * curve part only. + * + * In the case of encryption, it is an output - point C1 = (x1,y1). 
+ * In the case of decryption, it is an input - point C1 = (x1,y1).
+ *
+ */
+ struct rte_crypto_ec_point kP;
+ /**<
+ * This field is used only when PMD does not support full
+ * process of the SM2 encryption/decryption, but elliptic
+ * curve part only.
+ *
+ * It is an output in the encryption case, it is a point
+ * [k]P = (x2,y2)
+ */
+ };
+ };
 rte_crypto_uint id;
 /**< The SM2 id used by signer and verifier. */
@@ -697,6 +686,40 @@ struct rte_crypto_sm2_op_param {
 */
 };
+/**
+ * Asymmetric crypto transform data
+ *
+ * Structure describing asym xforms.
+ */
+struct rte_crypto_asym_xform {
+ struct rte_crypto_asym_xform *next;
+ /**< Pointer to next xform to set up xform chain.*/
+ enum rte_crypto_asym_xform_type xform_type;
+ /**< Asymmetric crypto transform */
+
+ union {
+ struct rte_crypto_rsa_xform rsa;
+ /**< RSA xform parameters */
+
+ struct rte_crypto_modex_xform modex;
+ /**< Modular Exponentiation xform parameters */
+
+ struct rte_crypto_modinv_xform modinv;
+ /**< Modular Multiplicative Inverse xform parameters */
+
+ struct rte_crypto_dh_xform dh;
+ /**< DH xform parameters */
+
+ struct rte_crypto_dsa_xform dsa;
+ /**< DSA xform parameters */
+
+ struct rte_crypto_ec_xform ec;
+ /**< EC xform parameters, used by elliptic curve based
+ * operations.
+ */
+ };
+};
+
 /**
 * Asymmetric Cryptographic Operation.
 *
-- 
2.34.1

^ permalink raw reply	[flat|nested] 42+ messages in thread
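A minimal sketch of how an application could drive the EC-only decrypt path through the fields added above; op_pool, sess, dev_id, qp_id and the C1 buffers are placeholders assumed to be set up elsewhere, and error handling is omitted:

#include <stdint.h>
#include <rte_crypto.h>
#include <rte_cryptodev.h>

/* Illustrative only: hand C1 = (x1,y1) to the PMD and read back the
 * elliptic curve result [dB]C1 = (x2,y2) from the kP field. */
static void
sm2_partial_decrypt(struct rte_mempool *op_pool, void *sess,
                    uint8_t dev_id, uint16_t qp_id,
                    uint8_t c1_x[32], uint8_t c1_y[32])
{
        static uint8_t x2[32], y2[32];  /* receives (x2,y2) */
        struct rte_crypto_op *op =
                rte_crypto_op_alloc(op_pool, RTE_CRYPTO_OP_TYPE_ASYMMETRIC);
        struct rte_crypto_asym_op *asym = op->asym;

        asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT;
        asym->sm2.C1.x.data = c1_x;     /* first point of the received ciphertext */
        asym->sm2.C1.x.length = 32;
        asym->sm2.C1.y.data = c1_y;
        asym->sm2.C1.y.length = 32;
        asym->sm2.kP.x.data = x2;       /* PMD writes the EC result here */
        asym->sm2.kP.y.data = y2;

        rte_crypto_op_attach_asym_session(op, sess);
        rte_cryptodev_enqueue_burst(dev_id, qp_id, &op, 1);
        while (rte_cryptodev_dequeue_burst(dev_id, qp_id, &op, 1) == 0)
                ;
        /* the application then derives t = KDF(x2 || y2), recovers M = C2 xor t
         * and verifies C3 against SM3(x2 || M || y2) in software */
}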
* [PATCH 2/3] crypto/qat: add sm2 encryption/decryption function 2024-01-29 18:59 [PATCH 1/3] cryptodev: add ec points to sm2 op Arkadiusz Kusztal @ 2024-01-29 18:59 ` Arkadiusz Kusztal 2024-01-29 18:59 ` [PATCH 3/3] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal ` (2 subsequent siblings) 3 siblings, 0 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-01-29 18:59 UTC (permalink / raw) To: dev; +Cc: gakhil, ciara.power, Arkadiusz Kusztal This commit adds SM2 elliptic curve based asymmetric encryption and decryption to the Intel QuickAssist Technology PMD. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- .../common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + drivers/common/qat/qat_adf/qat_pke.h | 20 +++ drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++- 3 files changed, 157 insertions(+), 6 deletions(-) diff --git a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h index 630c6e1a9b..aa49612ca1 100644 --- a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h +++ b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h @@ -1542,6 +1542,9 @@ icp_qat_fw_mmp_ecdsa_verify_gfp_521_input::in in @endlink * @li no output parameters */ +#define PKE_ECSM2_ENCRYPTION 0x25221720 +#define PKE_ECSM2_DECRYPTION 0x201716e6 + #define PKE_LIVENESS 0x00000001 /**< Functionality ID for PKE_LIVENESS * @li 0 input parameter(s) diff --git a/drivers/common/qat/qat_adf/qat_pke.h b/drivers/common/qat/qat_adf/qat_pke.h index f88932a275..ac051e965d 100644 --- a/drivers/common/qat/qat_adf/qat_pke.h +++ b/drivers/common/qat/qat_adf/qat_pke.h @@ -334,4 +334,24 @@ get_sm2_ecdsa_verify_function(void) return qat_function; } +static struct qat_asym_function +get_sm2_encryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_ENCRYPTION, 32 + }; + + return qat_function; +} + +static struct qat_asym_function +get_sm2_decryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_DECRYPTION, 32 + }; + + return qat_function; +} + #endif diff --git a/drivers/crypto/qat/qat_asym.c b/drivers/crypto/qat/qat_asym.c index 2bf3060278..7407821a6e 100644 --- a/drivers/crypto/qat/qat_asym.c +++ b/drivers/crypto/qat/qat_asym.c @@ -925,6 +925,15 @@ sm2_ecdsa_sign_set_input(struct icp_qat_fw_pke_request *qat_req, qat_req->input_param_count = 3; qat_req->output_param_count = 2; + HEXDUMP("SM2 K test", asym_op->sm2.k.data, + cookie->alg_bytesize); + HEXDUMP("SM2 K", cookie->input_array[0], + cookie->alg_bytesize); + HEXDUMP("SM2 msg", cookie->input_array[1], + cookie->alg_bytesize); + HEXDUMP("SM2 pkey", cookie->input_array[2], + cookie->alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; } @@ -975,6 +984,114 @@ sm2_ecdsa_sign_collect(struct rte_crypto_asym_op *asym_op, return RTE_CRYPTO_OP_STATUS_SUCCESS; } +static int +sm2_encryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_encryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(asym_op->sm2.k, qat_func_alignsize, 0); + SET_PKE_LN(xform->ec.q.x, qat_func_alignsize, 1); + SET_PKE_LN(xform->ec.q.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + 
qat_req->output_param_count = 4; + + HEXDUMP("SM2 K", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("SM2 Q.x", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("SM2 Q.y", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + +static uint8_t +sm2_encryption_collect(struct rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.C1.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.C1.y.data, cookie->output_array[1], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[2], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[3], alg_bytesize); + asym_op->sm2.C1.x.length = alg_bytesize; + asym_op->sm2.C1.y.length = alg_bytesize; + asym_op->sm2.kP.x.length = alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("C1[x1]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("C1[y]", cookie->output_array[1], + alg_bytesize); + HEXDUMP("kP[x]", cookie->output_array[2], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[3], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static int +sm2_decryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_decryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(xform->ec.pkey, qat_func_alignsize, 0); + SET_PKE_LN(asym_op->sm2.C1.x, qat_func_alignsize, 1); + SET_PKE_LN(asym_op->sm2.C1.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + qat_req->output_param_count = 2; + + HEXDUMP("d", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("C1[x]", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("C1[y]", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static uint8_t +sm2_decryption_collect(struct rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[1], alg_bytesize); + asym_op->sm2.kP.x.length = alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("kP[x]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[1], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + static int asym_set_input(struct icp_qat_fw_pke_request *qat_req, struct qat_asym_op_cookie *cookie, @@ -1006,14 +1123,20 @@ asym_set_input(struct icp_qat_fw_pke_request *qat_req, asym_op, xform); } case RTE_CRYPTO_ASYM_XFORM_SM2: - if (asym_op->sm2.op_type == - RTE_CRYPTO_ASYM_OP_VERIFY) { + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) { + return sm2_encryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) { + return sm2_decryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) { return sm2_ecdsa_verify_set_input(qat_req, cookie, asym_op, xform); - } else { + } else if (asym_op->sm2.op_type == 
RTE_CRYPTO_ASYM_OP_SIGN) { return sm2_ecdsa_sign_set_input(qat_req, cookie, asym_op, xform); } + break; default: QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform"); return -EINVAL; @@ -1103,7 +1226,13 @@ qat_asym_collect_response(struct rte_crypto_op *op, case RTE_CRYPTO_ASYM_XFORM_ECDH: return ecdh_collect(asym_op, cookie); case RTE_CRYPTO_ASYM_XFORM_SM2: - return sm2_ecdsa_sign_collect(asym_op, cookie); + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) + return sm2_encryption_collect(asym_op, cookie); + else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) + return sm2_decryption_collect(asym_op, cookie); + else + return sm2_ecdsa_sign_collect(asym_op, cookie); + default: QAT_LOG(ERR, "Not supported xform type"); return RTE_CRYPTO_OP_STATUS_ERROR; @@ -1373,9 +1502,8 @@ qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused, case RTE_CRYPTO_ASYM_XFORM_ECDSA: case RTE_CRYPTO_ASYM_XFORM_ECPM: case RTE_CRYPTO_ASYM_XFORM_ECDH: - session_set_ec(qat_session, xform); - break; case RTE_CRYPTO_ASYM_XFORM_SM2: + session_set_ec(qat_session, xform); break; default: ret = -ENOTSUP; -- 2.34.1 ^ permalink raw reply [flat|nested] 42+ messages in thread
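The encrypt path above passes three 32-byte inputs (k, Q.x, Q.y) and collects four 32-byte outputs (C1.x, C1.y, kP.x, kP.y); the decrypt path passes the private key and C1 and collects the two kP coordinates. SET_PKE_LN is QAT-internal, but the 32-byte qat_func_alignsize implies the usual fixed-width, big-endian operand layout, roughly as sketched in this illustrative helper (not the PMD code):

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Copy a variable-length big-endian operand into a fixed 32-byte PKE slot,
 * left-padded with zeros, which is what a 32-byte align size amounts to for
 * SM2 (256-bit) operands. */
static void
pke_put_be32(uint8_t slot[32], const uint8_t *src, size_t len)
{
        memset(slot, 0, 32);
        if (len > 32)
                len = 32;       /* SM2 coordinates and scalars never exceed 32 bytes */
        memcpy(slot + (32 - len), src, len);
}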
* [PATCH 3/3] app/test: add test sm2 C1/Kp test cases 2024-01-29 18:59 [PATCH 1/3] cryptodev: add ec points to sm2 op Arkadiusz Kusztal 2024-01-29 18:59 ` [PATCH 2/3] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal @ 2024-01-29 18:59 ` Arkadiusz Kusztal 2024-02-01 8:07 ` [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op Akhil Goyal 2024-09-29 17:29 ` [PATCH v2] " Arkadiusz Kusztal 3 siblings, 0 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-01-29 18:59 UTC (permalink / raw) To: dev; +Cc: gakhil, ciara.power, Arkadiusz Kusztal This commit adds tests cases to be used when C1 or kP elliptic curve points need to be computed. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- app/test/test_cryptodev_asym.c | 116 ++++++++++++++++++++- app/test/test_cryptodev_sm2_test_vectors.h | 112 +++++++++++++++++++- 2 files changed, 224 insertions(+), 4 deletions(-) diff --git a/app/test/test_cryptodev_asym.c b/app/test/test_cryptodev_asym.c index 17daf734e8..20da0fd815 100644 --- a/app/test/test_cryptodev_asym.c +++ b/app/test/test_cryptodev_asym.c @@ -2639,6 +2639,8 @@ test_sm2_sign(void) asym_op->sm2.k.data = input_params.k.data; asym_op->sm2.k.length = input_params.k.length; } + asym_op->sm2.k.data = input_params.k.data; + asym_op->sm2.k.length = input_params.k.length; /* Init out buf */ asym_op->sm2.r.data = output_buf_r; @@ -3188,7 +3190,7 @@ static int send_one(void) ticks++; if (ticks >= DEQ_TIMEOUT) { RTE_LOG(ERR, USER1, - "line %u FAILED: Cannot dequeue the crypto op on device %d", + "line %u FAILED: Cannot dequeue the crypto op on device, timeout %d", __LINE__, params->valid_devs[0]); return TEST_FAILED; } @@ -3467,6 +3469,110 @@ KAT_RSA_Decrypt_CRT(const void *data) return 0; } +static int +test_sm2_encryption(const void *data) +{ + struct rte_crypto_asym_xform xform = { 0 }; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_C1_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_C1_y1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.curve_id = RTE_CRYPTO_EC_GROUP_SM2; + xform.ec.q = test_vector->pubkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT; + self->op->asym->sm2.k = test_vector->k; + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.C1.x.data = result_C1_x1; + self->op->asym->sm2.C1.y.data = result_C1_y1; + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "C1[x]", self->op->asym->sm2.C1.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "C1[y]", self->op->asym->sm2.C1.y.data, + self->op->asym->sm2.C1.y.length); + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + self->op->asym->sm2.kP.x.length); + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.kP.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.x.data, + self->op->asym->sm2.C1.x.data, + test_vector->C1.x.length, + "Incorrect value of C1[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.y.data, + 
self->op->asym->sm2.C1.y.data, + test_vector->C1.y.length, + "Incorrect value of C1[y]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return TEST_SUCCESS; +} + +static int +test_sm2_decryption(const void *data) +{ + struct rte_crypto_asym_xform xform = {}; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.pkey = test_vector->pkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT; + self->op->asym->sm2.C1 = test_vector->C1; + + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.C1.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return 0; +} + static struct unit_test_suite cryptodev_openssl_asym_testsuite = { .suite_name = "Crypto Device OPENSSL ASYM Unit Test Suite", .setup = testsuite_setup, @@ -3522,6 +3628,14 @@ static struct unit_test_suite cryptodev_qat_asym_testsuite = { .setup = testsuite_setup, .teardown = testsuite_teardown, .unit_test_cases = { + TEST_CASE_NAMED_WITH_DATA( + "SM2 encryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_encryption, &sm2_enc_hw_t1), + TEST_CASE_NAMED_WITH_DATA( + "SM2 decryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_decryption, &sm2_enc_hw_t1), TEST_CASE_NAMED_WITH_DATA( "Modular Exponentiation (mod=128, base=20, exp=3, res=128)", ut_setup_asym, ut_teardown_asym, diff --git a/app/test/test_cryptodev_sm2_test_vectors.h b/app/test/test_cryptodev_sm2_test_vectors.h index 41f5f7074a..92f7e77671 100644 --- a/app/test/test_cryptodev_sm2_test_vectors.h +++ b/app/test/test_cryptodev_sm2_test_vectors.h @@ -8,19 +8,125 @@ #include "rte_crypto_asym.h" struct crypto_testsuite_sm2_params { - rte_crypto_param pubkey_qx; - rte_crypto_param pubkey_qy; + union { + struct { + rte_crypto_param pubkey_qx; + rte_crypto_param pubkey_qy; + }; + struct rte_crypto_ec_point pubkey; + }; rte_crypto_param pkey; rte_crypto_param k; rte_crypto_param sign_r; rte_crypto_param sign_s; rte_crypto_param id; - rte_crypto_param cipher; + union { + rte_crypto_param cipher; + struct { + struct rte_crypto_ec_point C1; + struct rte_crypto_ec_point kP; + }; + }; rte_crypto_param message; rte_crypto_param digest; int curve; }; +uint8_t sm2_enc_pub_x_t1[] = { + 0x26, 0xf1, 0xf3, 0xef, 0x12, 0x27, 0x85, 0xd1, + 0x7d, 0x38, 0x70, 0xc2, 0x43, 0x46, 0x50, 
0x36, + 0x3f, 0xdf, 0x4b, 0x2f, 0x45, 0x0e, 0x8e, 0xd1, + 0xb6, 0x0f, 0xdc, 0x1f, 0xc6, 0xf0, 0x19, 0xab +}; +uint8_t sm2_enc_pub_y_t1[] = { + 0xd9, 0x19, 0x8b, 0xdb, 0xef, 0xa5, 0x84, 0x76, + 0xec, 0x82, 0x25, 0x12, 0x5b, 0x8c, 0xe3, 0xe1, + 0x0a, 0x10, 0x0d, 0xc6, 0x97, 0x6c, 0xc1, 0x89, + 0xd9, 0x6d, 0xa6, 0x88, 0x9e, 0xbc, 0xd3, 0x7a +}; +uint8_t sm2_k_t1[] = { + 0x12, 0x34, 0x56, 0x78, 0xB9, 0x6E, 0x5A, 0xF7, + 0x0B, 0xD4, 0x80, 0xB4, 0x72, 0x40, 0x9A, 0x9A, + 0x32, 0x72, 0x57, 0xF1, 0xEB, 0xB7, 0x3F, 0x5B, + 0x07, 0x33, 0x54, 0xB2, 0x48, 0x66, 0x85, 0x63 +}; + +uint8_t sm2_C1_x_t1[] = { + 0x15, 0xf6, 0xb7, 0x49, 0x00, 0x39, 0x73, 0x9d, + 0x5b, 0xb3, 0xd3, 0xe9, 0x1d, 0xe4, 0xc8, 0xbd, + 0x08, 0xe3, 0x6a, 0x22, 0xff, 0x1a, 0xbf, 0xdc, + 0x75, 0x6b, 0x12, 0x85, 0x81, 0xc5, 0x8b, 0xcf +}; + +uint8_t sm2_C1_y_t1[] = { + 0x6a, 0x92, 0xd4, 0xd8, 0x13, 0xec, 0x8f, 0x9a, + 0x9d, 0xbe, 0x51, 0x47, 0x6f, 0x54, 0xc5, 0x41, + 0x98, 0xf5, 0x5f, 0x83, 0xce, 0x1c, 0x18, 0x1a, + 0x48, 0xbd, 0xeb, 0x38, 0x13, 0x67, 0x0d, 0x06 +}; + +uint8_t sm2_kP_x_t1[] = { + 0x6b, 0xfb, 0x9a, 0xcb, 0xc6, 0xb6, 0x36, 0x31, + 0x0f, 0xd1, 0xdd, 0x9c, 0x9f, 0x17, 0x5f, 0x3f, + 0x68, 0x13, 0x96, 0xd2, 0x54, 0x5b, 0xa6, 0x19, + 0x78, 0x1f, 0x87, 0x3d, 0x81, 0xc3, 0x21, 0x01 +}; + +uint8_t sm2_kP_y_t1[] = { + 0xa4, 0x08, 0xf3, 0x74, 0x35, 0x51, 0x8c, 0x81, + 0x06, 0x4c, 0x8f, 0x31, 0x49, 0xe3, 0x5b, 0x4d, + 0xfc, 0x3d, 0x19, 0xac, 0x7d, 0x07, 0xd0, 0x9a, + 0x99, 0x5a, 0x25, 0x16, 0x66, 0xff, 0x41, 0x3c +}; + +uint8_t sm2_kP_d_t1[] = { + 0x6F, 0xCB, 0xA2, 0xEF, 0x9A, 0xE0, 0xAB, 0x90, + 0x2B, 0xC3, 0xBD, 0xE3, 0xFF, 0x91, 0x5D, 0x44, + 0xBA, 0x4C, 0xC7, 0x8F, 0x88, 0xE2, 0xF8, 0xE7, + 0xF8, 0x99, 0x6D, 0x3B, 0x8C, 0xCE, 0xED, 0xEE +}; + +struct crypto_testsuite_sm2_params sm2_enc_hw_t1 = { + .k = { + .data = sm2_k_t1, + .length = sizeof(sm2_k_t1) + }, + .pubkey = { + .x = { + .data = sm2_enc_pub_x_t1, + .length = sizeof(sm2_enc_pub_x_t1) + }, + .y = { + .data = sm2_enc_pub_y_t1, + .length = sizeof(sm2_enc_pub_y_t1) + } + }, + .C1 = { + .x = { + .data = sm2_C1_x_t1, + .length = sizeof(sm2_C1_x_t1) + }, + .y = { + .data = sm2_C1_y_t1, + .length = sizeof(sm2_C1_y_t1) + } + }, + .kP = { + .x = { + .data = sm2_kP_x_t1, + .length = sizeof(sm2_kP_x_t1) + }, + .y = { + .data = sm2_kP_y_t1, + .length = sizeof(sm2_kP_y_t1) + } + }, + .pkey = { + .data = sm2_kP_d_t1, + .length = sizeof(sm2_kP_d_t1) + } +}; + static uint8_t fp256_pkey[] = { 0x77, 0x84, 0x35, 0x65, 0x4c, 0x7a, 0x6d, 0xb1, 0x1e, 0x63, 0x0b, 0x41, 0x97, 0x36, 0x04, 0xf4, -- 2.34.1 ^ permalink raw reply [flat|nested] 42+ messages in thread
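The C1 and kP vectors above can be cross-checked offline against the k and public-key vectors; a standalone sketch (not part of the patch), assuming OpenSSL 1.1.1 or newer for its built-in SM2 curve (NID_sm2), with error handling omitted:

#include <stdint.h>
#include <string.h>
#include <openssl/bn.h>
#include <openssl/ec.h>
#include <openssl/obj_mac.h>

/* Returns 1 if C1 == [k]G on the SM2 curve, e.g.
 * check_c1(sm2_k_t1, sm2_C1_x_t1, sm2_C1_y_t1); checking kP == [k]Q is
 * analogous after loading Q with EC_POINT_set_affine_coordinates(). */
static int
check_c1(const uint8_t k[32], const uint8_t c1x[32], const uint8_t c1y[32])
{
        EC_GROUP *grp = EC_GROUP_new_by_curve_name(NID_sm2);
        BN_CTX *ctx = BN_CTX_new();
        BIGNUM *bk = BN_bin2bn(k, 32, NULL);
        BIGNUM *x = BN_new(), *y = BN_new();
        EC_POINT *r = EC_POINT_new(grp);
        uint8_t out[32];
        int ok;

        EC_POINT_mul(grp, r, bk, NULL, NULL, ctx);      /* r = [k]G */
        EC_POINT_get_affine_coordinates(grp, r, x, y, ctx);
        BN_bn2binpad(x, out, 32);
        ok = memcmp(out, c1x, 32) == 0;
        BN_bn2binpad(y, out, 32);
        ok &= memcmp(out, c1y, 32) == 0;

        EC_POINT_free(r);
        BN_free(x);
        BN_free(y);
        BN_free(bk);
        BN_CTX_free(ctx);
        EC_GROUP_free(grp);
        return ok;
}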
* RE: [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op 2024-01-29 18:59 [PATCH 1/3] cryptodev: add ec points to sm2 op Arkadiusz Kusztal 2024-01-29 18:59 ` [PATCH 2/3] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal 2024-01-29 18:59 ` [PATCH 3/3] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal @ 2024-02-01 8:07 ` Akhil Goyal 2024-02-01 13:25 ` Kusztal, ArkadiuszX 2024-09-29 17:29 ` [PATCH v2] " Arkadiusz Kusztal 3 siblings, 1 reply; 42+ messages in thread From: Akhil Goyal @ 2024-02-01 8:07 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: ciara.power > ---------------------------------------------------------------------- > In the case when PMD cannot support full process of the SM2, > but elliptic curve computation only, additional fields > are needed to handle such a case. > Asym crypto APIs are no longer experimental. Hence adding new fields would lead to ABI break. > Points C1, kP therefore were added to the SM2 crypto operation struct. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op
  2024-02-01  8:07 ` [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op Akhil Goyal
@ 2024-02-01 13:25   ` Kusztal, ArkadiuszX
  2024-09-24 16:30     ` Akhil Goyal
  0 siblings, 1 reply; 42+ messages in thread
From: Kusztal, ArkadiuszX @ 2024-02-01 13:25 UTC (permalink / raw)
  To: Akhil Goyal, dev; +Cc: Power, Ciara

> -----Original Message-----
> From: Akhil Goyal <gakhil@marvell.com>
> Sent: Thursday, February 1, 2024 9:08 AM
> To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org
> Cc: Power, Ciara <ciara.power@intel.com>
> Subject: RE: [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op
>
> > ----------------------------------------------------------------------
> > In the case when PMD cannot support full process of the SM2, but
> > elliptic curve computation only, additional fields are needed to
> > handle such a case.
> >
>
> Asym crypto APIs are no longer experimental.
> Hence adding new fields would lead to ABI break.

It seems that the `__rte_crypto_op_reset` and `rte_crypto_op_pool_create`
functions do not need versioning, and we could easily version them if needed.
But the `flags` field changes its offset, and this is actually problematic.
This means we cannot make this change before 24.11.

> > Points C1, kP therefore were added to the SM2 crypto operation struct.
> >
> > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com>

^ permalink raw reply	[flat|nested] 42+ messages in thread
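The offset concern can be made mechanical; a sketch of a build-time guard, assuming (as the reply implies) that struct rte_crypto_asym_op keeps a flags member after the union of per-algorithm op params, and using a made-up baseline constant in place of the offset recorded for the released ABI:

#include <stddef.h>
#include <rte_crypto_asym.h>

/* ASYM_OP_FLAGS_ABI_OFFSET is a placeholder for the value captured against
 * the currently released ABI, not a DPDK symbol. */
#define ASYM_OP_FLAGS_ABI_OFFSET 24

_Static_assert(offsetof(struct rte_crypto_asym_op, flags) ==
               ASYM_OP_FLAGS_ABI_OFFSET,
               "rte_crypto_asym_op::flags moved: ABI break");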
* RE: [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op 2024-02-01 13:25 ` Kusztal, ArkadiuszX @ 2024-09-24 16:30 ` Akhil Goyal 0 siblings, 0 replies; 42+ messages in thread From: Akhil Goyal @ 2024-09-24 16:30 UTC (permalink / raw) To: Kusztal, ArkadiuszX, dev Cc: Kai Ji, Brian Dooley, Gowrishankar Muthukrishnan, Anoob Joseph Hi Arek, Can you resend this series if you still want to pursue so that CI can run? > > > In the case when PMD cannot support full process of the SM2, but > > > elliptic curve computation only, additional fields are needed to > > > handle such a case. > > > > > > > Asym crypto APIs are no longer experimental. > > Hence adding new fields would lead to ABI break. > > It seems that > `__rte_crypto_op_reset` > `rte_crypto_op_pool_create` > functions do not need versioning, and we could easily do it if needed. > But the field `flags` changes an offset, and this is actually problematic. > Which means that we cannot do this change before 24.11. > > > > > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > > > > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v2] cryptodev: add ec points to sm2 op 2024-01-29 18:59 [PATCH 1/3] cryptodev: add ec points to sm2 op Arkadiusz Kusztal ` (2 preceding siblings ...) 2024-02-01 8:07 ` [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op Akhil Goyal @ 2024-09-29 17:29 ` Arkadiusz Kusztal 2024-10-01 7:57 ` [EXTERNAL] " Akhil Goyal ` (2 more replies) 3 siblings, 3 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-09-29 17:29 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal In the case when PMD cannot support the full process of the SM2, but elliptic curve computation only, additional fields are needed to handle such a case. Points C1, kP therefore were added to the SM2 crypto operation struct. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- lib/cryptodev/rte_crypto_asym.h | 119 ++++++++++++++++++++++++---------------- 1 file changed, 71 insertions(+), 48 deletions(-) diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h index 39d3da3952..f59759062f 100644 --- a/lib/cryptodev/rte_crypto_asym.h +++ b/lib/cryptodev/rte_crypto_asym.h @@ -600,40 +600,6 @@ struct rte_crypto_ecpm_op_param { }; /** - * Asymmetric crypto transform data - * - * Structure describing asym xforms. - */ -struct rte_crypto_asym_xform { - struct rte_crypto_asym_xform *next; - /**< Pointer to next xform to set up xform chain.*/ - enum rte_crypto_asym_xform_type xform_type; - /**< Asymmetric crypto transform */ - - union { - struct rte_crypto_rsa_xform rsa; - /**< RSA xform parameters */ - - struct rte_crypto_modex_xform modex; - /**< Modular Exponentiation xform parameters */ - - struct rte_crypto_modinv_xform modinv; - /**< Modular Multiplicative Inverse xform parameters */ - - struct rte_crypto_dh_xform dh; - /**< DH xform parameters */ - - struct rte_crypto_dsa_xform dsa; - /**< DSA xform parameters */ - - struct rte_crypto_ec_xform ec; - /**< EC xform parameters, used by elliptic curve based - * operations. - */ - }; -}; - -/** * SM2 operation params. */ struct rte_crypto_sm2_op_param { @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { * will be overwritten by the PMD with the decrypted length. */ - rte_crypto_param cipher; - /**< - * Pointer to input data - * - to be decrypted for SM2 private decrypt. - * - * Pointer to output data - * - for SM2 public encrypt. - * In this case the underlying array should have been allocated - * with enough memory to hold ciphertext output (at least X bytes - * for prime field curve of N bytes and for message M bytes, - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will - * be overwritten by the PMD with the encrypted length. - */ + union { + rte_crypto_param cipher; + /**< + * Pointer to input data + * - to be decrypted for SM2 private decrypt. + * + * Pointer to output data + * - for SM2 public encrypt. + * In this case the underlying array should have been allocated + * with enough memory to hold ciphertext output (at least X bytes + * for prime field curve of N bytes and for message M bytes, + * where X = (C1 || C2 || C3) and computed based on SM2 RFC as + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will + * be overwritten by the PMD with the encrypted length. + */ + struct { + struct rte_crypto_ec_point C1; + /**< + * This field is used only when PMD does not support the full + * process of the SM2 encryption/decryption, but the elliptic + * curve part only. 
+ *
+ * In the case of encryption, it is an output - point C1 = (x1,y1).
+ * In the case of decryption, it is an input - point C1 = (x1,y1).
+ *
+ */
+ struct rte_crypto_ec_point kP;
+ /**<
+ * This field is used only when PMD does not support the full
+ * process of the SM2 encryption/decryption, but the elliptic
+ * curve part only.
+ *
+ * It is an output in the encryption case, it is a point
+ * [k]P = (x2,y2)
+ */
+ };
+ };
 rte_crypto_uint id;
 /**< The SM2 id used by signer and verifier. */
@@ -698,6 +687,40 @@ struct rte_crypto_sm2_op_param {
 };
 
 /**
+ * Asymmetric crypto transform data
+ *
+ * Structure describing asym xforms.
+ */
+struct rte_crypto_asym_xform {
+ struct rte_crypto_asym_xform *next;
+ /**< Pointer to next xform to set up xform chain.*/
+ enum rte_crypto_asym_xform_type xform_type;
+ /**< Asymmetric crypto transform */
+
+ union {
+ struct rte_crypto_rsa_xform rsa;
+ /**< RSA xform parameters */
+
+ struct rte_crypto_modex_xform modex;
+ /**< Modular Exponentiation xform parameters */
+
+ struct rte_crypto_modinv_xform modinv;
+ /**< Modular Multiplicative Inverse xform parameters */
+
+ struct rte_crypto_dh_xform dh;
+ /**< DH xform parameters */
+
+ struct rte_crypto_dsa_xform dsa;
+ /**< DSA xform parameters */
+
+ struct rte_crypto_ec_xform ec;
+ /**< EC xform parameters, used by elliptic curve based
+ * operations.
+ */
+ };
+};
+
+/**
  * Asymmetric Cryptographic Operation.
  *
  * Structure describing asymmetric crypto operation params.
-- 
2.13.6


^ permalink raw reply	[flat|nested] 42+ messages in thread
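When only C1 and kP come back from the device, the rest of SM2 encryption stays in software; a sketch of that remaining half per GB/T 32918.4, where sm3() and sm2_kdf_x2y2() are assumed helpers (an SM3 hash and the SM3-based counter KDF), not DPDK APIs, and SM2_MAX_MSG is an arbitrary bound so the sketch can use stack buffers (mlen <= SM2_MAX_MSG assumed):

#include <stdint.h>
#include <stddef.h>
#include <string.h>

void sm3(const uint8_t *in, size_t len, uint8_t out[32]);               /* assumed */
void sm2_kdf_x2y2(const uint8_t x2[32], const uint8_t y2[32],
                  uint8_t *t, size_t tlen);                             /* assumed */

#define SM2_MAX_MSG 256

/* Derive the keystream from the EC result (x2,y2) = kP, mask the message
 * into C2 and compute the hash C3; the ciphertext is then assembled as
 * C1 || C2 || C3 in the ordering used by the comment above. */
static void
sm2_finish_encrypt(const uint8_t x2[32], const uint8_t y2[32],
                   const uint8_t *msg, size_t mlen,
                   uint8_t *c2, uint8_t c3[32])
{
        uint8_t t[SM2_MAX_MSG];
        uint8_t buf[32 + SM2_MAX_MSG + 32];
        size_t i;

        sm2_kdf_x2y2(x2, y2, t, mlen);          /* t = KDF(x2 || y2, mlen) */
        for (i = 0; i < mlen; i++)
                c2[i] = msg[i] ^ t[i];          /* C2 = M xor t */

        memcpy(buf, x2, 32);                    /* C3 = SM3(x2 || M || y2) */
        memcpy(buf + 32, msg, mlen);
        memcpy(buf + 32 + mlen, y2, 32);
        sm3(buf, 32 + mlen + 32, c3);
}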
* RE: [EXTERNAL] [PATCH v2] cryptodev: add ec points to sm2 op 2024-09-29 17:29 ` [PATCH v2] " Arkadiusz Kusztal @ 2024-10-01 7:57 ` Akhil Goyal 2024-10-03 14:39 ` Akhil Goyal 2024-10-08 6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal 2 siblings, 0 replies; 42+ messages in thread From: Akhil Goyal @ 2024-10-01 7:57 UTC (permalink / raw) To: Arkadiusz Kusztal, dev, Gowrishankar Muthukrishnan Cc: brian.dooley, Anoob Joseph > In the case when PMD cannot support the full process of the SM2, > but elliptic curve computation only, additional fields > are needed to handle such a case. > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > lib/cryptodev/rte_crypto_asym.h | 119 ++++++++++++++++++++++++------------- > --- > 1 file changed, 71 insertions(+), 48 deletions(-) > > diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h > index 39d3da3952..f59759062f 100644 > --- a/lib/cryptodev/rte_crypto_asym.h > +++ b/lib/cryptodev/rte_crypto_asym.h > @@ -600,40 +600,6 @@ struct rte_crypto_ecpm_op_param { > }; > > /** > - * Asymmetric crypto transform data > - * > - * Structure describing asym xforms. > - */ > -struct rte_crypto_asym_xform { > - struct rte_crypto_asym_xform *next; > - /**< Pointer to next xform to set up xform chain.*/ > - enum rte_crypto_asym_xform_type xform_type; > - /**< Asymmetric crypto transform */ > - > - union { > - struct rte_crypto_rsa_xform rsa; > - /**< RSA xform parameters */ > - > - struct rte_crypto_modex_xform modex; > - /**< Modular Exponentiation xform parameters */ > - > - struct rte_crypto_modinv_xform modinv; > - /**< Modular Multiplicative Inverse xform parameters */ > - > - struct rte_crypto_dh_xform dh; > - /**< DH xform parameters */ > - > - struct rte_crypto_dsa_xform dsa; > - /**< DSA xform parameters */ > - > - struct rte_crypto_ec_xform ec; > - /**< EC xform parameters, used by elliptic curve based > - * operations. > - */ > - }; > -}; Above change seems unnecessary. > - > -/** > * SM2 operation params. > */ > struct rte_crypto_sm2_op_param { > @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { > * will be overwritten by the PMD with the decrypted length. > */ > > - rte_crypto_param cipher; > - /**< > - * Pointer to input data > - * - to be decrypted for SM2 private decrypt. > - * > - * Pointer to output data > - * - for SM2 public encrypt. > - * In this case the underlying array should have been allocated > - * with enough memory to hold ciphertext output (at least X bytes > - * for prime field curve of N bytes and for message M bytes, > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > - * be overwritten by the PMD with the encrypted length. > - */ > + union { > + rte_crypto_param cipher; > + /**< > + * Pointer to input data > + * - to be decrypted for SM2 private decrypt. > + * > + * Pointer to output data > + * - for SM2 public encrypt. > + * In this case the underlying array should have been allocated > + * with enough memory to hold ciphertext output (at least X > bytes > + * for prime field curve of N bytes and for message M bytes, > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > as > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > + * be overwritten by the PMD with the encrypted length. 
> + */ > + struct { > + struct rte_crypto_ec_point C1; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * In the case of encryption, it is an output - point C1 = > (x1,y1). > + * In the case of decryption, if is an input - point C1 = > (x1,y1) > + * > + */ > + struct rte_crypto_ec_point kP; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * It is an output in the encryption case, it is a point > + * [k]P = (x2,y2) > + */ > + }; > + }; > > rte_crypto_uint id; > /**< The SM2 id used by signer and verifier. */ > @@ -698,6 +687,40 @@ struct rte_crypto_sm2_op_param { > }; > > /** > + * Asymmetric crypto transform data > + * > + * Structure describing asym xforms. > + */ > +struct rte_crypto_asym_xform { > + struct rte_crypto_asym_xform *next; > + /**< Pointer to next xform to set up xform chain.*/ > + enum rte_crypto_asym_xform_type xform_type; > + /**< Asymmetric crypto transform */ > + > + union { > + struct rte_crypto_rsa_xform rsa; > + /**< RSA xform parameters */ > + > + struct rte_crypto_modex_xform modex; > + /**< Modular Exponentiation xform parameters */ > + > + struct rte_crypto_modinv_xform modinv; > + /**< Modular Multiplicative Inverse xform parameters */ > + > + struct rte_crypto_dh_xform dh; > + /**< DH xform parameters */ > + > + struct rte_crypto_dsa_xform dsa; > + /**< DSA xform parameters */ > + > + struct rte_crypto_ec_xform ec; > + /**< EC xform parameters, used by elliptic curve based > + * operations. > + */ > + }; > +}; > + > +/** > * Asymmetric Cryptographic Operation. > * > * Structure describing asymmetric crypto operation params. > -- > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v2] cryptodev: add ec points to sm2 op 2024-09-29 17:29 ` [PATCH v2] " Arkadiusz Kusztal 2024-10-01 7:57 ` [EXTERNAL] " Akhil Goyal @ 2024-10-03 14:39 ` Akhil Goyal 2024-10-07 8:29 ` Kusztal, ArkadiuszX 2024-10-08 6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal 2 siblings, 1 reply; 42+ messages in thread From: Akhil Goyal @ 2024-10-03 14:39 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > In the case when PMD cannot support the full process of the SM2, > but elliptic curve computation only, additional fields > are needed to handle such a case. > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > lib/cryptodev/rte_crypto_asym.h | 119 ++++++++++++++++++++++++------------- > --- > 1 file changed, 71 insertions(+), 48 deletions(-) > > diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h > index 39d3da3952..f59759062f 100644 > --- a/lib/cryptodev/rte_crypto_asym.h > +++ b/lib/cryptodev/rte_crypto_asym.h > @@ -600,40 +600,6 @@ struct rte_crypto_ecpm_op_param { > }; > > /** > - * Asymmetric crypto transform data > - * > - * Structure describing asym xforms. > - */ > -struct rte_crypto_asym_xform { > - struct rte_crypto_asym_xform *next; > - /**< Pointer to next xform to set up xform chain.*/ > - enum rte_crypto_asym_xform_type xform_type; > - /**< Asymmetric crypto transform */ > - > - union { > - struct rte_crypto_rsa_xform rsa; > - /**< RSA xform parameters */ > - > - struct rte_crypto_modex_xform modex; > - /**< Modular Exponentiation xform parameters */ > - > - struct rte_crypto_modinv_xform modinv; > - /**< Modular Multiplicative Inverse xform parameters */ > - > - struct rte_crypto_dh_xform dh; > - /**< DH xform parameters */ > - > - struct rte_crypto_dsa_xform dsa; > - /**< DSA xform parameters */ > - > - struct rte_crypto_ec_xform ec; > - /**< EC xform parameters, used by elliptic curve based > - * operations. > - */ > - }; > -}; > - > -/** > * SM2 operation params. > */ > struct rte_crypto_sm2_op_param { > @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { > * will be overwritten by the PMD with the decrypted length. > */ > > - rte_crypto_param cipher; > - /**< > - * Pointer to input data > - * - to be decrypted for SM2 private decrypt. > - * > - * Pointer to output data > - * - for SM2 public encrypt. > - * In this case the underlying array should have been allocated > - * with enough memory to hold ciphertext output (at least X bytes > - * for prime field curve of N bytes and for message M bytes, > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > - * be overwritten by the PMD with the encrypted length. > - */ > + union { > + rte_crypto_param cipher; > + /**< > + * Pointer to input data > + * - to be decrypted for SM2 private decrypt. > + * > + * Pointer to output data > + * - for SM2 public encrypt. > + * In this case the underlying array should have been allocated > + * with enough memory to hold ciphertext output (at least X > bytes > + * for prime field curve of N bytes and for message M bytes, > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > as > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > + * be overwritten by the PMD with the encrypted length. 
> + */ > + struct { > + struct rte_crypto_ec_point C1; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * In the case of encryption, it is an output - point C1 = > (x1,y1). > + * In the case of decryption, if is an input - point C1 = > (x1,y1) > + * > + */ > + struct rte_crypto_ec_point kP; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * It is an output in the encryption case, it is a point > + * [k]P = (x2,y2) > + */ > + }; > + }; > > rte_crypto_uint id; > /**< The SM2 id used by signer and verifier. */ > @@ -698,6 +687,40 @@ struct rte_crypto_sm2_op_param { > }; > How is the application supposed to know, it need to fill these parameters and PMD does not support full operation? Can we add some capability checks? Also send the patches for test case and PMD support. > /** > + * Asymmetric crypto transform data > + * > + * Structure describing asym xforms. > + */ > +struct rte_crypto_asym_xform { > + struct rte_crypto_asym_xform *next; > + /**< Pointer to next xform to set up xform chain.*/ > + enum rte_crypto_asym_xform_type xform_type; > + /**< Asymmetric crypto transform */ > + > + union { > + struct rte_crypto_rsa_xform rsa; > + /**< RSA xform parameters */ > + > + struct rte_crypto_modex_xform modex; > + /**< Modular Exponentiation xform parameters */ > + > + struct rte_crypto_modinv_xform modinv; > + /**< Modular Multiplicative Inverse xform parameters */ > + > + struct rte_crypto_dh_xform dh; > + /**< DH xform parameters */ > + > + struct rte_crypto_dsa_xform dsa; > + /**< DSA xform parameters */ > + > + struct rte_crypto_ec_xform ec; > + /**< EC xform parameters, used by elliptic curve based > + * operations. > + */ > + }; > +}; > + > +/** > * Asymmetric Cryptographic Operation. > * > * Structure describing asymmetric crypto operation params. > -- > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
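For reference, the existing asymmetric capability query already lets an application test whether a device advertises SM2 and the encrypt op type at all; a sketch (dev_id is a placeholder), though distinguishing partial from full SM2 still needs something extra, which is what the rest of the thread discusses:

#include <stdint.h>
#include <rte_cryptodev.h>
#include <rte_crypto_asym.h>

/* Illustrative only: query the SM2 xform capability of a device. */
static int
sm2_encrypt_supported(uint8_t dev_id)
{
        struct rte_cryptodev_asym_capability_idx idx = {
                .type = RTE_CRYPTO_ASYM_XFORM_SM2,
        };
        const struct rte_cryptodev_asymmetric_xform_capability *cap =
                rte_cryptodev_asym_capability_get(dev_id, &idx);

        return cap != NULL &&
               rte_cryptodev_asym_xform_capability_check_optype(cap,
                        RTE_CRYPTO_ASYM_OP_ENCRYPT);
}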
* RE: [EXTERNAL] [PATCH v2] cryptodev: add ec points to sm2 op 2024-10-03 14:39 ` Akhil Goyal @ 2024-10-07 8:29 ` Kusztal, ArkadiuszX 0 siblings, 0 replies; 42+ messages in thread From: Kusztal, ArkadiuszX @ 2024-10-07 8:29 UTC (permalink / raw) To: Akhil Goyal, dev; +Cc: Dooley, Brian > -----Original Message----- > From: Akhil Goyal <gakhil@marvell.com> > Sent: Thursday, October 3, 2024 4:39 PM > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org > Cc: Dooley, Brian <brian.dooley@intel.com> > Subject: RE: [EXTERNAL] [PATCH v2] cryptodev: add ec points to sm2 op > > > In the case when PMD cannot support the full process of the SM2, but > > elliptic curve computation only, additional fields are needed to > > handle such a case. > > > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > > --- > > lib/cryptodev/rte_crypto_asym.h | 119 > > ++++++++++++++++++++++++------------- > > --- > > 1 file changed, 71 insertions(+), 48 deletions(-) > > > > diff --git a/lib/cryptodev/rte_crypto_asym.h > > b/lib/cryptodev/rte_crypto_asym.h index 39d3da3952..f59759062f 100644 > > --- a/lib/cryptodev/rte_crypto_asym.h > > +++ b/lib/cryptodev/rte_crypto_asym.h > > @@ -600,40 +600,6 @@ struct rte_crypto_ecpm_op_param { }; > > > > /** > > - * Asymmetric crypto transform data > > - * > > - * Structure describing asym xforms. > > - */ > > -struct rte_crypto_asym_xform { > > - struct rte_crypto_asym_xform *next; > > - /**< Pointer to next xform to set up xform chain.*/ > > - enum rte_crypto_asym_xform_type xform_type; > > - /**< Asymmetric crypto transform */ > > - > > - union { > > - struct rte_crypto_rsa_xform rsa; > > - /**< RSA xform parameters */ > > - > > - struct rte_crypto_modex_xform modex; > > - /**< Modular Exponentiation xform parameters */ > > - > > - struct rte_crypto_modinv_xform modinv; > > - /**< Modular Multiplicative Inverse xform parameters */ > > - > > - struct rte_crypto_dh_xform dh; > > - /**< DH xform parameters */ > > - > > - struct rte_crypto_dsa_xform dsa; > > - /**< DSA xform parameters */ > > - > > - struct rte_crypto_ec_xform ec; > > - /**< EC xform parameters, used by elliptic curve based > > - * operations. > > - */ > > - }; > > -}; > > - > > -/** > > * SM2 operation params. > > */ > > struct rte_crypto_sm2_op_param { > > @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { > > * will be overwritten by the PMD with the decrypted length. > > */ > > > > - rte_crypto_param cipher; > > - /**< > > - * Pointer to input data > > - * - to be decrypted for SM2 private decrypt. > > - * > > - * Pointer to output data > > - * - for SM2 public encrypt. > > - * In this case the underlying array should have been allocated > > - * with enough memory to hold ciphertext output (at least X bytes > > - * for prime field curve of N bytes and for message M bytes, > > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > - * be overwritten by the PMD with the encrypted length. > > - */ > > + union { > > + rte_crypto_param cipher; > > + /**< > > + * Pointer to input data > > + * - to be decrypted for SM2 private decrypt. > > + * > > + * Pointer to output data > > + * - for SM2 public encrypt. 
> > + * In this case the underlying array should have been allocated > > + * with enough memory to hold ciphertext output (at least X > > bytes > > + * for prime field curve of N bytes and for message M bytes, > > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > > as > > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > + * be overwritten by the PMD with the encrypted length. > > + */ > > + struct { > > + struct rte_crypto_ec_point C1; > > + /**< > > + * This field is used only when PMD does not support > the > > full > > + * process of the SM2 encryption/decryption, but the > > elliptic > > + * curve part only. > > + * > > + * In the case of encryption, it is an output - point C1 = > > (x1,y1). > > + * In the case of decryption, if is an input - point C1 = > > (x1,y1) > > + * > > + */ > > + struct rte_crypto_ec_point kP; > > + /**< > > + * This field is used only when PMD does not support > the > > full > > + * process of the SM2 encryption/decryption, but the > > elliptic > > + * curve part only. > > + * > > + * It is an output in the encryption case, it is a point > > + * [k]P = (x2,y2) > > + */ > > + }; > > + }; > > > > rte_crypto_uint id; > > /**< The SM2 id used by signer and verifier. */ @@ -698,6 +687,40 @@ > > struct rte_crypto_sm2_op_param { }; > > > > How is the application supposed to know, it need to fill these parameters and > PMD does not support full operation? > Can we add some capability checks? Initially I though it should be based on the .rst file PMD information, like with the key generation random number. Otherwise, it could rather be a feature flag than a capability? > > Also send the patches for test case and PMD support. Sure, I will send. > > > /** > > + * Asymmetric crypto transform data > > + * > > + * Structure describing asym xforms. > > + */ > > +struct rte_crypto_asym_xform { > > + struct rte_crypto_asym_xform *next; > > + /**< Pointer to next xform to set up xform chain.*/ > > + enum rte_crypto_asym_xform_type xform_type; > > + /**< Asymmetric crypto transform */ > > + > > + union { > > + struct rte_crypto_rsa_xform rsa; > > + /**< RSA xform parameters */ > > + > > + struct rte_crypto_modex_xform modex; > > + /**< Modular Exponentiation xform parameters */ > > + > > + struct rte_crypto_modinv_xform modinv; > > + /**< Modular Multiplicative Inverse xform parameters */ > > + > > + struct rte_crypto_dh_xform dh; > > + /**< DH xform parameters */ > > + > > + struct rte_crypto_dsa_xform dsa; > > + /**< DSA xform parameters */ > > + > > + struct rte_crypto_ec_xform ec; > > + /**< EC xform parameters, used by elliptic curve based > > + * operations. > > + */ > > + }; > > +}; > > + > > +/** > > * Asymmetric Cryptographic Operation. > > * > > * Structure describing asymmetric crypto operation params. > > -- > > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v3 1/4] cryptodev: add partial sm2 feature flag
  2024-09-29 17:29 ` [PATCH v2] " Arkadiusz Kusztal
  2024-10-01  7:57   ` [EXTERNAL] " Akhil Goyal
  2024-10-03 14:39   ` Akhil Goyal
@ 2024-10-08  6:28   ` Arkadiusz Kusztal
  2024-10-08  6:28     ` [PATCH v3 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal
                       ` (4 more replies)
  2 siblings, 5 replies; 42+ messages in thread
From: Arkadiusz Kusztal @ 2024-10-08  6:28 UTC (permalink / raw)
  To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal

Due to the complex ways of handling asymmetric cryptography algorithms,
capabilities may differ between hardware and software PMDs, or even
between hardware PMDs. One example is an algorithm that needs an
additional round of hashing, like SM2.

Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com>
---
 lib/cryptodev/rte_cryptodev.h | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/lib/cryptodev/rte_cryptodev.h b/lib/cryptodev/rte_cryptodev.h
index bec947f6d5..c0e816b17f 100644
--- a/lib/cryptodev/rte_cryptodev.h
+++ b/lib/cryptodev/rte_cryptodev.h
@@ -554,6 +554,8 @@ rte_cryptodev_asym_get_xform_string(enum rte_crypto_asym_xform_type xform_enum);
 /**< Support inner checksum computation/verification */
 #define RTE_CRYPTODEV_FF_SECURITY_RX_INJECT (1ULL << 28)
 /**< Support Rx injection after security processing */
+#define RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2 (1ULL << 29)
+/**< Support the elliptic curve part only in SM2 */
 
 /**
  * Get the name of a crypto device feature flag
-- 
2.13.6


^ permalink raw reply	[flat|nested] 42+ messages in thread
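A sketch of how an application might branch on the new flag at runtime (dev_id is a placeholder; the EC-only branch corresponds to the C1/kP fields added in the next patch):

#include <stdint.h>
#include <rte_cryptodev.h>

/* Illustrative only: detect the SM2 handling model advertised by the device. */
static int
sm2_is_partial(uint8_t dev_id)
{
        struct rte_cryptodev_info info;

        rte_cryptodev_info_get(dev_id, &info);
        /* set: PMD does only the EC part, so the application supplies and
         * consumes C1/kP and runs the SM3 KDF and C2/C3 handling itself;
         * clear: PMD consumes/produces the flat cipher buffer as before */
        return (info.feature_flags & RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2) != 0;
}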
* [PATCH v3 2/4] cryptodev: add ec points to sm2 op 2024-10-08 6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal @ 2024-10-08 6:28 ` Arkadiusz Kusztal 2024-10-08 11:27 ` [EXTERNAL] " Akhil Goyal 2024-10-08 6:28 ` [PATCH v3 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal ` (3 subsequent siblings) 4 siblings, 1 reply; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 6:28 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal In the case when PMD cannot support the full process of the SM2, but elliptic curve computation only, additional fields are needed to handle such a case. Points C1, kP therefore were added to the SM2 crypto operation struct. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- lib/cryptodev/rte_crypto_asym.h | 119 ++++++++++++++++++++++++---------------- 1 file changed, 71 insertions(+), 48 deletions(-) diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h index 39d3da3952..f59759062f 100644 --- a/lib/cryptodev/rte_crypto_asym.h +++ b/lib/cryptodev/rte_crypto_asym.h @@ -600,40 +600,6 @@ struct rte_crypto_ecpm_op_param { }; /** - * Asymmetric crypto transform data - * - * Structure describing asym xforms. - */ -struct rte_crypto_asym_xform { - struct rte_crypto_asym_xform *next; - /**< Pointer to next xform to set up xform chain.*/ - enum rte_crypto_asym_xform_type xform_type; - /**< Asymmetric crypto transform */ - - union { - struct rte_crypto_rsa_xform rsa; - /**< RSA xform parameters */ - - struct rte_crypto_modex_xform modex; - /**< Modular Exponentiation xform parameters */ - - struct rte_crypto_modinv_xform modinv; - /**< Modular Multiplicative Inverse xform parameters */ - - struct rte_crypto_dh_xform dh; - /**< DH xform parameters */ - - struct rte_crypto_dsa_xform dsa; - /**< DSA xform parameters */ - - struct rte_crypto_ec_xform ec; - /**< EC xform parameters, used by elliptic curve based - * operations. - */ - }; -}; - -/** * SM2 operation params. */ struct rte_crypto_sm2_op_param { @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { * will be overwritten by the PMD with the decrypted length. */ - rte_crypto_param cipher; - /**< - * Pointer to input data - * - to be decrypted for SM2 private decrypt. - * - * Pointer to output data - * - for SM2 public encrypt. - * In this case the underlying array should have been allocated - * with enough memory to hold ciphertext output (at least X bytes - * for prime field curve of N bytes and for message M bytes, - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will - * be overwritten by the PMD with the encrypted length. - */ + union { + rte_crypto_param cipher; + /**< + * Pointer to input data + * - to be decrypted for SM2 private decrypt. + * + * Pointer to output data + * - for SM2 public encrypt. + * In this case the underlying array should have been allocated + * with enough memory to hold ciphertext output (at least X bytes + * for prime field curve of N bytes and for message M bytes, + * where X = (C1 || C2 || C3) and computed based on SM2 RFC as + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will + * be overwritten by the PMD with the encrypted length. + */ + struct { + struct rte_crypto_ec_point C1; + /**< + * This field is used only when PMD does not support the full + * process of the SM2 encryption/decryption, but the elliptic + * curve part only. 
+ *
+ * In the case of encryption, it is an output - point C1 = (x1,y1).
+ * In the case of decryption, it is an input - point C1 = (x1,y1).
+ *
+ */
+ struct rte_crypto_ec_point kP;
+ /**<
+ * This field is used only when PMD does not support the full
+ * process of the SM2 encryption/decryption, but the elliptic
+ * curve part only.
+ *
+ * It is an output in the encryption case, it is a point
+ * [k]P = (x2,y2)
+ */
+ };
+ };
 rte_crypto_uint id;
 /**< The SM2 id used by signer and verifier. */
@@ -698,6 +687,40 @@ struct rte_crypto_sm2_op_param {
 };
 
 /**
+ * Asymmetric crypto transform data
+ *
+ * Structure describing asym xforms.
+ */
+struct rte_crypto_asym_xform {
+ struct rte_crypto_asym_xform *next;
+ /**< Pointer to next xform to set up xform chain.*/
+ enum rte_crypto_asym_xform_type xform_type;
+ /**< Asymmetric crypto transform */
+
+ union {
+ struct rte_crypto_rsa_xform rsa;
+ /**< RSA xform parameters */
+
+ struct rte_crypto_modex_xform modex;
+ /**< Modular Exponentiation xform parameters */
+
+ struct rte_crypto_modinv_xform modinv;
+ /**< Modular Multiplicative Inverse xform parameters */
+
+ struct rte_crypto_dh_xform dh;
+ /**< DH xform parameters */
+
+ struct rte_crypto_dsa_xform dsa;
+ /**< DSA xform parameters */
+
+ struct rte_crypto_ec_xform ec;
+ /**< EC xform parameters, used by elliptic curve based
+ * operations.
+ */
+ };
+};
+
+/**
  * Asymmetric Cryptographic Operation.
  *
  * Structure describing asymmetric crypto operation params.
-- 
2.13.6


^ permalink raw reply	[flat|nested] 42+ messages in thread
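Since cipher and the C1/kP pair now share storage in the union, an application fills exactly one view per operation; a sketch of that selection, where partial_sm2 stands for the feature-flag check from patch 1/4 and the buffer arguments are placeholders:

#include <stdint.h>
#include <stddef.h>
#include <rte_crypto_asym.h>

/* Illustrative only: fill the union view matching the PMD's SM2 model. */
static void
sm2_fill_enc_op(struct rte_crypto_asym_op *asym, int partial_sm2,
                uint8_t c1_x[32], uint8_t c1_y[32],
                uint8_t x2[32], uint8_t y2[32],
                uint8_t *ct, size_t ct_len)
{
        if (partial_sm2) {
                asym->sm2.C1.x.data = c1_x;     /* EC-only path: points out */
                asym->sm2.C1.y.data = c1_y;
                asym->sm2.kP.x.data = x2;
                asym->sm2.kP.y.data = y2;
        } else {
                asym->sm2.cipher.data = ct;     /* full-offload path */
                asym->sm2.cipher.length = ct_len;
        }
}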
* RE: [EXTERNAL] [PATCH v3 2/4] cryptodev: add ec points to sm2 op 2024-10-08 6:28 ` [PATCH v3 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal @ 2024-10-08 11:27 ` Akhil Goyal 2024-10-08 11:46 ` Kusztal, ArkadiuszX 0 siblings, 1 reply; 42+ messages in thread From: Akhil Goyal @ 2024-10-08 11:27 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > /** > - * Asymmetric crypto transform data > - * > - * Structure describing asym xforms. > - */ > -struct rte_crypto_asym_xform { > - struct rte_crypto_asym_xform *next; > - /**< Pointer to next xform to set up xform chain.*/ > - enum rte_crypto_asym_xform_type xform_type; > - /**< Asymmetric crypto transform */ > - > - union { > - struct rte_crypto_rsa_xform rsa; > - /**< RSA xform parameters */ > - > - struct rte_crypto_modex_xform modex; > - /**< Modular Exponentiation xform parameters */ > - > - struct rte_crypto_modinv_xform modinv; > - /**< Modular Multiplicative Inverse xform parameters */ > - > - struct rte_crypto_dh_xform dh; > - /**< DH xform parameters */ > - > - struct rte_crypto_dsa_xform dsa; > - /**< DSA xform parameters */ > - > - struct rte_crypto_ec_xform ec; > - /**< EC xform parameters, used by elliptic curve based > - * operations. > - */ > - }; > -}; > - Above change seems redundant. It was commented on v2 as well. > -/** > * SM2 operation params. > */ > struct rte_crypto_sm2_op_param { > @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { > * will be overwritten by the PMD with the decrypted length. > */ > > - rte_crypto_param cipher; > - /**< > - * Pointer to input data > - * - to be decrypted for SM2 private decrypt. > - * > - * Pointer to output data > - * - for SM2 public encrypt. > - * In this case the underlying array should have been allocated > - * with enough memory to hold ciphertext output (at least X bytes > - * for prime field curve of N bytes and for message M bytes, > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > - * be overwritten by the PMD with the encrypted length. > - */ > + union { > + rte_crypto_param cipher; > + /**< > + * Pointer to input data > + * - to be decrypted for SM2 private decrypt. > + * > + * Pointer to output data > + * - for SM2 public encrypt. > + * In this case the underlying array should have been allocated > + * with enough memory to hold ciphertext output (at least X > bytes > + * for prime field curve of N bytes and for message M bytes, > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > as > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > + * be overwritten by the PMD with the encrypted length. > + */ > + struct { > + struct rte_crypto_ec_point C1; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * In the case of encryption, it is an output - point C1 = > (x1,y1). > + * In the case of decryption, if is an input - point C1 = > (x1,y1) > + * > + */ > + struct rte_crypto_ec_point kP; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * It is an output in the encryption case, it is a point > + * [k]P = (x2,y2) > + */ > + }; > + }; You may add a reference to the newly added feature flag here. > > rte_crypto_uint id; > /**< The SM2 id used by signer and verifier. 
*/ > @@ -698,6 +687,40 @@ struct rte_crypto_sm2_op_param { > }; > > /** > + * Asymmetric crypto transform data > + * > + * Structure describing asym xforms. > + */ > +struct rte_crypto_asym_xform { > + struct rte_crypto_asym_xform *next; > + /**< Pointer to next xform to set up xform chain.*/ > + enum rte_crypto_asym_xform_type xform_type; > + /**< Asymmetric crypto transform */ > + > + union { > + struct rte_crypto_rsa_xform rsa; > + /**< RSA xform parameters */ > + > + struct rte_crypto_modex_xform modex; > + /**< Modular Exponentiation xform parameters */ > + > + struct rte_crypto_modinv_xform modinv; > + /**< Modular Multiplicative Inverse xform parameters */ > + > + struct rte_crypto_dh_xform dh; > + /**< DH xform parameters */ > + > + struct rte_crypto_dsa_xform dsa; > + /**< DSA xform parameters */ > + > + struct rte_crypto_ec_xform ec; > + /**< EC xform parameters, used by elliptic curve based > + * operations. > + */ > + }; > +}; > + > +/** > * Asymmetric Cryptographic Operation. > * > * Structure describing asymmetric crypto operation params. > -- > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v3 2/4] cryptodev: add ec points to sm2 op 2024-10-08 11:27 ` [EXTERNAL] " Akhil Goyal @ 2024-10-08 11:46 ` Kusztal, ArkadiuszX 2024-10-08 11:49 ` Akhil Goyal 0 siblings, 1 reply; 42+ messages in thread From: Kusztal, ArkadiuszX @ 2024-10-08 11:46 UTC (permalink / raw) To: Akhil Goyal, dev; +Cc: Dooley, Brian > -----Original Message----- > From: Akhil Goyal <gakhil@marvell.com> > Sent: Tuesday, October 8, 2024 1:28 PM > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org > Cc: Dooley, Brian <brian.dooley@intel.com> > Subject: RE: [EXTERNAL] [PATCH v3 2/4] cryptodev: add ec points to sm2 op > > > /** > > - * Asymmetric crypto transform data > > - * > > - * Structure describing asym xforms. > > - */ > > -struct rte_crypto_asym_xform { > > - struct rte_crypto_asym_xform *next; > > - /**< Pointer to next xform to set up xform chain.*/ > > - enum rte_crypto_asym_xform_type xform_type; > > - /**< Asymmetric crypto transform */ > > - > > - union { > > - struct rte_crypto_rsa_xform rsa; > > - /**< RSA xform parameters */ > > - > > - struct rte_crypto_modex_xform modex; > > - /**< Modular Exponentiation xform parameters */ > > - > > - struct rte_crypto_modinv_xform modinv; > > - /**< Modular Multiplicative Inverse xform parameters */ > > - > > - struct rte_crypto_dh_xform dh; > > - /**< DH xform parameters */ > > - > > - struct rte_crypto_dsa_xform dsa; > > - /**< DSA xform parameters */ > > - > > - struct rte_crypto_ec_xform ec; > > - /**< EC xform parameters, used by elliptic curve based > > - * operations. > > - */ > > - }; > > -}; > > - > Above change seems redundant. > It was commented on v2 as well. My apologies, I have missed it. The reason for this change was to keep a little bit better order in this file. So previously we had: -defines -enums -algorithms xforms -algorithms ops -asym xform -asym op now we have: -defines -enums -algorithms xforms -algorithms ops -asym xform -------------SM2 OP -asym op I know it is not a game changer, but it helps to keep a better order in this file. If this is problematic then I can revert it. > > > > -/** > > * SM2 operation params. > > */ > > struct rte_crypto_sm2_op_param { > > @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { > > * will be overwritten by the PMD with the decrypted length. > > */ > > > > - rte_crypto_param cipher; > > - /**< > > - * Pointer to input data > > - * - to be decrypted for SM2 private decrypt. > > - * > > - * Pointer to output data > > - * - for SM2 public encrypt. > > - * In this case the underlying array should have been allocated > > - * with enough memory to hold ciphertext output (at least X bytes > > - * for prime field curve of N bytes and for message M bytes, > > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > - * be overwritten by the PMD with the encrypted length. > > - */ > > + union { > > + rte_crypto_param cipher; > > + /**< > > + * Pointer to input data > > + * - to be decrypted for SM2 private decrypt. > > + * > > + * Pointer to output data > > + * - for SM2 public encrypt. > > + * In this case the underlying array should have been allocated > > + * with enough memory to hold ciphertext output (at least X > > bytes > > + * for prime field curve of N bytes and for message M bytes, > > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > > as > > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > + * be overwritten by the PMD with the encrypted length. 
> > + */ > > + struct { > > + struct rte_crypto_ec_point C1; > > + /**< > > + * This field is used only when PMD does not support > the > > full > > + * process of the SM2 encryption/decryption, but the > > elliptic > > + * curve part only. > > + * > > + * In the case of encryption, it is an output - point C1 = > > (x1,y1). > > + * In the case of decryption, if is an input - point C1 = > > (x1,y1) > > + * > > + */ > > + struct rte_crypto_ec_point kP; > > + /**< > > + * This field is used only when PMD does not support > the > > full > > + * process of the SM2 encryption/decryption, but the > > elliptic > > + * curve part only. > > + * > > + * It is an output in the encryption case, it is a point > > + * [k]P = (x2,y2) > > + */ > > + }; > > + }; > You may add a reference to the newly added feature flag here. +1 > > > > > > rte_crypto_uint id; > > /**< The SM2 id used by signer and verifier. */ @@ -698,6 +687,40 @@ > > struct rte_crypto_sm2_op_param { }; > > > > /** > > + * Asymmetric crypto transform data > > + * > > + * Structure describing asym xforms. > > + */ > > +struct rte_crypto_asym_xform { > > + struct rte_crypto_asym_xform *next; > > + /**< Pointer to next xform to set up xform chain.*/ > > + enum rte_crypto_asym_xform_type xform_type; > > + /**< Asymmetric crypto transform */ > > + > > + union { > > + struct rte_crypto_rsa_xform rsa; > > + /**< RSA xform parameters */ > > + > > + struct rte_crypto_modex_xform modex; > > + /**< Modular Exponentiation xform parameters */ > > + > > + struct rte_crypto_modinv_xform modinv; > > + /**< Modular Multiplicative Inverse xform parameters */ > > + > > + struct rte_crypto_dh_xform dh; > > + /**< DH xform parameters */ > > + > > + struct rte_crypto_dsa_xform dsa; > > + /**< DSA xform parameters */ > > + > > + struct rte_crypto_ec_xform ec; > > + /**< EC xform parameters, used by elliptic curve based > > + * operations. > > + */ > > + }; > > +}; > > + > > +/** > > * Asymmetric Cryptographic Operation. > > * > > * Structure describing asymmetric crypto operation params. > > -- > > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v3 2/4] cryptodev: add ec points to sm2 op 2024-10-08 11:46 ` Kusztal, ArkadiuszX @ 2024-10-08 11:49 ` Akhil Goyal 0 siblings, 0 replies; 42+ messages in thread From: Akhil Goyal @ 2024-10-08 11:49 UTC (permalink / raw) To: Kusztal, ArkadiuszX, dev; +Cc: Dooley, Brian > > > -----Original Message----- > > From: Akhil Goyal <gakhil@marvell.com> > > Sent: Tuesday, October 8, 2024 1:28 PM > > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org > > Cc: Dooley, Brian <brian.dooley@intel.com> > > Subject: RE: [EXTERNAL] [PATCH v3 2/4] cryptodev: add ec points to sm2 op > > > > > /** > > > - * Asymmetric crypto transform data > > > - * > > > - * Structure describing asym xforms. > > > - */ > > > -struct rte_crypto_asym_xform { > > > - struct rte_crypto_asym_xform *next; > > > - /**< Pointer to next xform to set up xform chain.*/ > > > - enum rte_crypto_asym_xform_type xform_type; > > > - /**< Asymmetric crypto transform */ > > > - > > > - union { > > > - struct rte_crypto_rsa_xform rsa; > > > - /**< RSA xform parameters */ > > > - > > > - struct rte_crypto_modex_xform modex; > > > - /**< Modular Exponentiation xform parameters */ > > > - > > > - struct rte_crypto_modinv_xform modinv; > > > - /**< Modular Multiplicative Inverse xform parameters */ > > > - > > > - struct rte_crypto_dh_xform dh; > > > - /**< DH xform parameters */ > > > - > > > - struct rte_crypto_dsa_xform dsa; > > > - /**< DSA xform parameters */ > > > - > > > - struct rte_crypto_ec_xform ec; > > > - /**< EC xform parameters, used by elliptic curve based > > > - * operations. > > > - */ > > > - }; > > > -}; > > > - > > Above change seems redundant. > > It was commented on v2 as well. > > My apologies, I have missed it. > > The reason for this change was to keep a little bit better order in this file. > > So previously we have had: > -defines > -enums > -algorithms xfroms > -algorithms ops > -asym xform > -asym op > > now we got: > -defines > -enums > -algorithms xfroms > -algorithms ops > -asym xform > -------------SM2 OP > -asym op > > I know it is not a game changer, but helps to keep a better order in this file. > If this is problematic then I can revert it. Got your intention, but this should not be part of this patch. It can be separate patch. > > > > > > > > -/** > > > * SM2 operation params. > > > */ > > > struct rte_crypto_sm2_op_param { > > > @@ -658,20 +624,43 @@ struct rte_crypto_sm2_op_param { > > > * will be overwritten by the PMD with the decrypted length. > > > */ > > > > > > - rte_crypto_param cipher; > > > - /**< > > > - * Pointer to input data > > > - * - to be decrypted for SM2 private decrypt. > > > - * > > > - * Pointer to output data > > > - * - for SM2 public encrypt. > > > - * In this case the underlying array should have been allocated > > > - * with enough memory to hold ciphertext output (at least X bytes > > > - * for prime field curve of N bytes and for message M bytes, > > > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > > > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > > - * be overwritten by the PMD with the encrypted length. > > > - */ > > > + union { > > > + rte_crypto_param cipher; > > > + /**< > > > + * Pointer to input data > > > + * - to be decrypted for SM2 private decrypt. > > > + * > > > + * Pointer to output data > > > + * - for SM2 public encrypt. 
> > > + * In this case the underlying array should have been allocated > > > + * with enough memory to hold ciphertext output (at least X > > > bytes > > > + * for prime field curve of N bytes and for message M bytes, > > > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > > > as > > > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > > + * be overwritten by the PMD with the encrypted length. > > > + */ > > > + struct { > > > + struct rte_crypto_ec_point C1; > > > + /**< > > > + * This field is used only when PMD does not support > > the > > > full > > > + * process of the SM2 encryption/decryption, but the > > > elliptic > > > + * curve part only. > > > + * > > > + * In the case of encryption, it is an output - point C1 = > > > (x1,y1). > > > + * In the case of decryption, if is an input - point C1 = > > > (x1,y1) > > > + * > > > + */ > > > + struct rte_crypto_ec_point kP; > > > + /**< > > > + * This field is used only when PMD does not support > > the > > > full > > > + * process of the SM2 encryption/decryption, but the > > > elliptic > > > + * curve part only. > > > + * > > > + * It is an output in the encryption case, it is a point > > > + * [k]P = (x2,y2) > > > + */ > > > + }; > > > + }; > > You may add a reference to the newly added feature flag here. > +1 > > > > > > > > > > rte_crypto_uint id; > > > /**< The SM2 id used by signer and verifier. */ @@ -698,6 +687,40 @@ > > > struct rte_crypto_sm2_op_param { }; > > > > > > /** > > > + * Asymmetric crypto transform data > > > + * > > > + * Structure describing asym xforms. > > > + */ > > > +struct rte_crypto_asym_xform { > > > + struct rte_crypto_asym_xform *next; > > > + /**< Pointer to next xform to set up xform chain.*/ > > > + enum rte_crypto_asym_xform_type xform_type; > > > + /**< Asymmetric crypto transform */ > > > + > > > + union { > > > + struct rte_crypto_rsa_xform rsa; > > > + /**< RSA xform parameters */ > > > + > > > + struct rte_crypto_modex_xform modex; > > > + /**< Modular Exponentiation xform parameters */ > > > + > > > + struct rte_crypto_modinv_xform modinv; > > > + /**< Modular Multiplicative Inverse xform parameters */ > > > + > > > + struct rte_crypto_dh_xform dh; > > > + /**< DH xform parameters */ > > > + > > > + struct rte_crypto_dsa_xform dsa; > > > + /**< DSA xform parameters */ > > > + > > > + struct rte_crypto_ec_xform ec; > > > + /**< EC xform parameters, used by elliptic curve based > > > + * operations. > > > + */ > > > + }; > > > +}; > > > + > > > +/** > > > * Asymmetric Cryptographic Operation. > > > * > > > * Structure describing asymmetric crypto operation params. > > > -- > > > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v3 3/4] crypto/qat: add sm2 encryption/decryption function 2024-10-08 6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal 2024-10-08 6:28 ` [PATCH v3 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal @ 2024-10-08 6:28 ` Arkadiusz Kusztal 2024-10-08 6:28 ` [PATCH v3 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal ` (2 subsequent siblings) 4 siblings, 0 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 6:28 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal This commit adds SM2 elliptic curve based asymmetric encryption and decryption to the Intel QuickAssist Technology PMD. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- doc/guides/cryptodevs/features/qat.ini | 1 + doc/guides/rel_notes/release_24_11.rst | 4 + drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + drivers/common/qat/qat_adf/qat_pke.h | 20 ++++ drivers/crypto/qat/dev/qat_asym_pmd_gen1.c | 3 +- drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++++++++- 6 files changed, 164 insertions(+), 7 deletions(-) diff --git a/doc/guides/cryptodevs/features/qat.ini b/doc/guides/cryptodevs/features/qat.ini index f41d29158f..219dd1e011 100644 --- a/doc/guides/cryptodevs/features/qat.ini +++ b/doc/guides/cryptodevs/features/qat.ini @@ -71,6 +71,7 @@ ZUC EIA3 = Y AES CMAC (128) = Y SM3 = Y SM3 HMAC = Y +SM2 = Y ; ; Supported AEAD algorithms of the 'qat' crypto driver. diff --git a/doc/guides/rel_notes/release_24_11.rst b/doc/guides/rel_notes/release_24_11.rst index 0ff70d9057..85f4a2dd97 100644 --- a/doc/guides/rel_notes/release_24_11.rst +++ b/doc/guides/rel_notes/release_24_11.rst @@ -55,6 +55,10 @@ New Features Also, make sure to start the actual text at the margin. ======================================================= +* **Updated the QuickAssist Technology (QAT) Crypto PMD.** + + * Added SM2 encryption and decryption algorithms. 
+ Removed Items ------------- diff --git a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h index 630c6e1a9b..aa49612ca1 100644 --- a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h +++ b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h @@ -1542,6 +1542,9 @@ icp_qat_fw_mmp_ecdsa_verify_gfp_521_input::in in @endlink * @li no output parameters */ +#define PKE_ECSM2_ENCRYPTION 0x25221720 +#define PKE_ECSM2_DECRYPTION 0x201716e6 + #define PKE_LIVENESS 0x00000001 /**< Functionality ID for PKE_LIVENESS * @li 0 input parameter(s) diff --git a/drivers/common/qat/qat_adf/qat_pke.h b/drivers/common/qat/qat_adf/qat_pke.h index f88932a275..ac051e965d 100644 --- a/drivers/common/qat/qat_adf/qat_pke.h +++ b/drivers/common/qat/qat_adf/qat_pke.h @@ -334,4 +334,24 @@ get_sm2_ecdsa_verify_function(void) return qat_function; } +static struct qat_asym_function +get_sm2_encryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_ENCRYPTION, 32 + }; + + return qat_function; +} + +static struct qat_asym_function +get_sm2_decryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_DECRYPTION, 32 + }; + + return qat_function; +} + #endif diff --git a/drivers/crypto/qat/dev/qat_asym_pmd_gen1.c b/drivers/crypto/qat/dev/qat_asym_pmd_gen1.c index 67b1892c32..f991729dd9 100644 --- a/drivers/crypto/qat/dev/qat_asym_pmd_gen1.c +++ b/drivers/crypto/qat/dev/qat_asym_pmd_gen1.c @@ -87,7 +87,8 @@ qat_asym_crypto_feature_flags_get_gen1( RTE_CRYPTODEV_FF_HW_ACCELERATED | RTE_CRYPTODEV_FF_ASYM_SESSIONLESS | RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP | - RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT; + RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT | + RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2; return feature_flags; } diff --git a/drivers/crypto/qat/qat_asym.c b/drivers/crypto/qat/qat_asym.c index 491f5ecd5b..e1ada8629e 100644 --- a/drivers/crypto/qat/qat_asym.c +++ b/drivers/crypto/qat/qat_asym.c @@ -932,6 +932,15 @@ sm2_ecdsa_sign_set_input(struct icp_qat_fw_pke_request *qat_req, qat_req->input_param_count = 3; qat_req->output_param_count = 2; + HEXDUMP("SM2 K test", asym_op->sm2.k.data, + cookie->alg_bytesize); + HEXDUMP("SM2 K", cookie->input_array[0], + cookie->alg_bytesize); + HEXDUMP("SM2 msg", cookie->input_array[1], + cookie->alg_bytesize); + HEXDUMP("SM2 pkey", cookie->input_array[2], + cookie->alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; } @@ -983,6 +992,114 @@ sm2_ecdsa_sign_collect(struct rte_crypto_asym_op *asym_op, } static int +sm2_encryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_encryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(asym_op->sm2.k, qat_func_alignsize, 0); + SET_PKE_LN(xform->ec.q.x, qat_func_alignsize, 1); + SET_PKE_LN(xform->ec.q.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + qat_req->output_param_count = 4; + + HEXDUMP("SM2 K", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("SM2 Q.x", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("SM2 Q.y", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + +static uint8_t +sm2_encryption_collect(struct 
rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.C1.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.C1.y.data, cookie->output_array[1], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[2], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[3], alg_bytesize); + asym_op->sm2.C1.x.length = alg_bytesize; + asym_op->sm2.C1.y.length = alg_bytesize; + asym_op->sm2.kP.x.length = alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("C1[x1]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("C1[y]", cookie->output_array[1], + alg_bytesize); + HEXDUMP("kP[x]", cookie->output_array[2], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[3], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static int +sm2_decryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_decryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(xform->ec.pkey, qat_func_alignsize, 0); + SET_PKE_LN(asym_op->sm2.C1.x, qat_func_alignsize, 1); + SET_PKE_LN(asym_op->sm2.C1.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + qat_req->output_param_count = 2; + + HEXDUMP("d", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("C1[x]", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("C1[y]", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static uint8_t +sm2_decryption_collect(struct rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[1], alg_bytesize); + asym_op->sm2.kP.x.length = alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("kP[x]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[1], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + +static int asym_set_input(struct icp_qat_fw_pke_request *qat_req, struct qat_asym_op_cookie *cookie, const struct rte_crypto_asym_op *asym_op, @@ -1014,14 +1131,20 @@ asym_set_input(struct icp_qat_fw_pke_request *qat_req, asym_op, xform); } case RTE_CRYPTO_ASYM_XFORM_SM2: - if (asym_op->sm2.op_type == - RTE_CRYPTO_ASYM_OP_VERIFY) { + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) { + return sm2_encryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) { + return sm2_decryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) { return sm2_ecdsa_verify_set_input(qat_req, cookie, asym_op, xform); - } else { + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_SIGN) { return sm2_ecdsa_sign_set_input(qat_req, cookie, asym_op, xform); } + break; default: QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform"); return -EINVAL; @@ -1113,7 +1236,13 @@ qat_asym_collect_response(struct rte_crypto_op *op, case RTE_CRYPTO_ASYM_XFORM_ECDH: 
return ecdh_collect(asym_op, cookie); case RTE_CRYPTO_ASYM_XFORM_SM2: - return sm2_ecdsa_sign_collect(asym_op, cookie); + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) + return sm2_encryption_collect(asym_op, cookie); + else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) + return sm2_decryption_collect(asym_op, cookie); + else + return sm2_ecdsa_sign_collect(asym_op, cookie); + default: QAT_LOG(ERR, "Not supported xform type"); return RTE_CRYPTO_OP_STATUS_ERROR; @@ -1385,9 +1514,8 @@ qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused, case RTE_CRYPTO_ASYM_XFORM_ECDSA: case RTE_CRYPTO_ASYM_XFORM_ECPM: case RTE_CRYPTO_ASYM_XFORM_ECDH: - session_set_ec(qat_session, xform); - break; case RTE_CRYPTO_ASYM_XFORM_SM2: + session_set_ec(qat_session, xform); break; default: ret = -ENOTSUP; -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v3 4/4] app/test: add test sm2 C1/Kp test cases 2024-10-08 6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal 2024-10-08 6:28 ` [PATCH v3 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal 2024-10-08 6:28 ` [PATCH v3 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal @ 2024-10-08 6:28 ` Arkadiusz Kusztal 2024-10-08 15:40 ` Dooley, Brian 2024-10-08 11:46 ` [EXTERNAL] [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Akhil Goyal 2024-10-08 18:14 ` [PATCH v4 0/4] add ec points to sm2 op Arkadiusz Kusztal 4 siblings, 1 reply; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 6:28 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal This commit adds tests cases to be used when C1 or kP elliptic curve points need to be computed. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- app/test/test_cryptodev_asym.c | 148 ++++++++++++++++++++++++++++- app/test/test_cryptodev_sm2_test_vectors.h | 112 +++++++++++++++++++++- 2 files changed, 256 insertions(+), 4 deletions(-) diff --git a/app/test/test_cryptodev_asym.c b/app/test/test_cryptodev_asym.c index f0b5d38543..cb28179562 100644 --- a/app/test/test_cryptodev_asym.c +++ b/app/test/test_cryptodev_asym.c @@ -2635,6 +2635,8 @@ test_sm2_sign(void) asym_op->sm2.k.data = input_params.k.data; asym_op->sm2.k.length = input_params.k.length; } + asym_op->sm2.k.data = input_params.k.data; + asym_op->sm2.k.length = input_params.k.length; /* Init out buf */ asym_op->sm2.r.data = output_buf_r; @@ -3184,7 +3186,7 @@ static int send_one(void) ticks++; if (ticks >= DEQ_TIMEOUT) { RTE_LOG(ERR, USER1, - "line %u FAILED: Cannot dequeue the crypto op on device %d", + "line %u FAILED: Cannot dequeue the crypto op on device, timeout %d", __LINE__, params->valid_devs[0]); return TEST_FAILED; } @@ -3489,6 +3491,142 @@ kat_rsa_decrypt_crt(const void *data) return 0; } +static int +test_sm2_partial_encryption(const void *data) +{ + struct rte_crypto_asym_xform xform = { 0 }; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_C1_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_C1_y1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + const struct rte_cryptodev_asymmetric_xform_capability *capa; + struct rte_cryptodev_asym_capability_idx idx; + struct rte_cryptodev_info dev_info; + + rte_cryptodev_info_get(dev_id, &dev_info); + if (!(dev_info.feature_flags & + RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2)) { + RTE_LOG(INFO, USER1, + "Device doesn't support partial SM2. 
Test Skipped\n"); + return TEST_SKIPPED; + } + + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); + if (capa == NULL) + return TEST_SKIPPED; + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.curve_id = RTE_CRYPTO_EC_GROUP_SM2; + xform.ec.q = test_vector->pubkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT; + self->op->asym->sm2.k = test_vector->k; + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.C1.x.data = result_C1_x1; + self->op->asym->sm2.C1.y.data = result_C1_y1; + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "C1[x]", self->op->asym->sm2.C1.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "C1[y]", self->op->asym->sm2.C1.y.data, + self->op->asym->sm2.C1.y.length); + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + self->op->asym->sm2.kP.x.length); + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.kP.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.x.data, + self->op->asym->sm2.C1.x.data, + test_vector->C1.x.length, + "Incorrect value of C1[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.y.data, + self->op->asym->sm2.C1.y.data, + test_vector->C1.y.length, + "Incorrect value of C1[y]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return TEST_SUCCESS; +} + +static int +test_sm2_partial_decryption(const void *data) +{ + struct rte_crypto_asym_xform xform = {}; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + const struct rte_cryptodev_asymmetric_xform_capability *capa; + struct rte_cryptodev_asym_capability_idx idx; + struct rte_cryptodev_info dev_info; + + rte_cryptodev_info_get(dev_id, &dev_info); + if (!(dev_info.feature_flags & + RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2)) { + RTE_LOG(INFO, USER1, + "Device doesn't support partial SM2. 
Test Skipped\n"); + return TEST_SKIPPED; + } + + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); + if (capa == NULL) + return TEST_SKIPPED; + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.pkey = test_vector->pkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT; + self->op->asym->sm2.C1 = test_vector->C1; + + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.C1.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return 0; +} + static struct unit_test_suite cryptodev_openssl_asym_testsuite = { .suite_name = "Crypto Device OPENSSL ASYM Unit Test Suite", .setup = testsuite_setup, @@ -3553,6 +3691,14 @@ static struct unit_test_suite cryptodev_qat_asym_testsuite = { .teardown = testsuite_teardown, .unit_test_cases = { TEST_CASE_NAMED_WITH_DATA( + "SM2 encryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_partial_encryption, &sm2_enc_hw_t1), + TEST_CASE_NAMED_WITH_DATA( + "SM2 decryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_partial_decryption, &sm2_enc_hw_t1), + TEST_CASE_NAMED_WITH_DATA( "Modular Exponentiation (mod=128, base=20, exp=3, res=128)", ut_setup_asym, ut_teardown_asym, modular_exponentiation, &modex_test_case_m128_b20_e3), diff --git a/app/test/test_cryptodev_sm2_test_vectors.h b/app/test/test_cryptodev_sm2_test_vectors.h index 41f5f7074a..92f7e77671 100644 --- a/app/test/test_cryptodev_sm2_test_vectors.h +++ b/app/test/test_cryptodev_sm2_test_vectors.h @@ -8,19 +8,125 @@ #include "rte_crypto_asym.h" struct crypto_testsuite_sm2_params { - rte_crypto_param pubkey_qx; - rte_crypto_param pubkey_qy; + union { + struct { + rte_crypto_param pubkey_qx; + rte_crypto_param pubkey_qy; + }; + struct rte_crypto_ec_point pubkey; + }; rte_crypto_param pkey; rte_crypto_param k; rte_crypto_param sign_r; rte_crypto_param sign_s; rte_crypto_param id; - rte_crypto_param cipher; + union { + rte_crypto_param cipher; + struct { + struct rte_crypto_ec_point C1; + struct rte_crypto_ec_point kP; + }; + }; rte_crypto_param message; rte_crypto_param digest; int curve; }; +uint8_t sm2_enc_pub_x_t1[] = { + 0x26, 0xf1, 0xf3, 0xef, 0x12, 0x27, 0x85, 0xd1, + 0x7d, 0x38, 0x70, 0xc2, 0x43, 0x46, 0x50, 0x36, + 0x3f, 0xdf, 0x4b, 0x2f, 0x45, 0x0e, 0x8e, 0xd1, + 0xb6, 0x0f, 0xdc, 0x1f, 0xc6, 0xf0, 0x19, 0xab +}; +uint8_t sm2_enc_pub_y_t1[] = { + 0xd9, 0x19, 0x8b, 0xdb, 0xef, 0xa5, 0x84, 0x76, + 0xec, 0x82, 0x25, 0x12, 0x5b, 0x8c, 0xe3, 0xe1, + 0x0a, 0x10, 0x0d, 0xc6, 0x97, 0x6c, 0xc1, 0x89, + 0xd9, 0x6d, 0xa6, 0x88, 0x9e, 0xbc, 0xd3, 0x7a +}; +uint8_t sm2_k_t1[] = { + 0x12, 0x34, 0x56, 0x78, 0xB9, 0x6E, 0x5A, 0xF7, + 0x0B, 0xD4, 0x80, 0xB4, 0x72, 0x40, 0x9A, 0x9A, + 0x32, 0x72, 0x57, 0xF1, 
0xEB, 0xB7, 0x3F, 0x5B, + 0x07, 0x33, 0x54, 0xB2, 0x48, 0x66, 0x85, 0x63 +}; + +uint8_t sm2_C1_x_t1[] = { + 0x15, 0xf6, 0xb7, 0x49, 0x00, 0x39, 0x73, 0x9d, + 0x5b, 0xb3, 0xd3, 0xe9, 0x1d, 0xe4, 0xc8, 0xbd, + 0x08, 0xe3, 0x6a, 0x22, 0xff, 0x1a, 0xbf, 0xdc, + 0x75, 0x6b, 0x12, 0x85, 0x81, 0xc5, 0x8b, 0xcf +}; + +uint8_t sm2_C1_y_t1[] = { + 0x6a, 0x92, 0xd4, 0xd8, 0x13, 0xec, 0x8f, 0x9a, + 0x9d, 0xbe, 0x51, 0x47, 0x6f, 0x54, 0xc5, 0x41, + 0x98, 0xf5, 0x5f, 0x83, 0xce, 0x1c, 0x18, 0x1a, + 0x48, 0xbd, 0xeb, 0x38, 0x13, 0x67, 0x0d, 0x06 +}; + +uint8_t sm2_kP_x_t1[] = { + 0x6b, 0xfb, 0x9a, 0xcb, 0xc6, 0xb6, 0x36, 0x31, + 0x0f, 0xd1, 0xdd, 0x9c, 0x9f, 0x17, 0x5f, 0x3f, + 0x68, 0x13, 0x96, 0xd2, 0x54, 0x5b, 0xa6, 0x19, + 0x78, 0x1f, 0x87, 0x3d, 0x81, 0xc3, 0x21, 0x01 +}; + +uint8_t sm2_kP_y_t1[] = { + 0xa4, 0x08, 0xf3, 0x74, 0x35, 0x51, 0x8c, 0x81, + 0x06, 0x4c, 0x8f, 0x31, 0x49, 0xe3, 0x5b, 0x4d, + 0xfc, 0x3d, 0x19, 0xac, 0x7d, 0x07, 0xd0, 0x9a, + 0x99, 0x5a, 0x25, 0x16, 0x66, 0xff, 0x41, 0x3c +}; + +uint8_t sm2_kP_d_t1[] = { + 0x6F, 0xCB, 0xA2, 0xEF, 0x9A, 0xE0, 0xAB, 0x90, + 0x2B, 0xC3, 0xBD, 0xE3, 0xFF, 0x91, 0x5D, 0x44, + 0xBA, 0x4C, 0xC7, 0x8F, 0x88, 0xE2, 0xF8, 0xE7, + 0xF8, 0x99, 0x6D, 0x3B, 0x8C, 0xCE, 0xED, 0xEE +}; + +struct crypto_testsuite_sm2_params sm2_enc_hw_t1 = { + .k = { + .data = sm2_k_t1, + .length = sizeof(sm2_k_t1) + }, + .pubkey = { + .x = { + .data = sm2_enc_pub_x_t1, + .length = sizeof(sm2_enc_pub_x_t1) + }, + .y = { + .data = sm2_enc_pub_y_t1, + .length = sizeof(sm2_enc_pub_y_t1) + } + }, + .C1 = { + .x = { + .data = sm2_C1_x_t1, + .length = sizeof(sm2_C1_x_t1) + }, + .y = { + .data = sm2_C1_y_t1, + .length = sizeof(sm2_C1_y_t1) + } + }, + .kP = { + .x = { + .data = sm2_kP_x_t1, + .length = sizeof(sm2_kP_x_t1) + }, + .y = { + .data = sm2_kP_y_t1, + .length = sizeof(sm2_kP_y_t1) + } + }, + .pkey = { + .data = sm2_kP_d_t1, + .length = sizeof(sm2_kP_d_t1) + } +}; + static uint8_t fp256_pkey[] = { 0x77, 0x84, 0x35, 0x65, 0x4c, 0x7a, 0x6d, 0xb1, 0x1e, 0x63, 0x0b, 0x41, 0x97, 0x36, 0x04, 0xf4, -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [PATCH v3 4/4] app/test: add test sm2 C1/Kp test cases 2024-10-08 6:28 ` [PATCH v3 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal @ 2024-10-08 15:40 ` Dooley, Brian 0 siblings, 0 replies; 42+ messages in thread From: Dooley, Brian @ 2024-10-08 15:40 UTC (permalink / raw) To: Kusztal, ArkadiuszX, dev; +Cc: gakhil > -----Original Message----- > From: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com> > Sent: Tuesday, October 8, 2024 7:29 AM > To: dev@dpdk.org > Cc: gakhil@marvell.com; Dooley, Brian <brian.dooley@intel.com>; Kusztal, > ArkadiuszX <arkadiuszx.kusztal@intel.com> > Subject: [PATCH v3 4/4] app/test: add test sm2 C1/Kp test cases > > This commit adds tests cases to be used when C1 or kP elliptic curve points > need to be computed. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > app/test/test_cryptodev_asym.c | 148 > ++++++++++++++++++++++++++++- > app/test/test_cryptodev_sm2_test_vectors.h | 112 > +++++++++++++++++++++- > 2 files changed, 256 insertions(+), 4 deletions(-) > > diff --git a/app/test/test_cryptodev_asym.c > b/app/test/test_cryptodev_asym.c index f0b5d38543..cb28179562 100644 > --- a/app/test/test_cryptodev_asym.c > +++ b/app/test/test_cryptodev_asym.c > @@ -2635,6 +2635,8 @@ test_sm2_sign(void) > asym_op->sm2.k.data = input_params.k.data; > asym_op->sm2.k.length = input_params.k.length; > } > + asym_op->sm2.k.data = input_params.k.data; > + asym_op->sm2.k.length = input_params.k.length; > > /* Init out buf */ > asym_op->sm2.r.data = output_buf_r; > @@ -3184,7 +3186,7 @@ static int send_one(void) > ticks++; > if (ticks >= DEQ_TIMEOUT) { > RTE_LOG(ERR, USER1, > - "line %u FAILED: Cannot dequeue the crypto > op on device %d", > + "line %u FAILED: Cannot dequeue the crypto > op on device, timeout > +%d", > __LINE__, params->valid_devs[0]); > return TEST_FAILED; > } > @@ -3489,6 +3491,142 @@ kat_rsa_decrypt_crt(const void *data) > return 0; > } > > +static int > +test_sm2_partial_encryption(const void *data) { > + struct rte_crypto_asym_xform xform = { 0 }; > + const uint8_t dev_id = params->valid_devs[0]; > + const struct crypto_testsuite_sm2_params *test_vector = data; > + uint8_t result_C1_x1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_C1_y1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; > + const struct rte_cryptodev_asymmetric_xform_capability *capa; > + struct rte_cryptodev_asym_capability_idx idx; > + struct rte_cryptodev_info dev_info; > + > + rte_cryptodev_info_get(dev_id, &dev_info); > + if (!(dev_info.feature_flags & > + RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2)) { > + RTE_LOG(INFO, USER1, > + "Device doesn't support partial SM2. 
Test > Skipped\n"); > + return TEST_SKIPPED; > + } > + > + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; > + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); > + if (capa == NULL) > + return TEST_SKIPPED; > + > + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; > + xform.ec.curve_id = RTE_CRYPTO_EC_GROUP_SM2; > + xform.ec.q = test_vector->pubkey; > + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT; > + self->op->asym->sm2.k = test_vector->k; > + if (rte_cryptodev_asym_session_create(dev_id, &xform, > + params->session_mpool, &self->sess) < 0) { > + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation > failed", > + __LINE__); > + return TEST_FAILED; > + } > + rte_crypto_op_attach_asym_session(self->op, self->sess); > + > + self->op->asym->sm2.C1.x.data = result_C1_x1; > + self->op->asym->sm2.C1.y.data = result_C1_y1; > + self->op->asym->sm2.kP.x.data = result_kP_x1; > + self->op->asym->sm2.kP.y.data = result_kP_y1; > + TEST_ASSERT_SUCCESS(send_one(), > + "Failed to process crypto op"); > + > + debug_hexdump(stdout, "C1[x]", self->op->asym->sm2.C1.x.data, > + self->op->asym->sm2.C1.x.length); > + debug_hexdump(stdout, "C1[y]", self->op->asym->sm2.C1.y.data, > + self->op->asym->sm2.C1.y.length); > + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, > + self->op->asym->sm2.kP.x.length); > + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, > + self->op->asym->sm2.kP.y.length); > + > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.x.data, > + self->op->asym->sm2.C1.x.data, > + test_vector->C1.x.length, > + "Incorrect value of C1[x]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.y.data, > + self->op->asym->sm2.C1.y.data, > + test_vector->C1.y.length, > + "Incorrect value of C1[y]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, > + self->op->asym->sm2.kP.x.data, > + test_vector->kP.x.length, > + "Incorrect value of kP[x]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, > + self->op->asym->sm2.kP.y.data, > + test_vector->kP.y.length, > + "Incorrect value of kP[y]\n"); > + > + return TEST_SUCCESS; > +} > + > +static int > +test_sm2_partial_decryption(const void *data) { > + struct rte_crypto_asym_xform xform = {}; > + const uint8_t dev_id = params->valid_devs[0]; > + const struct crypto_testsuite_sm2_params *test_vector = data; > + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; > + const struct rte_cryptodev_asymmetric_xform_capability *capa; > + struct rte_cryptodev_asym_capability_idx idx; > + struct rte_cryptodev_info dev_info; > + > + rte_cryptodev_info_get(dev_id, &dev_info); > + if (!(dev_info.feature_flags & > + RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2)) { > + RTE_LOG(INFO, USER1, > + "Device doesn't support partial SM2. 
Test > Skipped\n"); > + return TEST_SKIPPED; > + } > + > + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; > + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); > + if (capa == NULL) > + return TEST_SKIPPED; > + > + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; > + xform.ec.pkey = test_vector->pkey; > + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT; > + self->op->asym->sm2.C1 = test_vector->C1; > + > + if (rte_cryptodev_asym_session_create(dev_id, &xform, > + params->session_mpool, &self->sess) < 0) { > + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation > failed", > + __LINE__); > + return TEST_FAILED; > + } > + rte_crypto_op_attach_asym_session(self->op, self->sess); > + > + self->op->asym->sm2.kP.x.data = result_kP_x1; > + self->op->asym->sm2.kP.y.data = result_kP_y1; > + TEST_ASSERT_SUCCESS(send_one(), > + "Failed to process crypto op"); > + > + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, > + self->op->asym->sm2.C1.x.length); > + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, > + self->op->asym->sm2.C1.y.length); > + > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, > + self->op->asym->sm2.kP.x.data, > + test_vector->kP.x.length, > + "Incorrect value of kP[x]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, > + self->op->asym->sm2.kP.y.data, > + test_vector->kP.y.length, > + "Incorrect value of kP[y]\n"); > + > + return 0; > +} > + > static struct unit_test_suite cryptodev_openssl_asym_testsuite = { > .suite_name = "Crypto Device OPENSSL ASYM Unit Test Suite", > .setup = testsuite_setup, > @@ -3553,6 +3691,14 @@ static struct unit_test_suite > cryptodev_qat_asym_testsuite = { > .teardown = testsuite_teardown, > .unit_test_cases = { > TEST_CASE_NAMED_WITH_DATA( > + "SM2 encryption - test case 1", > + ut_setup_asym, ut_teardown_asym, > + test_sm2_partial_encryption, &sm2_enc_hw_t1), > + TEST_CASE_NAMED_WITH_DATA( > + "SM2 decryption - test case 1", > + ut_setup_asym, ut_teardown_asym, > + test_sm2_partial_decryption, &sm2_enc_hw_t1), > + TEST_CASE_NAMED_WITH_DATA( > "Modular Exponentiation (mod=128, base=20, exp=3, > res=128)", > ut_setup_asym, ut_teardown_asym, > modular_exponentiation, > &modex_test_case_m128_b20_e3), diff --git > a/app/test/test_cryptodev_sm2_test_vectors.h > b/app/test/test_cryptodev_sm2_test_vectors.h > index 41f5f7074a..92f7e77671 100644 > --- a/app/test/test_cryptodev_sm2_test_vectors.h > +++ b/app/test/test_cryptodev_sm2_test_vectors.h > @@ -8,19 +8,125 @@ > #include "rte_crypto_asym.h" > > struct crypto_testsuite_sm2_params { > - rte_crypto_param pubkey_qx; > - rte_crypto_param pubkey_qy; > + union { > + struct { > + rte_crypto_param pubkey_qx; > + rte_crypto_param pubkey_qy; > + }; > + struct rte_crypto_ec_point pubkey; > + }; > rte_crypto_param pkey; > rte_crypto_param k; > rte_crypto_param sign_r; > rte_crypto_param sign_s; > rte_crypto_param id; > - rte_crypto_param cipher; > + union { > + rte_crypto_param cipher; > + struct { > + struct rte_crypto_ec_point C1; > + struct rte_crypto_ec_point kP; > + }; > + }; > rte_crypto_param message; > rte_crypto_param digest; > int curve; > }; > > +uint8_t sm2_enc_pub_x_t1[] = { > + 0x26, 0xf1, 0xf3, 0xef, 0x12, 0x27, 0x85, 0xd1, > + 0x7d, 0x38, 0x70, 0xc2, 0x43, 0x46, 0x50, 0x36, > + 0x3f, 0xdf, 0x4b, 0x2f, 0x45, 0x0e, 0x8e, 0xd1, > + 0xb6, 0x0f, 0xdc, 0x1f, 0xc6, 0xf0, 0x19, 0xab }; uint8_t > +sm2_enc_pub_y_t1[] = { > + 0xd9, 0x19, 0x8b, 0xdb, 0xef, 0xa5, 0x84, 0x76, > + 0xec, 0x82, 0x25, 0x12, 0x5b, 0x8c, 0xe3, 0xe1, > + 0x0a, 0x10, 0x0d, 0xc6, 0x97, 
0x6c, 0xc1, 0x89, > + 0xd9, 0x6d, 0xa6, 0x88, 0x9e, 0xbc, 0xd3, 0x7a }; uint8_t sm2_k_t1[] > = > +{ > + 0x12, 0x34, 0x56, 0x78, 0xB9, 0x6E, 0x5A, 0xF7, > + 0x0B, 0xD4, 0x80, 0xB4, 0x72, 0x40, 0x9A, 0x9A, > + 0x32, 0x72, 0x57, 0xF1, 0xEB, 0xB7, 0x3F, 0x5B, > + 0x07, 0x33, 0x54, 0xB2, 0x48, 0x66, 0x85, 0x63 }; > + > +uint8_t sm2_C1_x_t1[] = { > + 0x15, 0xf6, 0xb7, 0x49, 0x00, 0x39, 0x73, 0x9d, > + 0x5b, 0xb3, 0xd3, 0xe9, 0x1d, 0xe4, 0xc8, 0xbd, > + 0x08, 0xe3, 0x6a, 0x22, 0xff, 0x1a, 0xbf, 0xdc, > + 0x75, 0x6b, 0x12, 0x85, 0x81, 0xc5, 0x8b, 0xcf }; > + > +uint8_t sm2_C1_y_t1[] = { > + 0x6a, 0x92, 0xd4, 0xd8, 0x13, 0xec, 0x8f, 0x9a, > + 0x9d, 0xbe, 0x51, 0x47, 0x6f, 0x54, 0xc5, 0x41, > + 0x98, 0xf5, 0x5f, 0x83, 0xce, 0x1c, 0x18, 0x1a, > + 0x48, 0xbd, 0xeb, 0x38, 0x13, 0x67, 0x0d, 0x06 }; > + > +uint8_t sm2_kP_x_t1[] = { > + 0x6b, 0xfb, 0x9a, 0xcb, 0xc6, 0xb6, 0x36, 0x31, > + 0x0f, 0xd1, 0xdd, 0x9c, 0x9f, 0x17, 0x5f, 0x3f, > + 0x68, 0x13, 0x96, 0xd2, 0x54, 0x5b, 0xa6, 0x19, > + 0x78, 0x1f, 0x87, 0x3d, 0x81, 0xc3, 0x21, 0x01 }; > + > +uint8_t sm2_kP_y_t1[] = { > + 0xa4, 0x08, 0xf3, 0x74, 0x35, 0x51, 0x8c, 0x81, > + 0x06, 0x4c, 0x8f, 0x31, 0x49, 0xe3, 0x5b, 0x4d, > + 0xfc, 0x3d, 0x19, 0xac, 0x7d, 0x07, 0xd0, 0x9a, > + 0x99, 0x5a, 0x25, 0x16, 0x66, 0xff, 0x41, 0x3c }; > + > +uint8_t sm2_kP_d_t1[] = { > + 0x6F, 0xCB, 0xA2, 0xEF, 0x9A, 0xE0, 0xAB, 0x90, > + 0x2B, 0xC3, 0xBD, 0xE3, 0xFF, 0x91, 0x5D, 0x44, > + 0xBA, 0x4C, 0xC7, 0x8F, 0x88, 0xE2, 0xF8, 0xE7, > + 0xF8, 0x99, 0x6D, 0x3B, 0x8C, 0xCE, 0xED, 0xEE }; > + > +struct crypto_testsuite_sm2_params sm2_enc_hw_t1 = { > + .k = { > + .data = sm2_k_t1, > + .length = sizeof(sm2_k_t1) > + }, > + .pubkey = { > + .x = { > + .data = sm2_enc_pub_x_t1, > + .length = sizeof(sm2_enc_pub_x_t1) > + }, > + .y = { > + .data = sm2_enc_pub_y_t1, > + .length = sizeof(sm2_enc_pub_y_t1) > + } > + }, > + .C1 = { > + .x = { > + .data = sm2_C1_x_t1, > + .length = sizeof(sm2_C1_x_t1) > + }, > + .y = { > + .data = sm2_C1_y_t1, > + .length = sizeof(sm2_C1_y_t1) > + } > + }, > + .kP = { > + .x = { > + .data = sm2_kP_x_t1, > + .length = sizeof(sm2_kP_x_t1) > + }, > + .y = { > + .data = sm2_kP_y_t1, > + .length = sizeof(sm2_kP_y_t1) > + } > + }, > + .pkey = { > + .data = sm2_kP_d_t1, > + .length = sizeof(sm2_kP_d_t1) > + } > +}; > + > static uint8_t fp256_pkey[] = { > 0x77, 0x84, 0x35, 0x65, 0x4c, 0x7a, 0x6d, 0xb1, > 0x1e, 0x63, 0x0b, 0x41, 0x97, 0x36, 0x04, 0xf4, > -- > 2.13.6 Series-Acked-by: Brian Dooley <brian.dooley@intel.com> ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v3 1/4] cryptodev: add partial sm2 feature flag 2024-10-08 6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal ` (2 preceding siblings ...) 2024-10-08 6:28 ` [PATCH v3 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal @ 2024-10-08 11:46 ` Akhil Goyal 2024-10-08 11:48 ` Kusztal, ArkadiuszX 2024-10-08 18:14 ` [PATCH v4 0/4] add ec points to sm2 op Arkadiusz Kusztal 4 siblings, 1 reply; 42+ messages in thread From: Akhil Goyal @ 2024-10-08 11:46 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > Due to complex ways of handling asymmetric cryptography algorithms, > capabilities may differ between hardware and software PMDs, > or even between hardware PMDs. One of the examples are algorithms that > need an additional round of hashing, like SM2. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > lib/cryptodev/rte_cryptodev.h | 2 ++ > 1 file changed, 2 insertions(+) > > diff --git a/lib/cryptodev/rte_cryptodev.h b/lib/cryptodev/rte_cryptodev.h > index bec947f6d5..c0e816b17f 100644 > --- a/lib/cryptodev/rte_cryptodev.h > +++ b/lib/cryptodev/rte_cryptodev.h > @@ -554,6 +554,8 @@ rte_cryptodev_asym_get_xform_string(enum > rte_crypto_asym_xform_type xform_enum); > /**< Support inner checksum computation/verification */ > #define RTE_CRYPTODEV_FF_SECURITY_RX_INJECT (1ULL << 28) > /**< Support Rx injection after security processing */ > +#define RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2 (1ULL << 29) > +/**< Support the elliptic curve part only in SM2 */ > This would need an update in doc/guides/cryptodevs/features/default.ini as well. However, it would be better to use the capability thing. https://patches.dpdk.org/project/dpdk/patch/20241004181255.916-1-gmuthukrishn@marvell.com/ This patch add SM2 op specific capability. It would be better to add this partial SM2 in the enum rte_crypto_sm2_op_capa defined in the above patch. ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v3 1/4] cryptodev: add partial sm2 feature flag 2024-10-08 11:46 ` [EXTERNAL] [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Akhil Goyal @ 2024-10-08 11:48 ` Kusztal, ArkadiuszX 0 siblings, 0 replies; 42+ messages in thread From: Kusztal, ArkadiuszX @ 2024-10-08 11:48 UTC (permalink / raw) To: Akhil Goyal, dev; +Cc: Dooley, Brian > -----Original Message----- > From: Akhil Goyal <gakhil@marvell.com> > Sent: Tuesday, October 8, 2024 1:47 PM > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org > Cc: Dooley, Brian <brian.dooley@intel.com> > Subject: RE: [EXTERNAL] [PATCH v3 1/4] cryptodev: add partial sm2 feature flag > > > Due to complex ways of handling asymmetric cryptography algorithms, > > capabilities may differ between hardware and software PMDs, or even > > between hardware PMDs. One of the examples are algorithms that need an > > additional round of hashing, like SM2. > > > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > > --- > > lib/cryptodev/rte_cryptodev.h | 2 ++ > > 1 file changed, 2 insertions(+) > > > > diff --git a/lib/cryptodev/rte_cryptodev.h > > b/lib/cryptodev/rte_cryptodev.h index bec947f6d5..c0e816b17f 100644 > > --- a/lib/cryptodev/rte_cryptodev.h > > +++ b/lib/cryptodev/rte_cryptodev.h > > @@ -554,6 +554,8 @@ rte_cryptodev_asym_get_xform_string(enum > > rte_crypto_asym_xform_type xform_enum); /**< Support inner checksum > > computation/verification */ > > #define RTE_CRYPTODEV_FF_SECURITY_RX_INJECT (1ULL << 28) > > /**< Support Rx injection after security processing */ > > +#define RTE_CRYPTODEV_FF_ASYM_PARTIAL_SM2 (1ULL << 29) > > +/**< Support the elliptic curve part only in SM2 */ > > > This would need an update in doc/guides/cryptodevs/features/default.ini as > well. > > However, it would be better to use the capability thing. > https://patches.dpdk.org/project/dpdk/patch/20241004181255.916-1- > gmuthukrishn@marvell.com/ > > This patch add SM2 op specific capability. > It would be better to add this partial SM2 in the enum rte_crypto_sm2_op_capa > defined in the above patch. +1. ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v4 0/4] add ec points to sm2 op 2024-10-08 6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal ` (3 preceding siblings ...) 2024-10-08 11:46 ` [EXTERNAL] [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Akhil Goyal @ 2024-10-08 18:14 ` Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal ` (3 more replies) 4 siblings, 4 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 18:14 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal In the case when PMD cannot support the full process of the SM2, but elliptic curve computation only, additional fields are needed to handle such a case. Depends-on: patch-145188 ("[v2] cryptodev: add asymmetric operational capability") v2: - rebased against the 24.11 code v3: - added feature flag - added QAT patches - added test patches v4: - replaced feature flag with capability - split API patches Arkadiusz Kusztal (4): cryptodev: reorder structures in asym crypto header cryptodev: add ec points to sm2 op crypto/qat: add sm2 encryption/decryption function app/test: add test sm2 C1/Kp test cases app/test/test_cryptodev_asym.c | 138 ++++++++++++++++++++++- app/test/test_cryptodev_sm2_test_vectors.h | 112 ++++++++++++++++++- doc/guides/cryptodevs/features/qat.ini | 1 + doc/guides/rel_notes/release_24_11.rst | 4 + drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + drivers/common/qat/qat_adf/qat_pke.h | 20 ++++ drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++++++++- lib/cryptodev/rte_crypto_asym.h | 125 +++++++++++++-------- 8 files changed, 485 insertions(+), 58 deletions(-) -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v4 1/4] cryptodev: reorder structures in asym crypto header 2024-10-08 18:14 ` [PATCH v4 0/4] add ec points to sm2 op Arkadiusz Kusztal @ 2024-10-08 18:14 ` Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal ` (2 subsequent siblings) 3 siblings, 0 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 18:14 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal Asymmetric-crypto header has a simple structure that allows to keep logically separate blocks together. Therefore, xforms, ops, and generic structs may be appropriately ordered. This patch moves sm2-op structs to be placed along other algorithms-op structs. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- lib/cryptodev/rte_crypto_asym.h | 68 ++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h index 157f597d5d..1b54774fee 100644 --- a/lib/cryptodev/rte_crypto_asym.h +++ b/lib/cryptodev/rte_crypto_asym.h @@ -600,40 +600,6 @@ struct rte_crypto_ecpm_op_param { }; /** - * Asymmetric crypto transform data - * - * Structure describing asym xforms. - */ -struct rte_crypto_asym_xform { - struct rte_crypto_asym_xform *next; - /**< Pointer to next xform to set up xform chain.*/ - enum rte_crypto_asym_xform_type xform_type; - /**< Asymmetric crypto transform */ - - union { - struct rte_crypto_rsa_xform rsa; - /**< RSA xform parameters */ - - struct rte_crypto_modex_xform modex; - /**< Modular Exponentiation xform parameters */ - - struct rte_crypto_modinv_xform modinv; - /**< Modular Multiplicative Inverse xform parameters */ - - struct rte_crypto_dh_xform dh; - /**< DH xform parameters */ - - struct rte_crypto_dsa_xform dsa; - /**< DSA xform parameters */ - - struct rte_crypto_ec_xform ec; - /**< EC xform parameters, used by elliptic curve based - * operations. - */ - }; -}; - -/** * SM2 operation capabilities */ enum rte_crypto_sm2_op_capa { @@ -710,6 +676,40 @@ struct rte_crypto_sm2_op_param { }; /** + * Asymmetric crypto transform data + * + * Structure describing asym xforms. + */ +struct rte_crypto_asym_xform { + struct rte_crypto_asym_xform *next; + /**< Pointer to next xform to set up xform chain.*/ + enum rte_crypto_asym_xform_type xform_type; + /**< Asymmetric crypto transform */ + + union { + struct rte_crypto_rsa_xform rsa; + /**< RSA xform parameters */ + + struct rte_crypto_modex_xform modex; + /**< Modular Exponentiation xform parameters */ + + struct rte_crypto_modinv_xform modinv; + /**< Modular Multiplicative Inverse xform parameters */ + + struct rte_crypto_dh_xform dh; + /**< DH xform parameters */ + + struct rte_crypto_dsa_xform dsa; + /**< DSA xform parameters */ + + struct rte_crypto_ec_xform ec; + /**< EC xform parameters, used by elliptic curve based + * operations. + */ + }; +}; + +/** * Asymmetric Cryptographic Operation. * * Structure describing asymmetric crypto operation params. -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v4 2/4] cryptodev: add ec points to sm2 op 2024-10-08 18:14 ` [PATCH v4 0/4] add ec points to sm2 op Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal @ 2024-10-08 18:14 ` Arkadiusz Kusztal 2024-10-08 20:46 ` Stephen Hemminger 2024-10-09 13:01 ` [PATCH v5 0/4] " Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal 3 siblings, 2 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 18:14 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal In the case when PMD cannot support the full process of the SM2, but elliptic curve computation only, additional fields are needed to handle such a case. Points C1, kP therefore were added to the SM2 crypto operation struct. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- lib/cryptodev/rte_crypto_asym.h | 57 +++++++++++++++++++++++++++++++---------- 1 file changed, 43 insertions(+), 14 deletions(-) diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h index 1b54774fee..4fb8ca813c 100644 --- a/lib/cryptodev/rte_crypto_asym.h +++ b/lib/cryptodev/rte_crypto_asym.h @@ -609,6 +609,12 @@ enum rte_crypto_sm2_op_capa { /**< Prehash message before crypto op. */ RTE_CRYPTO_SM2_PKE_KDF, /**< KDF support in SM2 public key encryption */ + RTE_CRYPTO_SM2_PARTIAL, + /**< + * PMD does not support the full process of the + * SM2 encryption/decryption, but the elliptic + * curve part only + */ }; /** @@ -636,20 +642,43 @@ struct rte_crypto_sm2_op_param { * will be overwritten by the PMD with the decrypted length. */ - rte_crypto_param cipher; - /**< - * Pointer to input data - * - to be decrypted for SM2 private decrypt. - * - * Pointer to output data - * - for SM2 public encrypt. - * In this case the underlying array should have been allocated - * with enough memory to hold ciphertext output (at least X bytes - * for prime field curve of N bytes and for message M bytes, - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will - * be overwritten by the PMD with the encrypted length. - */ + union { + rte_crypto_param cipher; + /**< + * Pointer to input data + * - to be decrypted for SM2 private decrypt. + * + * Pointer to output data + * - for SM2 public encrypt. + * In this case the underlying array should have been allocated + * with enough memory to hold ciphertext output (at least X bytes + * for prime field curve of N bytes and for message M bytes, + * where X = (C1 || C2 || C3) and computed based on SM2 RFC as + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will + * be overwritten by the PMD with the encrypted length. + */ + struct { + struct rte_crypto_ec_point C1; + /**< + * This field is used only when PMD does not support the full + * process of the SM2 encryption/decryption, but the elliptic + * curve part only. + * + * In the case of encryption, it is an output - point C1 = (x1,y1). + * In the case of decryption, it is an input - point C1 = (x1,y1). + * + */ + struct rte_crypto_ec_point kP; + /**< + * This field is used only when PMD does not support the full + * process of the SM2 encryption/decryption, but the elliptic + * curve part only. 
+ * + * It is an output in the encryption case, it is a point + * [k]P = (x2,y2) + */ + }; + }; rte_crypto_uint id; /**< The SM2 id used by signer and verifier. */ -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
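A hedged usage sketch of the new union: when the PMD handles only the elliptic curve part, the application leaves the cipher member unused and instead supplies the scalar k plus output buffers for the two points. The 32-byte SM2 coordinate size and the caller-owned buffers are assumptions; the op is assumed to already carry an SM2 session.

  /* Sketch, assuming 32-byte SM2 coordinates; buffers must outlive the op. */
  #include <stdint.h>
  #include <rte_crypto.h>

  static void
  prepare_partial_sm2_encrypt(struct rte_crypto_op *op,
  		uint8_t k[32], uint8_t c1_x[32], uint8_t c1_y[32],
  		uint8_t kp_x[32], uint8_t kp_y[32])
  {
  	struct rte_crypto_asym_op *asym = op->asym;

  	asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT;
  	asym->sm2.k.data = k;              /* per-encryption random scalar */
  	asym->sm2.k.length = 32;

  	/* EC-only path: the cipher member of the union is left unused; the PMD
  	 * writes C1 = [k]G and kP = [k]P = (x2, y2) into these points instead. */
  	asym->sm2.C1.x.data = c1_x;
  	asym->sm2.C1.y.data = c1_y;
  	asym->sm2.kP.x.data = kp_x;
  	asym->sm2.kP.y.data = kp_y;
  }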
* Re: [PATCH v4 2/4] cryptodev: add ec points to sm2 op 2024-10-08 18:14 ` [PATCH v4 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal @ 2024-10-08 20:46 ` Stephen Hemminger 2024-10-08 21:00 ` Kusztal, ArkadiuszX 2024-10-09 13:01 ` [PATCH v5 0/4] " Arkadiusz Kusztal 1 sibling, 1 reply; 42+ messages in thread From: Stephen Hemminger @ 2024-10-08 20:46 UTC (permalink / raw) To: Arkadiusz Kusztal; +Cc: dev, gakhil, brian.dooley On Tue, 8 Oct 2024 19:14:31 +0100 Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> wrote: > + RTE_CRYPTO_SM2_PARTIAL, > + /**< > + * PMD does not support the full process of the > + * SM2 encryption/decryption, but the elliptic > + * curve part only Couldn't this just be: /**< PMD only supports elliptic curve */ ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [PATCH v4 2/4] cryptodev: add ec points to sm2 op 2024-10-08 20:46 ` Stephen Hemminger @ 2024-10-08 21:00 ` Kusztal, ArkadiuszX 2024-10-08 21:09 ` Stephen Hemminger 0 siblings, 1 reply; 42+ messages in thread From: Kusztal, ArkadiuszX @ 2024-10-08 21:00 UTC (permalink / raw) To: Stephen Hemminger; +Cc: dev, gakhil, Dooley, Brian Hi Stephen, > -----Original Message----- > From: Stephen Hemminger <stephen@networkplumber.org> > Sent: Tuesday, October 8, 2024 10:46 PM > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com> > Cc: dev@dpdk.org; gakhil@marvell.com; Dooley, Brian > <brian.dooley@intel.com> > Subject: Re: [PATCH v4 2/4] cryptodev: add ec points to sm2 op > > On Tue, 8 Oct 2024 19:14:31 +0100 > Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> wrote: > > > + RTE_CRYPTO_SM2_PARTIAL, > > + /**< > > + * PMD does not support the full process of the > > + * SM2 encryption/decryption, but the elliptic > > + * curve part only > > Couldn't this just be: > /**< PMD only supports elliptic curve */ SM2 encryption involves several steps: random number generation, hashing, some trivial XORs, and the calculation of elliptic curve points; what I meant here is that only this EC calculation will be performed. Reading it now, though, I probably need to add some more clarity to it. ^ permalink raw reply [flat|nested] 42+ messages in thread
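For context, the non-EC steps referred to above can be sketched as follows once the device has produced C1 and (x2, y2) = [k]P. The kdf() and sm3() helpers are assumed here, not DPDK APIs, and the C1 || C2 || C3 ordering follows the comment already in the header (revisions of the SM2 standard differ on the C2/C3 order).

  /* Sketch of the software-side remainder of SM2 encryption when only the
   * EC points come from the PMD. kdf() and sm3() are assumed helpers. */
  #include <stddef.h>
  #include <stdint.h>
  #include <string.h>

  void kdf(const uint8_t *in, size_t in_len, uint8_t *out, size_t out_len); /* assumed */
  void sm3(const uint8_t *in, size_t in_len, uint8_t out[32]);              /* assumed */

  static void
  sm2_finish_encrypt(const uint8_t *x2, const uint8_t *y2, size_t plen,
  		const uint8_t *msg, size_t mlen, uint8_t *c2, uint8_t c3[32])
  {
  	uint8_t t[mlen], kdf_in[2 * plen], hash_in[plen + mlen + plen];
  	size_t i;

  	memcpy(kdf_in, x2, plen);
  	memcpy(kdf_in + plen, y2, plen);
  	kdf(kdf_in, 2 * plen, t, mlen);         /* t = KDF(x2 || y2, mlen) */

  	for (i = 0; i < mlen; i++)              /* C2 = M xor t */
  		c2[i] = msg[i] ^ t[i];

  	memcpy(hash_in, x2, plen);
  	memcpy(hash_in + plen, msg, mlen);
  	memcpy(hash_in + plen + mlen, y2, plen);
  	sm3(hash_in, plen + mlen + plen, c3);   /* C3 = SM3(x2 || M || y2) */
  }
  /* The ciphertext is then assembled as C1 || C2 || C3, with C1 encoded as
   * 0x04 || x1 || y1 (the 1 + N + N bytes from the header comment). */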
* Re: [PATCH v4 2/4] cryptodev: add ec points to sm2 op 2024-10-08 21:00 ` Kusztal, ArkadiuszX @ 2024-10-08 21:09 ` Stephen Hemminger 2024-10-08 21:29 ` Kusztal, ArkadiuszX 0 siblings, 1 reply; 42+ messages in thread From: Stephen Hemminger @ 2024-10-08 21:09 UTC (permalink / raw) To: Kusztal, ArkadiuszX; +Cc: dev, gakhil, Dooley, Brian On Tue, 8 Oct 2024 21:00:50 +0000 "Kusztal, ArkadiuszX" <arkadiuszx.kusztal@intel.com> wrote: > Hi Stephen, > > > -----Original Message----- > > From: Stephen Hemminger <stephen@networkplumber.org> > > Sent: Tuesday, October 8, 2024 10:46 PM > > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com> > > Cc: dev@dpdk.org; gakhil@marvell.com; Dooley, Brian > > <brian.dooley@intel.com> > > Subject: Re: [PATCH v4 2/4] cryptodev: add ec points to sm2 op > > > > On Tue, 8 Oct 2024 19:14:31 +0100 > > Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> wrote: > > > > > + RTE_CRYPTO_SM2_PARTIAL, > > > + /**< > > > + * PMD does not support the full process of the > > > + * SM2 encryption/decryption, but the elliptic > > > + * curve part only > > > > Couldn't this just be: > > /**< PMD only supports elliptic curve */ > > SM2 encryption involves several steps: random number generation, hashing, some trivial xor's etc, and calculation of elliptic curve points, what I meant here is that only this EC calculation will be performed. > But when I read it now, I probably may need to add some more clarity to it. My point is what developers write tends to be overly wordy and redundant. Comments and documentation should be as succinct as possible. ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [PATCH v4 2/4] cryptodev: add ec points to sm2 op 2024-10-08 21:09 ` Stephen Hemminger @ 2024-10-08 21:29 ` Kusztal, ArkadiuszX 0 siblings, 0 replies; 42+ messages in thread From: Kusztal, ArkadiuszX @ 2024-10-08 21:29 UTC (permalink / raw) To: Stephen Hemminger; +Cc: dev, gakhil, Dooley, Brian > -----Original Message----- > From: Stephen Hemminger <stephen@networkplumber.org> > Sent: Tuesday, October 8, 2024 11:09 PM > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com> > Cc: dev@dpdk.org; gakhil@marvell.com; Dooley, Brian > <brian.dooley@intel.com> > Subject: Re: [PATCH v4 2/4] cryptodev: add ec points to sm2 op > > On Tue, 8 Oct 2024 21:00:50 +0000 > "Kusztal, ArkadiuszX" <arkadiuszx.kusztal@intel.com> wrote: > > > Hi Stephen, > > > > > -----Original Message----- > > > From: Stephen Hemminger <stephen@networkplumber.org> > > > Sent: Tuesday, October 8, 2024 10:46 PM > > > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com> > > > Cc: dev@dpdk.org; gakhil@marvell.com; Dooley, Brian > > > <brian.dooley@intel.com> > > > Subject: Re: [PATCH v4 2/4] cryptodev: add ec points to sm2 op > > > > > > On Tue, 8 Oct 2024 19:14:31 +0100 > > > Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> wrote: > > > > > > > + RTE_CRYPTO_SM2_PARTIAL, > > > > + /**< > > > > + * PMD does not support the full process of the > > > > + * SM2 encryption/decryption, but the elliptic > > > > + * curve part only > > > > > > Couldn't this just be: > > > /**< PMD only supports elliptic curve */ > > > > SM2 encryption involves several steps: random number generation, hashing, > some trivial xor's etc, and calculation of elliptic curve points, what I meant here > is that only this EC calculation will be performed. > > But when I read it now, I probably may need to add some more clarity to it. > > > My point is what developers write tends to be overly wordy and redundant. > Comments and documentation should be as succinct as possible. I agree, I will change it to the more technical/precise. ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v5 0/4] add ec points to sm2 op 2024-10-08 18:14 ` [PATCH v4 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal 2024-10-08 20:46 ` Stephen Hemminger @ 2024-10-09 13:01 ` Arkadiusz Kusztal 2024-10-09 13:01 ` [PATCH v5 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal ` (3 more replies) 1 sibling, 4 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-09 13:01 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal In the case when PMD cannot support the full process of the SM2, but elliptic curve computation only, additional fields are needed to handle such a case. Depends-on: series-33376 ("cryptodev: add asymmetric operational capability") v2: - rebased against the 24.11 code v3: - added feature flag - added QAT patches - added test patches v4: - replaced feature flag with capability - split API patches v5: - rebased - clarified usage of the partial flag Arkadiusz Kusztal (4): cryptodev: reorder structures in asym crypto header cryptodev: add ec points to sm2 op crypto/qat: add sm2 encryption/decryption function app/test: add test sm2 C1/Kp test cases app/test/test_cryptodev_asym.c | 138 ++++++++++++++++++++++- app/test/test_cryptodev_sm2_test_vectors.h | 112 ++++++++++++++++++- doc/guides/cryptodevs/features/qat.ini | 1 + doc/guides/rel_notes/release_24_11.rst | 4 + drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + drivers/common/qat/qat_adf/qat_pke.h | 20 ++++ drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++++++++- lib/cryptodev/rte_crypto_asym.h | 121 ++++++++++++-------- 8 files changed, 481 insertions(+), 58 deletions(-) -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v5 1/4] cryptodev: reorder structures in asym crypto header 2024-10-09 13:01 ` [PATCH v5 0/4] " Arkadiusz Kusztal @ 2024-10-09 13:01 ` Arkadiusz Kusztal 2024-10-09 21:03 ` [EXTERNAL] " Akhil Goyal 2024-10-09 13:01 ` [PATCH v5 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal ` (2 subsequent siblings) 3 siblings, 1 reply; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-09 13:01 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal Asymmetric-crypto header has a simple structure that allows to keep logically separate blocks together. Therefore, xforms, ops, and generic structs may be appropriately ordered. This patch moves sm2-op structs to be placed along other algorithms-op structs. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- lib/cryptodev/rte_crypto_asym.h | 68 ++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h index 1adbe9c389..2af6a307f6 100644 --- a/lib/cryptodev/rte_crypto_asym.h +++ b/lib/cryptodev/rte_crypto_asym.h @@ -600,40 +600,6 @@ struct rte_crypto_ecpm_op_param { }; /** - * Asymmetric crypto transform data - * - * Structure describing asym xforms. - */ -struct rte_crypto_asym_xform { - struct rte_crypto_asym_xform *next; - /**< Pointer to next xform to set up xform chain.*/ - enum rte_crypto_asym_xform_type xform_type; - /**< Asymmetric crypto transform */ - - union { - struct rte_crypto_rsa_xform rsa; - /**< RSA xform parameters */ - - struct rte_crypto_modex_xform modex; - /**< Modular Exponentiation xform parameters */ - - struct rte_crypto_modinv_xform modinv; - /**< Modular Multiplicative Inverse xform parameters */ - - struct rte_crypto_dh_xform dh; - /**< DH xform parameters */ - - struct rte_crypto_dsa_xform dsa; - /**< DSA xform parameters */ - - struct rte_crypto_ec_xform ec; - /**< EC xform parameters, used by elliptic curve based - * operations. - */ - }; -}; - -/** * SM2 operation capabilities */ enum rte_crypto_sm2_op_capa { @@ -708,6 +674,40 @@ struct rte_crypto_sm2_op_param { }; /** + * Asymmetric crypto transform data + * + * Structure describing asym xforms. + */ +struct rte_crypto_asym_xform { + struct rte_crypto_asym_xform *next; + /**< Pointer to next xform to set up xform chain.*/ + enum rte_crypto_asym_xform_type xform_type; + /**< Asymmetric crypto transform */ + + union { + struct rte_crypto_rsa_xform rsa; + /**< RSA xform parameters */ + + struct rte_crypto_modex_xform modex; + /**< Modular Exponentiation xform parameters */ + + struct rte_crypto_modinv_xform modinv; + /**< Modular Multiplicative Inverse xform parameters */ + + struct rte_crypto_dh_xform dh; + /**< DH xform parameters */ + + struct rte_crypto_dsa_xform dsa; + /**< DSA xform parameters */ + + struct rte_crypto_ec_xform ec; + /**< EC xform parameters, used by elliptic curve based + * operations. + */ + }; +}; + +/** * Asymmetric Cryptographic Operation. * * Structure describing asymmetric crypto operation params. -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v5 1/4] cryptodev: reorder structures in asym crypto header 2024-10-09 13:01 ` [PATCH v5 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal @ 2024-10-09 21:03 ` Akhil Goyal 0 siblings, 0 replies; 42+ messages in thread From: Akhil Goyal @ 2024-10-09 21:03 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > Asymmetric-crypto header has a simple structure that allows to > keep logically separate blocks together. Therefore, xforms, > ops, and generic structs may be appropriately ordered. > This patch moves sm2-op structs to be placed along other > algorithms-op structs. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- Acked-by: Akhil Goyal <gakhil@marvell.com> This patch is applied to dpdk-next-crypto Please send the next version for rest of the patches in the series. ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v5 2/4] cryptodev: add ec points to sm2 op 2024-10-09 13:01 ` [PATCH v5 0/4] " Arkadiusz Kusztal 2024-10-09 13:01 ` [PATCH v5 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal @ 2024-10-09 13:01 ` Arkadiusz Kusztal 2024-10-09 20:51 ` [EXTERNAL] " Akhil Goyal 2024-10-11 12:17 ` Akhil Goyal 2024-10-09 13:01 ` [PATCH v5 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal 2024-10-09 13:01 ` [PATCH v5 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal 3 siblings, 2 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-09 13:01 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal In the case when PMD cannot support the full process of the SM2, but elliptic curve computation only, additional fields are needed to handle such a case. Points C1, kP therefore were added to the SM2 crypto operation struct. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- lib/cryptodev/rte_crypto_asym.h | 53 ++++++++++++++++++++++++++++++----------- 1 file changed, 39 insertions(+), 14 deletions(-) diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h index 2af6a307f6..65b1a081b1 100644 --- a/lib/cryptodev/rte_crypto_asym.h +++ b/lib/cryptodev/rte_crypto_asym.h @@ -607,6 +607,8 @@ enum rte_crypto_sm2_op_capa { /**< Random number generator supported in SM2 ops. */ RTE_CRYPTO_SM2_PH, /**< Prehash message before crypto op. */ + RTE_CRYPTO_SM2_PARTIAL, + /**< Calculate elliptic curve points only. */ }; /** @@ -634,20 +636,43 @@ struct rte_crypto_sm2_op_param { * will be overwritten by the PMD with the decrypted length. */ - rte_crypto_param cipher; - /**< - * Pointer to input data - * - to be decrypted for SM2 private decrypt. - * - * Pointer to output data - * - for SM2 public encrypt. - * In this case the underlying array should have been allocated - * with enough memory to hold ciphertext output (at least X bytes - * for prime field curve of N bytes and for message M bytes, - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will - * be overwritten by the PMD with the encrypted length. - */ + union { + rte_crypto_param cipher; + /**< + * Pointer to input data + * - to be decrypted for SM2 private decrypt. + * + * Pointer to output data + * - for SM2 public encrypt. + * In this case the underlying array should have been allocated + * with enough memory to hold ciphertext output (at least X bytes + * for prime field curve of N bytes and for message M bytes, + * where X = (C1 || C2 || C3) and computed based on SM2 RFC as + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will + * be overwritten by the PMD with the encrypted length. + */ + struct { + struct rte_crypto_ec_point C1; + /**< + * This field is used only when PMD does not support the full + * process of the SM2 encryption/decryption, but the elliptic + * curve part only. + * + * In the case of encryption, it is an output - point C1 = (x1,y1). + * In the case of decryption, if is an input - point C1 = (x1,y1) + * + */ + struct rte_crypto_ec_point kP; + /**< + * This field is used only when PMD does not support the full + * process of the SM2 encryption/decryption, but the elliptic + * curve part only. + * + * It is an output in the encryption case, it is a point + * [k]P = (x2,y2) + */ + }; + }; rte_crypto_uint id; /**< The SM2 id used by signer and verifier. */ -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
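For the decrypt direction the same pair of points is used the other way around; a hedged sketch follows, with 32-byte coordinates assumed, the private key dB set through xform->ec.pkey at session time, and the KDF/SM3 post-processing left to software as before.

  /* Sketch: the decrypt direction of the same union. C1 is parsed out of the
   * ciphertext by the application; the PMD returns kP = [dB]C1 = (x2, y2). */
  static void
  prepare_partial_sm2_decrypt(struct rte_crypto_asym_op *asym,
  		uint8_t c1_x[32], uint8_t c1_y[32],
  		uint8_t x2[32], uint8_t y2[32])
  {
  	asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT;
  	asym->sm2.C1.x.data = c1_x;     /* inputs: C1 = (x1, y1) from C1 || C2 || C3 */
  	asym->sm2.C1.x.length = 32;
  	asym->sm2.C1.y.data = c1_y;
  	asym->sm2.C1.y.length = 32;
  	asym->sm2.kP.x.data = x2;       /* outputs: (x2, y2) = [dB]C1 */
  	asym->sm2.kP.y.data = y2;
  }
  /* After dequeue, software finishes as usual for SM2:
   *   t = KDF(x2 || y2, len(C2)); M = C2 xor t; accept only if
   *   SM3(x2 || M || y2) == C3. */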
* RE: [EXTERNAL] [PATCH v5 2/4] cryptodev: add ec points to sm2 op 2024-10-09 13:01 ` [PATCH v5 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal @ 2024-10-09 20:51 ` Akhil Goyal 2024-10-11 12:17 ` Akhil Goyal 1 sibling, 0 replies; 42+ messages in thread From: Akhil Goyal @ 2024-10-09 20:51 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > In the case when PMD cannot support the full process of the SM2, > but elliptic curve computation only, additional fields > are needed to handle such a case. > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > lib/cryptodev/rte_crypto_asym.h | 53 ++++++++++++++++++++++++++++++------ > ----- > 1 file changed, 39 insertions(+), 14 deletions(-) > > diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h > index 2af6a307f6..65b1a081b1 100644 > --- a/lib/cryptodev/rte_crypto_asym.h > +++ b/lib/cryptodev/rte_crypto_asym.h > @@ -607,6 +607,8 @@ enum rte_crypto_sm2_op_capa { > /**< Random number generator supported in SM2 ops. */ > RTE_CRYPTO_SM2_PH, > /**< Prehash message before crypto op. */ > + RTE_CRYPTO_SM2_PARTIAL, > + /**< Calculate elliptic curve points only. */ > }; > > /** > @@ -634,20 +636,43 @@ struct rte_crypto_sm2_op_param { > * will be overwritten by the PMD with the decrypted length. > */ > > - rte_crypto_param cipher; > - /**< > - * Pointer to input data > - * - to be decrypted for SM2 private decrypt. > - * > - * Pointer to output data > - * - for SM2 public encrypt. > - * In this case the underlying array should have been allocated > - * with enough memory to hold ciphertext output (at least X bytes > - * for prime field curve of N bytes and for message M bytes, > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > - * be overwritten by the PMD with the encrypted length. > - */ > + union { > + rte_crypto_param cipher; > + /**< > + * Pointer to input data > + * - to be decrypted for SM2 private decrypt. > + * > + * Pointer to output data > + * - for SM2 public encrypt. > + * In this case the underlying array should have been allocated > + * with enough memory to hold ciphertext output (at least X > bytes > + * for prime field curve of N bytes and for message M bytes, > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > as > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > + * be overwritten by the PMD with the encrypted length. > + */ > + struct { > + struct rte_crypto_ec_point C1; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * In the case of encryption, it is an output - point C1 = > (x1,y1). > + * In the case of decryption, if is an input - point C1 = > (x1,y1) > + * > + */ > + struct rte_crypto_ec_point kP; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * It is an output in the encryption case, it is a point > + * [k]P = (x2,y2) > + */ Please add reference to RTE_CRYPTO_SM2_PARTIAL in the comments. > + }; > + }; > > rte_crypto_uint id; > /**< The SM2 id used by signer and verifier. */ > -- > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v5 2/4] cryptodev: add ec points to sm2 op 2024-10-09 13:01 ` [PATCH v5 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal 2024-10-09 20:51 ` [EXTERNAL] " Akhil Goyal @ 2024-10-11 12:17 ` Akhil Goyal 2024-10-17 18:54 ` Kusztal, ArkadiuszX 1 sibling, 1 reply; 42+ messages in thread From: Akhil Goyal @ 2024-10-11 12:17 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > In the case when PMD cannot support the full process of the SM2, > but elliptic curve computation only, additional fields > are needed to handle such a case. > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > lib/cryptodev/rte_crypto_asym.h | 53 ++++++++++++++++++++++++++++++------ > ----- > 1 file changed, 39 insertions(+), 14 deletions(-) > > diff --git a/lib/cryptodev/rte_crypto_asym.h b/lib/cryptodev/rte_crypto_asym.h > index 2af6a307f6..65b1a081b1 100644 > --- a/lib/cryptodev/rte_crypto_asym.h > +++ b/lib/cryptodev/rte_crypto_asym.h > @@ -607,6 +607,8 @@ enum rte_crypto_sm2_op_capa { > /**< Random number generator supported in SM2 ops. */ > RTE_CRYPTO_SM2_PH, > /**< Prehash message before crypto op. */ > + RTE_CRYPTO_SM2_PARTIAL, > + /**< Calculate elliptic curve points only. */ > }; > > /** > @@ -634,20 +636,43 @@ struct rte_crypto_sm2_op_param { > * will be overwritten by the PMD with the decrypted length. > */ > > - rte_crypto_param cipher; > - /**< > - * Pointer to input data > - * - to be decrypted for SM2 private decrypt. > - * > - * Pointer to output data > - * - for SM2 public encrypt. > - * In this case the underlying array should have been allocated > - * with enough memory to hold ciphertext output (at least X bytes > - * for prime field curve of N bytes and for message M bytes, > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > - * be overwritten by the PMD with the encrypted length. > - */ > + union { > + rte_crypto_param cipher; > + /**< > + * Pointer to input data > + * - to be decrypted for SM2 private decrypt. > + * > + * Pointer to output data > + * - for SM2 public encrypt. > + * In this case the underlying array should have been allocated > + * with enough memory to hold ciphertext output (at least X > bytes > + * for prime field curve of N bytes and for message M bytes, > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > as > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > + * be overwritten by the PMD with the encrypted length. > + */ > + struct { > + struct rte_crypto_ec_point C1; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * In the case of encryption, it is an output - point C1 = > (x1,y1). > + * In the case of decryption, if is an input - point C1 = > (x1,y1) > + * > + */ > + struct rte_crypto_ec_point kP; > + /**< > + * This field is used only when PMD does not support the > full > + * process of the SM2 encryption/decryption, but the > elliptic > + * curve part only. > + * > + * It is an output in the encryption case, it is a point > + * [k]P = (x2,y2) > + */ It is better to keep the variable names in lower case. c1 and kp should be fine. > + }; > + }; > > rte_crypto_uint id; > /**< The SM2 id used by signer and verifier. */ > -- > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v5 2/4] cryptodev: add ec points to sm2 op 2024-10-11 12:17 ` Akhil Goyal @ 2024-10-17 18:54 ` Kusztal, ArkadiuszX 2024-10-18 12:30 ` Akhil Goyal 0 siblings, 1 reply; 42+ messages in thread From: Kusztal, ArkadiuszX @ 2024-10-17 18:54 UTC (permalink / raw) To: Akhil Goyal, dev; +Cc: Dooley, Brian > -----Original Message----- > From: Akhil Goyal <gakhil@marvell.com> > Sent: Friday, October 11, 2024 2:18 PM > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org > Cc: Dooley, Brian <brian.dooley@intel.com> > Subject: RE: [EXTERNAL] [PATCH v5 2/4] cryptodev: add ec points to sm2 op > > > In the case when PMD cannot support the full process of the SM2, but > > elliptic curve computation only, additional fields are needed to > > handle such a case. > > > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > > --- > > lib/cryptodev/rte_crypto_asym.h | 53 > > ++++++++++++++++++++++++++++++------ > > ----- > > 1 file changed, 39 insertions(+), 14 deletions(-) > > > > diff --git a/lib/cryptodev/rte_crypto_asym.h > > b/lib/cryptodev/rte_crypto_asym.h index 2af6a307f6..65b1a081b1 100644 > > --- a/lib/cryptodev/rte_crypto_asym.h > > +++ b/lib/cryptodev/rte_crypto_asym.h > > @@ -607,6 +607,8 @@ enum rte_crypto_sm2_op_capa { > > /**< Random number generator supported in SM2 ops. */ > > RTE_CRYPTO_SM2_PH, > > /**< Prehash message before crypto op. */ > > + RTE_CRYPTO_SM2_PARTIAL, > > + /**< Calculate elliptic curve points only. */ > > }; > > > > /** > > @@ -634,20 +636,43 @@ struct rte_crypto_sm2_op_param { > > * will be overwritten by the PMD with the decrypted length. > > */ > > > > - rte_crypto_param cipher; > > - /**< > > - * Pointer to input data > > - * - to be decrypted for SM2 private decrypt. > > - * > > - * Pointer to output data > > - * - for SM2 public encrypt. > > - * In this case the underlying array should have been allocated > > - * with enough memory to hold ciphertext output (at least X bytes > > - * for prime field curve of N bytes and for message M bytes, > > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > - * be overwritten by the PMD with the encrypted length. > > - */ > > + union { > > + rte_crypto_param cipher; > > + /**< > > + * Pointer to input data > > + * - to be decrypted for SM2 private decrypt. > > + * > > + * Pointer to output data > > + * - for SM2 public encrypt. > > + * In this case the underlying array should have been allocated > > + * with enough memory to hold ciphertext output (at least X > > bytes > > + * for prime field curve of N bytes and for message M bytes, > > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > > as > > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > + * be overwritten by the PMD with the encrypted length. > > + */ > > + struct { > > + struct rte_crypto_ec_point C1; > > + /**< > > + * This field is used only when PMD does not support > the > > full > > + * process of the SM2 encryption/decryption, but the > > elliptic > > + * curve part only. > > + * > > + * In the case of encryption, it is an output - point C1 = > > (x1,y1). 
> > + * In the case of decryption, if is an input - point C1 = > > (x1,y1) > > + * > > + */ > > + struct rte_crypto_ec_point kP; > > + /**< > > + * This field is used only when PMD does not support > the > > full > > + * process of the SM2 encryption/decryption, but the > > elliptic > > + * curve part only. > > + * > > + * It is an output in the encryption case, it is a point > > + * [k]P = (x2,y2) > > + */ > > It is better to keep the variable names in lower case. > c1 and kp should be fine. The reason for keeping some of the letters in uppercase is that it corresponds to the general convention of naming for these types. That's why we have dQ, qInv in RSA key for example, not dq, qinv. > > > + }; > > + }; > > > > rte_crypto_uint id; > > /**< The SM2 id used by signer and verifier. */ > > -- > > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v5 2/4] cryptodev: add ec points to sm2 op 2024-10-17 18:54 ` Kusztal, ArkadiuszX @ 2024-10-18 12:30 ` Akhil Goyal 0 siblings, 0 replies; 42+ messages in thread From: Akhil Goyal @ 2024-10-18 12:30 UTC (permalink / raw) To: Kusztal, ArkadiuszX, dev; +Cc: Dooley, Brian > > > > -----Original Message----- > > From: Akhil Goyal <gakhil@marvell.com> > > Sent: Friday, October 11, 2024 2:18 PM > > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org > > Cc: Dooley, Brian <brian.dooley@intel.com> > > Subject: RE: [EXTERNAL] [PATCH v5 2/4] cryptodev: add ec points to sm2 op > > > > > In the case when PMD cannot support the full process of the SM2, but > > > elliptic curve computation only, additional fields are needed to > > > handle such a case. > > > > > > Points C1, kP therefore were added to the SM2 crypto operation struct. > > > > > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > > > --- > > > lib/cryptodev/rte_crypto_asym.h | 53 > > > ++++++++++++++++++++++++++++++------ > > > ----- > > > 1 file changed, 39 insertions(+), 14 deletions(-) > > > > > > diff --git a/lib/cryptodev/rte_crypto_asym.h > > > b/lib/cryptodev/rte_crypto_asym.h index 2af6a307f6..65b1a081b1 100644 > > > --- a/lib/cryptodev/rte_crypto_asym.h > > > +++ b/lib/cryptodev/rte_crypto_asym.h > > > @@ -607,6 +607,8 @@ enum rte_crypto_sm2_op_capa { > > > /**< Random number generator supported in SM2 ops. */ > > > RTE_CRYPTO_SM2_PH, > > > /**< Prehash message before crypto op. */ > > > + RTE_CRYPTO_SM2_PARTIAL, > > > + /**< Calculate elliptic curve points only. */ > > > }; > > > > > > /** > > > @@ -634,20 +636,43 @@ struct rte_crypto_sm2_op_param { > > > * will be overwritten by the PMD with the decrypted length. > > > */ > > > > > > - rte_crypto_param cipher; > > > - /**< > > > - * Pointer to input data > > > - * - to be decrypted for SM2 private decrypt. > > > - * > > > - * Pointer to output data > > > - * - for SM2 public encrypt. > > > - * In this case the underlying array should have been allocated > > > - * with enough memory to hold ciphertext output (at least X bytes > > > - * for prime field curve of N bytes and for message M bytes, > > > - * where X = (C1 || C2 || C3) and computed based on SM2 RFC as > > > - * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > > - * be overwritten by the PMD with the encrypted length. > > > - */ > > > + union { > > > + rte_crypto_param cipher; > > > + /**< > > > + * Pointer to input data > > > + * - to be decrypted for SM2 private decrypt. > > > + * > > > + * Pointer to output data > > > + * - for SM2 public encrypt. > > > + * In this case the underlying array should have been allocated > > > + * with enough memory to hold ciphertext output (at least X > > > bytes > > > + * for prime field curve of N bytes and for message M bytes, > > > + * where X = (C1 || C2 || C3) and computed based on SM2 RFC > > > as > > > + * C1 (1 + N + N), C2 = M, C3 = N. The cipher.length field will > > > + * be overwritten by the PMD with the encrypted length. > > > + */ > > > + struct { > > > + struct rte_crypto_ec_point C1; > > > + /**< > > > + * This field is used only when PMD does not support > > the > > > full > > > + * process of the SM2 encryption/decryption, but the > > > elliptic > > > + * curve part only. > > > + * > > > + * In the case of encryption, it is an output - point C1 = > > > (x1,y1). 
> > > + * In the case of decryption, if is an input - point C1 = > > > (x1,y1) > > > + * > > > + */ > > > + struct rte_crypto_ec_point kP; > > > + /**< > > > + * This field is used only when PMD does not support > > the > > > full > > > + * process of the SM2 encryption/decryption, but the > > > elliptic > > > + * curve part only. > > > + * > > > + * It is an output in the encryption case, it is a point > > > + * [k]P = (x2,y2) > > > + */ > > > > It is better to keep the variable names in lower case. > > c1 and kp should be fine. > > The reason for keeping some of the letters in uppercase is that it corresponds to > the general convention of naming for these types. > That's why we have dQ, qInv in RSA key for example, not dq, qinv. Those were missed in the review. We should not add new ones. ^ permalink raw reply [flat|nested] 42+ messages in thread
* [PATCH v5 3/4] crypto/qat: add sm2 encryption/decryption function 2024-10-09 13:01 ` [PATCH v5 0/4] " Arkadiusz Kusztal 2024-10-09 13:01 ` [PATCH v5 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal 2024-10-09 13:01 ` [PATCH v5 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal @ 2024-10-09 13:01 ` Arkadiusz Kusztal 2024-10-09 20:49 ` [EXTERNAL] " Akhil Goyal 2024-10-09 13:01 ` [PATCH v5 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal 3 siblings, 1 reply; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-09 13:01 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal This commit adds SM2 elliptic curve based asymmetric encryption and decryption to the Intel QuickAssist Technology PMD. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- doc/guides/cryptodevs/features/qat.ini | 1 + doc/guides/rel_notes/release_24_11.rst | 4 + drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + drivers/common/qat/qat_adf/qat_pke.h | 20 ++++ drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++++++++- 5 files changed, 162 insertions(+), 6 deletions(-) diff --git a/doc/guides/cryptodevs/features/qat.ini b/doc/guides/cryptodevs/features/qat.ini index f41d29158f..219dd1e011 100644 --- a/doc/guides/cryptodevs/features/qat.ini +++ b/doc/guides/cryptodevs/features/qat.ini @@ -71,6 +71,7 @@ ZUC EIA3 = Y AES CMAC (128) = Y SM3 = Y SM3 HMAC = Y +SM2 = Y ; ; Supported AEAD algorithms of the 'qat' crypto driver. diff --git a/doc/guides/rel_notes/release_24_11.rst b/doc/guides/rel_notes/release_24_11.rst index e0a9aa55a1..e56824310b 100644 --- a/doc/guides/rel_notes/release_24_11.rst +++ b/doc/guides/rel_notes/release_24_11.rst @@ -67,6 +67,10 @@ New Features The new statistics are useful for debugging and profiling. +* **Updated the QuickAssist Technology (QAT) Crypto PMD.** + + * Added SM2 encryption and decryption algorithms. 
+ Removed Items ------------- diff --git a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h index 630c6e1a9b..aa49612ca1 100644 --- a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h +++ b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h @@ -1542,6 +1542,9 @@ icp_qat_fw_mmp_ecdsa_verify_gfp_521_input::in in @endlink * @li no output parameters */ +#define PKE_ECSM2_ENCRYPTION 0x25221720 +#define PKE_ECSM2_DECRYPTION 0x201716e6 + #define PKE_LIVENESS 0x00000001 /**< Functionality ID for PKE_LIVENESS * @li 0 input parameter(s) diff --git a/drivers/common/qat/qat_adf/qat_pke.h b/drivers/common/qat/qat_adf/qat_pke.h index f88932a275..ac051e965d 100644 --- a/drivers/common/qat/qat_adf/qat_pke.h +++ b/drivers/common/qat/qat_adf/qat_pke.h @@ -334,4 +334,24 @@ get_sm2_ecdsa_verify_function(void) return qat_function; } +static struct qat_asym_function +get_sm2_encryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_ENCRYPTION, 32 + }; + + return qat_function; +} + +static struct qat_asym_function +get_sm2_decryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_DECRYPTION, 32 + }; + + return qat_function; +} + #endif diff --git a/drivers/crypto/qat/qat_asym.c b/drivers/crypto/qat/qat_asym.c index e43884e69b..3ce48a1486 100644 --- a/drivers/crypto/qat/qat_asym.c +++ b/drivers/crypto/qat/qat_asym.c @@ -932,6 +932,15 @@ sm2_ecdsa_sign_set_input(struct icp_qat_fw_pke_request *qat_req, qat_req->input_param_count = 3; qat_req->output_param_count = 2; + HEXDUMP("SM2 K test", asym_op->sm2.k.data, + cookie->alg_bytesize); + HEXDUMP("SM2 K", cookie->input_array[0], + cookie->alg_bytesize); + HEXDUMP("SM2 msg", cookie->input_array[1], + cookie->alg_bytesize); + HEXDUMP("SM2 pkey", cookie->input_array[2], + cookie->alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; } @@ -983,6 +992,114 @@ sm2_ecdsa_sign_collect(struct rte_crypto_asym_op *asym_op, } static int +sm2_encryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_encryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(asym_op->sm2.k, qat_func_alignsize, 0); + SET_PKE_LN(xform->ec.q.x, qat_func_alignsize, 1); + SET_PKE_LN(xform->ec.q.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + qat_req->output_param_count = 4; + + HEXDUMP("SM2 K", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("SM2 Q.x", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("SM2 Q.y", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + +static uint8_t +sm2_encryption_collect(struct rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.C1.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.C1.y.data, cookie->output_array[1], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[2], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[3], alg_bytesize); + asym_op->sm2.C1.x.length = alg_bytesize; + asym_op->sm2.C1.y.length = alg_bytesize; + asym_op->sm2.kP.x.length = 
alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("C1[x1]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("C1[y]", cookie->output_array[1], + alg_bytesize); + HEXDUMP("kP[x]", cookie->output_array[2], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[3], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static int +sm2_decryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_decryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(xform->ec.pkey, qat_func_alignsize, 0); + SET_PKE_LN(asym_op->sm2.C1.x, qat_func_alignsize, 1); + SET_PKE_LN(asym_op->sm2.C1.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + qat_req->output_param_count = 2; + + HEXDUMP("d", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("C1[x]", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("C1[y]", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static uint8_t +sm2_decryption_collect(struct rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[1], alg_bytesize); + asym_op->sm2.kP.x.length = alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("kP[x]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[1], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + +static int asym_set_input(struct icp_qat_fw_pke_request *qat_req, struct qat_asym_op_cookie *cookie, const struct rte_crypto_asym_op *asym_op, @@ -1014,14 +1131,20 @@ asym_set_input(struct icp_qat_fw_pke_request *qat_req, asym_op, xform); } case RTE_CRYPTO_ASYM_XFORM_SM2: - if (asym_op->sm2.op_type == - RTE_CRYPTO_ASYM_OP_VERIFY) { + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) { + return sm2_encryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) { + return sm2_decryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) { return sm2_ecdsa_verify_set_input(qat_req, cookie, asym_op, xform); - } else { + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_SIGN) { return sm2_ecdsa_sign_set_input(qat_req, cookie, asym_op, xform); } + break; default: QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform"); return -EINVAL; @@ -1113,7 +1236,13 @@ qat_asym_collect_response(struct rte_crypto_op *op, case RTE_CRYPTO_ASYM_XFORM_ECDH: return ecdh_collect(asym_op, cookie); case RTE_CRYPTO_ASYM_XFORM_SM2: - return sm2_ecdsa_sign_collect(asym_op, cookie); + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) + return sm2_encryption_collect(asym_op, cookie); + else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) + return sm2_decryption_collect(asym_op, cookie); + else + return sm2_ecdsa_sign_collect(asym_op, cookie); + default: QAT_LOG(ERR, "Not supported xform type"); return RTE_CRYPTO_OP_STATUS_ERROR; @@ -1385,9 +1514,8 @@ qat_asym_session_configure(struct 
rte_cryptodev *dev __rte_unused, case RTE_CRYPTO_ASYM_XFORM_ECDSA: case RTE_CRYPTO_ASYM_XFORM_ECPM: case RTE_CRYPTO_ASYM_XFORM_ECDH: - session_set_ec(qat_session, xform); - break; case RTE_CRYPTO_ASYM_XFORM_SM2: + session_set_ec(qat_session, xform); break; default: ret = -ENOTSUP; -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
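A condensed reading of the firmware parameter mapping established by the two new set_input/collect pairs above, together with the output-buffer contract they imply; this is a summary of the patch, not additional API, and the surrounding op handling is assumed.

  /*
   * Mapping set up by sm2_encryption_set_input()/sm2_decryption_set_input():
   *   PKE_ECSM2_ENCRYPTION  in : [0] k, [1] Q.x, [2] Q.y
   *                         out: [0] C1.x, [1] C1.y, [2] kP.x, [3] kP.y
   *   PKE_ECSM2_DECRYPTION  in : [0] ec.pkey (dB), [1] C1.x, [2] C1.y
   *                         out: [0] kP.x, [1] kP.y
   *
   * The collect functions rte_memcpy() alg_bytesize (32 here) into each point
   * and set its .length, so the application only pre-populates .data with
   * buffers of at least the curve size, e.g.:
   */
  uint8_t x2[32], y2[32];

  op->asym->sm2.kP.x.data = x2;
  op->asym->sm2.kP.y.data = y2;
  /* after dequeue: op->asym->sm2.kP.x.length == 32, etc. */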
* RE: [EXTERNAL] [PATCH v5 3/4] crypto/qat: add sm2 encryption/decryption function 2024-10-09 13:01 ` [PATCH v5 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal @ 2024-10-09 20:49 ` Akhil Goyal 2024-10-10 7:49 ` Kusztal, ArkadiuszX 0 siblings, 1 reply; 42+ messages in thread From: Akhil Goyal @ 2024-10-09 20:49 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > This commit adds SM2 elliptic curve based asymmetric > encryption and decryption to the Intel QuickAssist > Technology PMD. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > doc/guides/cryptodevs/features/qat.ini | 1 + > doc/guides/rel_notes/release_24_11.rst | 4 + > drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + > drivers/common/qat/qat_adf/qat_pke.h | 20 ++++ > drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++++++++- > 5 files changed, 162 insertions(+), 6 deletions(-) > RTE_CRYPTO_SM2_PARTIAL is not set in the PMD as op_capa. > diff --git a/doc/guides/cryptodevs/features/qat.ini > b/doc/guides/cryptodevs/features/qat.ini > index f41d29158f..219dd1e011 100644 > --- a/doc/guides/cryptodevs/features/qat.ini > +++ b/doc/guides/cryptodevs/features/qat.ini > @@ -71,6 +71,7 @@ ZUC EIA3 = Y > AES CMAC (128) = Y > SM3 = Y > SM3 HMAC = Y > +SM2 = Y > > ; > ; Supported AEAD algorithms of the 'qat' crypto driver. > diff --git a/doc/guides/rel_notes/release_24_11.rst > b/doc/guides/rel_notes/release_24_11.rst > index e0a9aa55a1..e56824310b 100644 > --- a/doc/guides/rel_notes/release_24_11.rst > +++ b/doc/guides/rel_notes/release_24_11.rst > @@ -67,6 +67,10 @@ New Features > > The new statistics are useful for debugging and profiling. > > +* **Updated the QuickAssist Technology (QAT) Crypto PMD.** > + > + * Added SM2 encryption and decryption algorithms. 
> + > > Removed Items > ------------- > diff --git a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > index 630c6e1a9b..aa49612ca1 100644 > --- a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > +++ b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > @@ -1542,6 +1542,9 @@ icp_qat_fw_mmp_ecdsa_verify_gfp_521_input::in in > @endlink > * @li no output parameters > */ > > +#define PKE_ECSM2_ENCRYPTION 0x25221720 > +#define PKE_ECSM2_DECRYPTION 0x201716e6 > + > #define PKE_LIVENESS 0x00000001 > /**< Functionality ID for PKE_LIVENESS > * @li 0 input parameter(s) > diff --git a/drivers/common/qat/qat_adf/qat_pke.h > b/drivers/common/qat/qat_adf/qat_pke.h > index f88932a275..ac051e965d 100644 > --- a/drivers/common/qat/qat_adf/qat_pke.h > +++ b/drivers/common/qat/qat_adf/qat_pke.h > @@ -334,4 +334,24 @@ get_sm2_ecdsa_verify_function(void) > return qat_function; > } > > +static struct qat_asym_function > +get_sm2_encryption_function(void) > +{ > + struct qat_asym_function qat_function = { > + PKE_ECSM2_ENCRYPTION, 32 > + }; > + > + return qat_function; > +} > + > +static struct qat_asym_function > +get_sm2_decryption_function(void) > +{ > + struct qat_asym_function qat_function = { > + PKE_ECSM2_DECRYPTION, 32 > + }; > + > + return qat_function; > +} > + > #endif > diff --git a/drivers/crypto/qat/qat_asym.c b/drivers/crypto/qat/qat_asym.c > index e43884e69b..3ce48a1486 100644 > --- a/drivers/crypto/qat/qat_asym.c > +++ b/drivers/crypto/qat/qat_asym.c > @@ -932,6 +932,15 @@ sm2_ecdsa_sign_set_input(struct > icp_qat_fw_pke_request *qat_req, > qat_req->input_param_count = 3; > qat_req->output_param_count = 2; > > + HEXDUMP("SM2 K test", asym_op->sm2.k.data, > + cookie->alg_bytesize); > + HEXDUMP("SM2 K", cookie->input_array[0], > + cookie->alg_bytesize); > + HEXDUMP("SM2 msg", cookie->input_array[1], > + cookie->alg_bytesize); > + HEXDUMP("SM2 pkey", cookie->input_array[2], > + cookie->alg_bytesize); > + > return RTE_CRYPTO_OP_STATUS_SUCCESS; > } > > @@ -983,6 +992,114 @@ sm2_ecdsa_sign_collect(struct rte_crypto_asym_op > *asym_op, > } > > static int > +sm2_encryption_set_input(struct icp_qat_fw_pke_request *qat_req, > + struct qat_asym_op_cookie *cookie, > + const struct rte_crypto_asym_op *asym_op, > + const struct rte_crypto_asym_xform *xform) > +{ > + const struct qat_asym_function qat_function = > + get_sm2_encryption_function(); > + const uint32_t qat_func_alignsize = > + qat_function.bytesize; > + > + SET_PKE_LN(asym_op->sm2.k, qat_func_alignsize, 0); > + SET_PKE_LN(xform->ec.q.x, qat_func_alignsize, 1); > + SET_PKE_LN(xform->ec.q.y, qat_func_alignsize, 2); > + > + cookie->alg_bytesize = qat_function.bytesize; > + cookie->qat_func_alignsize = qat_function.bytesize; > + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; > + qat_req->input_param_count = 3; > + qat_req->output_param_count = 4; > + > + HEXDUMP("SM2 K", cookie->input_array[0], > + qat_func_alignsize); > + HEXDUMP("SM2 Q.x", cookie->input_array[1], > + qat_func_alignsize); > + HEXDUMP("SM2 Q.y", cookie->input_array[2], > + qat_func_alignsize); > + > + return RTE_CRYPTO_OP_STATUS_SUCCESS; > +} > + > +static uint8_t > +sm2_encryption_collect(struct rte_crypto_asym_op *asym_op, > + const struct qat_asym_op_cookie *cookie) > +{ > + uint32_t alg_bytesize = cookie->alg_bytesize; > + > + rte_memcpy(asym_op->sm2.C1.x.data, cookie->output_array[0], > alg_bytesize); > + rte_memcpy(asym_op->sm2.C1.y.data, cookie->output_array[1], > alg_bytesize); > + rte_memcpy(asym_op->sm2.kP.x.data, 
cookie->output_array[2], > alg_bytesize); > + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[3], > alg_bytesize); > + asym_op->sm2.C1.x.length = alg_bytesize; > + asym_op->sm2.C1.y.length = alg_bytesize; > + asym_op->sm2.kP.x.length = alg_bytesize; > + asym_op->sm2.kP.y.length = alg_bytesize; > + > + HEXDUMP("C1[x1]", cookie->output_array[0], > + alg_bytesize); > + HEXDUMP("C1[y]", cookie->output_array[1], > + alg_bytesize); > + HEXDUMP("kP[x]", cookie->output_array[2], > + alg_bytesize); > + HEXDUMP("kP[y]", cookie->output_array[3], > + alg_bytesize); > + return RTE_CRYPTO_OP_STATUS_SUCCESS; > +} > + > + > +static int > +sm2_decryption_set_input(struct icp_qat_fw_pke_request *qat_req, > + struct qat_asym_op_cookie *cookie, > + const struct rte_crypto_asym_op *asym_op, > + const struct rte_crypto_asym_xform *xform) > +{ > + const struct qat_asym_function qat_function = > + get_sm2_decryption_function(); > + const uint32_t qat_func_alignsize = > + qat_function.bytesize; > + > + SET_PKE_LN(xform->ec.pkey, qat_func_alignsize, 0); > + SET_PKE_LN(asym_op->sm2.C1.x, qat_func_alignsize, 1); > + SET_PKE_LN(asym_op->sm2.C1.y, qat_func_alignsize, 2); > + > + cookie->alg_bytesize = qat_function.bytesize; > + cookie->qat_func_alignsize = qat_function.bytesize; > + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; > + qat_req->input_param_count = 3; > + qat_req->output_param_count = 2; > + > + HEXDUMP("d", cookie->input_array[0], > + qat_func_alignsize); > + HEXDUMP("C1[x]", cookie->input_array[1], > + qat_func_alignsize); > + HEXDUMP("C1[y]", cookie->input_array[2], > + qat_func_alignsize); > + > + return RTE_CRYPTO_OP_STATUS_SUCCESS; > +} > + > + > +static uint8_t > +sm2_decryption_collect(struct rte_crypto_asym_op *asym_op, > + const struct qat_asym_op_cookie *cookie) > +{ > + uint32_t alg_bytesize = cookie->alg_bytesize; > + > + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[0], > alg_bytesize); > + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[1], > alg_bytesize); > + asym_op->sm2.kP.x.length = alg_bytesize; > + asym_op->sm2.kP.y.length = alg_bytesize; > + > + HEXDUMP("kP[x]", cookie->output_array[0], > + alg_bytesize); > + HEXDUMP("kP[y]", cookie->output_array[1], > + alg_bytesize); > + return RTE_CRYPTO_OP_STATUS_SUCCESS; > +} > + > +static int > asym_set_input(struct icp_qat_fw_pke_request *qat_req, > struct qat_asym_op_cookie *cookie, > const struct rte_crypto_asym_op *asym_op, > @@ -1014,14 +1131,20 @@ asym_set_input(struct icp_qat_fw_pke_request > *qat_req, > asym_op, xform); > } > case RTE_CRYPTO_ASYM_XFORM_SM2: > - if (asym_op->sm2.op_type == > - RTE_CRYPTO_ASYM_OP_VERIFY) { > + if (asym_op->sm2.op_type == > RTE_CRYPTO_ASYM_OP_ENCRYPT) { > + return sm2_encryption_set_input(qat_req, cookie, > + asym_op, xform); > + } else if (asym_op->sm2.op_type == > RTE_CRYPTO_ASYM_OP_DECRYPT) { > + return sm2_decryption_set_input(qat_req, cookie, > + asym_op, xform); > + } else if (asym_op->sm2.op_type == > RTE_CRYPTO_ASYM_OP_VERIFY) { > return sm2_ecdsa_verify_set_input(qat_req, cookie, > asym_op, xform); > - } else { > + } else if (asym_op->sm2.op_type == > RTE_CRYPTO_ASYM_OP_SIGN) { > return sm2_ecdsa_sign_set_input(qat_req, cookie, > asym_op, xform); > } > + break; > default: > QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform"); > return -EINVAL; > @@ -1113,7 +1236,13 @@ qat_asym_collect_response(struct rte_crypto_op > *op, > case RTE_CRYPTO_ASYM_XFORM_ECDH: > return ecdh_collect(asym_op, cookie); > case RTE_CRYPTO_ASYM_XFORM_SM2: > - return 
sm2_ecdsa_sign_collect(asym_op, cookie); > + if (asym_op->sm2.op_type == > RTE_CRYPTO_ASYM_OP_ENCRYPT) > + return sm2_encryption_collect(asym_op, cookie); > + else if (asym_op->sm2.op_type == > RTE_CRYPTO_ASYM_OP_DECRYPT) > + return sm2_decryption_collect(asym_op, cookie); > + else > + return sm2_ecdsa_sign_collect(asym_op, cookie); > + > default: > QAT_LOG(ERR, "Not supported xform type"); > return RTE_CRYPTO_OP_STATUS_ERROR; > @@ -1385,9 +1514,8 @@ qat_asym_session_configure(struct rte_cryptodev > *dev __rte_unused, > case RTE_CRYPTO_ASYM_XFORM_ECDSA: > case RTE_CRYPTO_ASYM_XFORM_ECPM: > case RTE_CRYPTO_ASYM_XFORM_ECDH: > - session_set_ec(qat_session, xform); > - break; > case RTE_CRYPTO_ASYM_XFORM_SM2: > + session_set_ec(qat_session, xform); > break; > default: > ret = -ENOTSUP; > -- > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
* RE: [EXTERNAL] [PATCH v5 3/4] crypto/qat: add sm2 encryption/decryption function 2024-10-09 20:49 ` [EXTERNAL] " Akhil Goyal @ 2024-10-10 7:49 ` Kusztal, ArkadiuszX 0 siblings, 0 replies; 42+ messages in thread From: Kusztal, ArkadiuszX @ 2024-10-10 7:49 UTC (permalink / raw) To: Akhil Goyal, dev; +Cc: Dooley, Brian > -----Original Message----- > From: Akhil Goyal <gakhil@marvell.com> > Sent: Wednesday, October 9, 2024 10:49 PM > To: Kusztal, ArkadiuszX <arkadiuszx.kusztal@intel.com>; dev@dpdk.org > Cc: Dooley, Brian <brian.dooley@intel.com> > Subject: RE: [EXTERNAL] [PATCH v5 3/4] crypto/qat: add sm2 > encryption/decryption function > > > This commit adds SM2 elliptic curve based asymmetric encryption and > > decryption to the Intel QuickAssist Technology PMD. > > > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > > --- > > doc/guides/cryptodevs/features/qat.ini | 1 + > > doc/guides/rel_notes/release_24_11.rst | 4 + > > drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + > > drivers/common/qat/qat_adf/qat_pke.h | 20 ++++ > > drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++++++++- > > 5 files changed, 162 insertions(+), 6 deletions(-) > > > > RTE_CRYPTO_SM2_PARTIAL is not set in the PMD as op_capa. Yes, it's true. For that we need to do a small change in the QAT PMD architecture, we will send that soon as another patch or a fix. > > > > diff --git a/doc/guides/cryptodevs/features/qat.ini > > b/doc/guides/cryptodevs/features/qat.ini > > index f41d29158f..219dd1e011 100644 > > --- a/doc/guides/cryptodevs/features/qat.ini > > +++ b/doc/guides/cryptodevs/features/qat.ini > > @@ -71,6 +71,7 @@ ZUC EIA3 = Y > > AES CMAC (128) = Y > > SM3 = Y > > SM3 HMAC = Y > > +SM2 = Y > > > > ; > > ; Supported AEAD algorithms of the 'qat' crypto driver. > > diff --git a/doc/guides/rel_notes/release_24_11.rst > > b/doc/guides/rel_notes/release_24_11.rst > > index e0a9aa55a1..e56824310b 100644 > > --- a/doc/guides/rel_notes/release_24_11.rst > > +++ b/doc/guides/rel_notes/release_24_11.rst > > @@ -67,6 +67,10 @@ New Features > > > > The new statistics are useful for debugging and profiling. > > > > +* **Updated the QuickAssist Technology (QAT) Crypto PMD.** > > + > > + * Added SM2 encryption and decryption algorithms. 
> > + > > > > Removed Items > > ------------- > > diff --git a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > > b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > > index 630c6e1a9b..aa49612ca1 100644 > > --- a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > > +++ b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h > > @@ -1542,6 +1542,9 @@ icp_qat_fw_mmp_ecdsa_verify_gfp_521_input::in > in > > @endlink > > * @li no output parameters > > */ > > > > +#define PKE_ECSM2_ENCRYPTION 0x25221720 #define > PKE_ECSM2_DECRYPTION > > +0x201716e6 > > + > > #define PKE_LIVENESS 0x00000001 > > /**< Functionality ID for PKE_LIVENESS > > * @li 0 input parameter(s) > > diff --git a/drivers/common/qat/qat_adf/qat_pke.h > > b/drivers/common/qat/qat_adf/qat_pke.h > > index f88932a275..ac051e965d 100644 > > --- a/drivers/common/qat/qat_adf/qat_pke.h > > +++ b/drivers/common/qat/qat_adf/qat_pke.h > > @@ -334,4 +334,24 @@ get_sm2_ecdsa_verify_function(void) > > return qat_function; > > } > > > > +static struct qat_asym_function > > +get_sm2_encryption_function(void) > > +{ > > + struct qat_asym_function qat_function = { > > + PKE_ECSM2_ENCRYPTION, 32 > > + }; > > + > > + return qat_function; > > +} > > + > > +static struct qat_asym_function > > +get_sm2_decryption_function(void) > > +{ > > + struct qat_asym_function qat_function = { > > + PKE_ECSM2_DECRYPTION, 32 > > + }; > > + > > + return qat_function; > > +} > > + > > #endif > > diff --git a/drivers/crypto/qat/qat_asym.c > > b/drivers/crypto/qat/qat_asym.c index e43884e69b..3ce48a1486 100644 > > --- a/drivers/crypto/qat/qat_asym.c > > +++ b/drivers/crypto/qat/qat_asym.c > > @@ -932,6 +932,15 @@ sm2_ecdsa_sign_set_input(struct > > icp_qat_fw_pke_request *qat_req, > > qat_req->input_param_count = 3; > > qat_req->output_param_count = 2; > > > > + HEXDUMP("SM2 K test", asym_op->sm2.k.data, > > + cookie->alg_bytesize); > > + HEXDUMP("SM2 K", cookie->input_array[0], > > + cookie->alg_bytesize); > > + HEXDUMP("SM2 msg", cookie->input_array[1], > > + cookie->alg_bytesize); > > + HEXDUMP("SM2 pkey", cookie->input_array[2], > > + cookie->alg_bytesize); > > + > > return RTE_CRYPTO_OP_STATUS_SUCCESS; } > > > > @@ -983,6 +992,114 @@ sm2_ecdsa_sign_collect(struct rte_crypto_asym_op > > *asym_op, } > > > > static int > > +sm2_encryption_set_input(struct icp_qat_fw_pke_request *qat_req, > > + struct qat_asym_op_cookie *cookie, > > + const struct rte_crypto_asym_op *asym_op, > > + const struct rte_crypto_asym_xform *xform) { > > + const struct qat_asym_function qat_function = > > + get_sm2_encryption_function(); > > + const uint32_t qat_func_alignsize = > > + qat_function.bytesize; > > + > > + SET_PKE_LN(asym_op->sm2.k, qat_func_alignsize, 0); > > + SET_PKE_LN(xform->ec.q.x, qat_func_alignsize, 1); > > + SET_PKE_LN(xform->ec.q.y, qat_func_alignsize, 2); > > + > > + cookie->alg_bytesize = qat_function.bytesize; > > + cookie->qat_func_alignsize = qat_function.bytesize; > > + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; > > + qat_req->input_param_count = 3; > > + qat_req->output_param_count = 4; > > + > > + HEXDUMP("SM2 K", cookie->input_array[0], > > + qat_func_alignsize); > > + HEXDUMP("SM2 Q.x", cookie->input_array[1], > > + qat_func_alignsize); > > + HEXDUMP("SM2 Q.y", cookie->input_array[2], > > + qat_func_alignsize); > > + > > + return RTE_CRYPTO_OP_STATUS_SUCCESS; } > > + > > +static uint8_t > > +sm2_encryption_collect(struct rte_crypto_asym_op *asym_op, > > + const struct qat_asym_op_cookie *cookie) { > > + uint32_t alg_bytesize = cookie->alg_bytesize; > 
> + > > + rte_memcpy(asym_op->sm2.C1.x.data, cookie->output_array[0], > > alg_bytesize); > > + rte_memcpy(asym_op->sm2.C1.y.data, cookie->output_array[1], > > alg_bytesize); > > + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[2], > > alg_bytesize); > > + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[3], > > alg_bytesize); > > + asym_op->sm2.C1.x.length = alg_bytesize; > > + asym_op->sm2.C1.y.length = alg_bytesize; > > + asym_op->sm2.kP.x.length = alg_bytesize; > > + asym_op->sm2.kP.y.length = alg_bytesize; > > + > > + HEXDUMP("C1[x1]", cookie->output_array[0], > > + alg_bytesize); > > + HEXDUMP("C1[y]", cookie->output_array[1], > > + alg_bytesize); > > + HEXDUMP("kP[x]", cookie->output_array[2], > > + alg_bytesize); > > + HEXDUMP("kP[y]", cookie->output_array[3], > > + alg_bytesize); > > + return RTE_CRYPTO_OP_STATUS_SUCCESS; } > > + > > + > > +static int > > +sm2_decryption_set_input(struct icp_qat_fw_pke_request *qat_req, > > + struct qat_asym_op_cookie *cookie, > > + const struct rte_crypto_asym_op *asym_op, > > + const struct rte_crypto_asym_xform *xform) { > > + const struct qat_asym_function qat_function = > > + get_sm2_decryption_function(); > > + const uint32_t qat_func_alignsize = > > + qat_function.bytesize; > > + > > + SET_PKE_LN(xform->ec.pkey, qat_func_alignsize, 0); > > + SET_PKE_LN(asym_op->sm2.C1.x, qat_func_alignsize, 1); > > + SET_PKE_LN(asym_op->sm2.C1.y, qat_func_alignsize, 2); > > + > > + cookie->alg_bytesize = qat_function.bytesize; > > + cookie->qat_func_alignsize = qat_function.bytesize; > > + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; > > + qat_req->input_param_count = 3; > > + qat_req->output_param_count = 2; > > + > > + HEXDUMP("d", cookie->input_array[0], > > + qat_func_alignsize); > > + HEXDUMP("C1[x]", cookie->input_array[1], > > + qat_func_alignsize); > > + HEXDUMP("C1[y]", cookie->input_array[2], > > + qat_func_alignsize); > > + > > + return RTE_CRYPTO_OP_STATUS_SUCCESS; } > > + > > + > > +static uint8_t > > +sm2_decryption_collect(struct rte_crypto_asym_op *asym_op, > > + const struct qat_asym_op_cookie *cookie) { > > + uint32_t alg_bytesize = cookie->alg_bytesize; > > + > > + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[0], > > alg_bytesize); > > + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[1], > > alg_bytesize); > > + asym_op->sm2.kP.x.length = alg_bytesize; > > + asym_op->sm2.kP.y.length = alg_bytesize; > > + > > + HEXDUMP("kP[x]", cookie->output_array[0], > > + alg_bytesize); > > + HEXDUMP("kP[y]", cookie->output_array[1], > > + alg_bytesize); > > + return RTE_CRYPTO_OP_STATUS_SUCCESS; } > > + > > +static int > > asym_set_input(struct icp_qat_fw_pke_request *qat_req, > > struct qat_asym_op_cookie *cookie, > > const struct rte_crypto_asym_op *asym_op, @@ -1014,14 > +1131,20 @@ > > asym_set_input(struct icp_qat_fw_pke_request *qat_req, > > asym_op, xform); > > } > > case RTE_CRYPTO_ASYM_XFORM_SM2: > > - if (asym_op->sm2.op_type == > > - RTE_CRYPTO_ASYM_OP_VERIFY) { > > + if (asym_op->sm2.op_type == > > RTE_CRYPTO_ASYM_OP_ENCRYPT) { > > + return sm2_encryption_set_input(qat_req, cookie, > > + asym_op, xform); > > + } else if (asym_op->sm2.op_type == > > RTE_CRYPTO_ASYM_OP_DECRYPT) { > > + return sm2_decryption_set_input(qat_req, cookie, > > + asym_op, xform); > > + } else if (asym_op->sm2.op_type == > > RTE_CRYPTO_ASYM_OP_VERIFY) { > > return sm2_ecdsa_verify_set_input(qat_req, cookie, > > asym_op, xform); > > - } else { > > + } else if (asym_op->sm2.op_type == > > RTE_CRYPTO_ASYM_OP_SIGN) { > > 
return sm2_ecdsa_sign_set_input(qat_req, cookie, > > asym_op, xform); > > } > > + break; > > default: > > QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto > xform"); > > return -EINVAL; > > @@ -1113,7 +1236,13 @@ qat_asym_collect_response(struct rte_crypto_op > > *op, > > case RTE_CRYPTO_ASYM_XFORM_ECDH: > > return ecdh_collect(asym_op, cookie); > > case RTE_CRYPTO_ASYM_XFORM_SM2: > > - return sm2_ecdsa_sign_collect(asym_op, cookie); > > + if (asym_op->sm2.op_type == > > RTE_CRYPTO_ASYM_OP_ENCRYPT) > > + return sm2_encryption_collect(asym_op, cookie); > > + else if (asym_op->sm2.op_type == > > RTE_CRYPTO_ASYM_OP_DECRYPT) > > + return sm2_decryption_collect(asym_op, cookie); > > + else > > + return sm2_ecdsa_sign_collect(asym_op, cookie); > > + > > default: > > QAT_LOG(ERR, "Not supported xform type"); > > return RTE_CRYPTO_OP_STATUS_ERROR; @@ -1385,9 +1514,8 > @@ > > qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused, > > case RTE_CRYPTO_ASYM_XFORM_ECDSA: > > case RTE_CRYPTO_ASYM_XFORM_ECPM: > > case RTE_CRYPTO_ASYM_XFORM_ECDH: > > - session_set_ec(qat_session, xform); > > - break; > > case RTE_CRYPTO_ASYM_XFORM_SM2: > > + session_set_ec(qat_session, xform); > > break; > > default: > > ret = -ENOTSUP; > > -- > > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
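A note on what the partial path above leaves to the application: with the RTE_CRYPTO_SM2_PARTIAL flow, the QAT PMD only returns the elliptic curve points C1 = [k]G and [k]P = (x2, y2); deriving C2 and C3 and assembling the C1 || C2 || C3 ciphertext described in rte_crypto_asym.h is still software work. The sketch below shows that completion step under the assumption that an SM3 implementation is available; sm3_hash() and sm3_kdf() are hypothetical helpers, not DPDK APIs.

#include <stdint.h>
#include <stddef.h>
#include <string.h>

#define SM2_N 32	/* byte size of the SM2 prime field */

/* Hypothetical SM3 helpers - any SM3 library can stand in here. */
void sm3_hash(const uint8_t *in, size_t len, uint8_t out[32]);
void sm3_kdf(const uint8_t *z, size_t zlen, uint8_t *out, size_t out_len);

/*
 * Combine the PMD output C1 = (x1,y1), [k]P = (x2,y2) with plaintext msg
 * into C1 || C2 || C3, following the cipher layout documented in
 * rte_crypto_sm2_op_param: C1 = 0x04 || x1 || y1, C2 = len(msg) bytes,
 * C3 = 32 bytes. cipher must hold 1 + 2*SM2_N + msg_len + 32 bytes.
 */
static void
sm2_partial_encrypt_finish(const uint8_t x1[SM2_N], const uint8_t y1[SM2_N],
		const uint8_t x2[SM2_N], const uint8_t y2[SM2_N],
		const uint8_t *msg, size_t msg_len, uint8_t *cipher)
{
	uint8_t buf[2 * SM2_N + msg_len];	/* VLA, sketch only */
	uint8_t *c2 = &cipher[1 + 2 * SM2_N];
	size_t i;

	/* C1: uncompressed point prefix plus x1 || y1 */
	cipher[0] = 0x04;
	memcpy(&cipher[1], x1, SM2_N);
	memcpy(&cipher[1 + SM2_N], y1, SM2_N);

	/* C2 = msg xor KDF(x2 || y2, msg_len) */
	memcpy(buf, x2, SM2_N);
	memcpy(buf + SM2_N, y2, SM2_N);
	sm3_kdf(buf, 2 * SM2_N, c2, msg_len);
	for (i = 0; i < msg_len; i++)
		c2[i] ^= msg[i];

	/* C3 = SM3(x2 || msg || y2) */
	memcpy(buf + SM2_N, msg, msg_len);
	memcpy(buf + SM2_N + msg_len, y2, SM2_N);
	sm3_hash(buf, 2 * SM2_N + msg_len, c2 + msg_len);
}

The decryption side is symmetric; a matching sketch follows the v5 4/4 test patch below.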
* [PATCH v5 4/4] app/test: add test sm2 C1/Kp test cases 2024-10-09 13:01 ` [PATCH v5 0/4] " Arkadiusz Kusztal ` (2 preceding siblings ...) 2024-10-09 13:01 ` [PATCH v5 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal @ 2024-10-09 13:01 ` Arkadiusz Kusztal 2024-10-11 12:16 ` [EXTERNAL] " Akhil Goyal 3 siblings, 1 reply; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-09 13:01 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal This commit adds tests cases to be used when C1 or kP elliptic curve points need to be computed. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- app/test/test_cryptodev_asym.c | 138 ++++++++++++++++++++++++++++- app/test/test_cryptodev_sm2_test_vectors.h | 112 ++++++++++++++++++++++- 2 files changed, 246 insertions(+), 4 deletions(-) diff --git a/app/test/test_cryptodev_asym.c b/app/test/test_cryptodev_asym.c index f0b5d38543..6fc7e87c1f 100644 --- a/app/test/test_cryptodev_asym.c +++ b/app/test/test_cryptodev_asym.c @@ -2635,6 +2635,8 @@ test_sm2_sign(void) asym_op->sm2.k.data = input_params.k.data; asym_op->sm2.k.length = input_params.k.length; } + asym_op->sm2.k.data = input_params.k.data; + asym_op->sm2.k.length = input_params.k.length; /* Init out buf */ asym_op->sm2.r.data = output_buf_r; @@ -3184,7 +3186,7 @@ static int send_one(void) ticks++; if (ticks >= DEQ_TIMEOUT) { RTE_LOG(ERR, USER1, - "line %u FAILED: Cannot dequeue the crypto op on device %d", + "line %u FAILED: Cannot dequeue the crypto op on device, timeout %d", __LINE__, params->valid_devs[0]); return TEST_FAILED; } @@ -3489,6 +3491,132 @@ kat_rsa_decrypt_crt(const void *data) return 0; } +static int +test_sm2_partial_encryption(const void *data) +{ + struct rte_crypto_asym_xform xform = { 0 }; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_C1_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_C1_y1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + struct rte_cryptodev_asym_capability_idx idx; + const struct rte_cryptodev_asymmetric_xform_capability *capa; + + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); + if (capa == NULL) + return TEST_SKIPPED; + if (!rte_cryptodev_asym_xform_capability_check_opcap(capa, + RTE_CRYPTO_ASYM_OP_ENCRYPT, RTE_CRYPTO_SM2_PARTIAL)) { + return TEST_SKIPPED; + } + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.curve_id = RTE_CRYPTO_EC_GROUP_SM2; + xform.ec.q = test_vector->pubkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT; + self->op->asym->sm2.k = test_vector->k; + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.C1.x.data = result_C1_x1; + self->op->asym->sm2.C1.y.data = result_C1_y1; + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "C1[x]", self->op->asym->sm2.C1.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "C1[y]", self->op->asym->sm2.C1.y.data, + self->op->asym->sm2.C1.y.length); + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + 
self->op->asym->sm2.kP.x.length); + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.kP.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.x.data, + self->op->asym->sm2.C1.x.data, + test_vector->C1.x.length, + "Incorrect value of C1[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.y.data, + self->op->asym->sm2.C1.y.data, + test_vector->C1.y.length, + "Incorrect value of C1[y]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return TEST_SUCCESS; +} + +static int +test_sm2_partial_decryption(const void *data) +{ + struct rte_crypto_asym_xform xform = {}; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + struct rte_cryptodev_asym_capability_idx idx; + const struct rte_cryptodev_asymmetric_xform_capability *capa; + + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); + if (capa == NULL) + return TEST_SKIPPED; + if (!rte_cryptodev_asym_xform_capability_check_opcap(capa, + RTE_CRYPTO_ASYM_OP_DECRYPT, RTE_CRYPTO_SM2_PARTIAL)) { + return TEST_SKIPPED; + } + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.pkey = test_vector->pkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT; + self->op->asym->sm2.C1 = test_vector->C1; + + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.C1.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return 0; +} + static struct unit_test_suite cryptodev_openssl_asym_testsuite = { .suite_name = "Crypto Device OPENSSL ASYM Unit Test Suite", .setup = testsuite_setup, @@ -3553,6 +3681,14 @@ static struct unit_test_suite cryptodev_qat_asym_testsuite = { .teardown = testsuite_teardown, .unit_test_cases = { TEST_CASE_NAMED_WITH_DATA( + "SM2 encryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_partial_encryption, &sm2_enc_hw_t1), + TEST_CASE_NAMED_WITH_DATA( + "SM2 decryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_partial_decryption, &sm2_enc_hw_t1), + TEST_CASE_NAMED_WITH_DATA( "Modular Exponentiation (mod=128, base=20, exp=3, res=128)", ut_setup_asym, ut_teardown_asym, modular_exponentiation, &modex_test_case_m128_b20_e3), diff --git a/app/test/test_cryptodev_sm2_test_vectors.h b/app/test/test_cryptodev_sm2_test_vectors.h index 41f5f7074a..92f7e77671 100644 --- 
a/app/test/test_cryptodev_sm2_test_vectors.h +++ b/app/test/test_cryptodev_sm2_test_vectors.h @@ -8,19 +8,125 @@ #include "rte_crypto_asym.h" struct crypto_testsuite_sm2_params { - rte_crypto_param pubkey_qx; - rte_crypto_param pubkey_qy; + union { + struct { + rte_crypto_param pubkey_qx; + rte_crypto_param pubkey_qy; + }; + struct rte_crypto_ec_point pubkey; + }; rte_crypto_param pkey; rte_crypto_param k; rte_crypto_param sign_r; rte_crypto_param sign_s; rte_crypto_param id; - rte_crypto_param cipher; + union { + rte_crypto_param cipher; + struct { + struct rte_crypto_ec_point C1; + struct rte_crypto_ec_point kP; + }; + }; rte_crypto_param message; rte_crypto_param digest; int curve; }; +uint8_t sm2_enc_pub_x_t1[] = { + 0x26, 0xf1, 0xf3, 0xef, 0x12, 0x27, 0x85, 0xd1, + 0x7d, 0x38, 0x70, 0xc2, 0x43, 0x46, 0x50, 0x36, + 0x3f, 0xdf, 0x4b, 0x2f, 0x45, 0x0e, 0x8e, 0xd1, + 0xb6, 0x0f, 0xdc, 0x1f, 0xc6, 0xf0, 0x19, 0xab +}; +uint8_t sm2_enc_pub_y_t1[] = { + 0xd9, 0x19, 0x8b, 0xdb, 0xef, 0xa5, 0x84, 0x76, + 0xec, 0x82, 0x25, 0x12, 0x5b, 0x8c, 0xe3, 0xe1, + 0x0a, 0x10, 0x0d, 0xc6, 0x97, 0x6c, 0xc1, 0x89, + 0xd9, 0x6d, 0xa6, 0x88, 0x9e, 0xbc, 0xd3, 0x7a +}; +uint8_t sm2_k_t1[] = { + 0x12, 0x34, 0x56, 0x78, 0xB9, 0x6E, 0x5A, 0xF7, + 0x0B, 0xD4, 0x80, 0xB4, 0x72, 0x40, 0x9A, 0x9A, + 0x32, 0x72, 0x57, 0xF1, 0xEB, 0xB7, 0x3F, 0x5B, + 0x07, 0x33, 0x54, 0xB2, 0x48, 0x66, 0x85, 0x63 +}; + +uint8_t sm2_C1_x_t1[] = { + 0x15, 0xf6, 0xb7, 0x49, 0x00, 0x39, 0x73, 0x9d, + 0x5b, 0xb3, 0xd3, 0xe9, 0x1d, 0xe4, 0xc8, 0xbd, + 0x08, 0xe3, 0x6a, 0x22, 0xff, 0x1a, 0xbf, 0xdc, + 0x75, 0x6b, 0x12, 0x85, 0x81, 0xc5, 0x8b, 0xcf +}; + +uint8_t sm2_C1_y_t1[] = { + 0x6a, 0x92, 0xd4, 0xd8, 0x13, 0xec, 0x8f, 0x9a, + 0x9d, 0xbe, 0x51, 0x47, 0x6f, 0x54, 0xc5, 0x41, + 0x98, 0xf5, 0x5f, 0x83, 0xce, 0x1c, 0x18, 0x1a, + 0x48, 0xbd, 0xeb, 0x38, 0x13, 0x67, 0x0d, 0x06 +}; + +uint8_t sm2_kP_x_t1[] = { + 0x6b, 0xfb, 0x9a, 0xcb, 0xc6, 0xb6, 0x36, 0x31, + 0x0f, 0xd1, 0xdd, 0x9c, 0x9f, 0x17, 0x5f, 0x3f, + 0x68, 0x13, 0x96, 0xd2, 0x54, 0x5b, 0xa6, 0x19, + 0x78, 0x1f, 0x87, 0x3d, 0x81, 0xc3, 0x21, 0x01 +}; + +uint8_t sm2_kP_y_t1[] = { + 0xa4, 0x08, 0xf3, 0x74, 0x35, 0x51, 0x8c, 0x81, + 0x06, 0x4c, 0x8f, 0x31, 0x49, 0xe3, 0x5b, 0x4d, + 0xfc, 0x3d, 0x19, 0xac, 0x7d, 0x07, 0xd0, 0x9a, + 0x99, 0x5a, 0x25, 0x16, 0x66, 0xff, 0x41, 0x3c +}; + +uint8_t sm2_kP_d_t1[] = { + 0x6F, 0xCB, 0xA2, 0xEF, 0x9A, 0xE0, 0xAB, 0x90, + 0x2B, 0xC3, 0xBD, 0xE3, 0xFF, 0x91, 0x5D, 0x44, + 0xBA, 0x4C, 0xC7, 0x8F, 0x88, 0xE2, 0xF8, 0xE7, + 0xF8, 0x99, 0x6D, 0x3B, 0x8C, 0xCE, 0xED, 0xEE +}; + +struct crypto_testsuite_sm2_params sm2_enc_hw_t1 = { + .k = { + .data = sm2_k_t1, + .length = sizeof(sm2_k_t1) + }, + .pubkey = { + .x = { + .data = sm2_enc_pub_x_t1, + .length = sizeof(sm2_enc_pub_x_t1) + }, + .y = { + .data = sm2_enc_pub_y_t1, + .length = sizeof(sm2_enc_pub_y_t1) + } + }, + .C1 = { + .x = { + .data = sm2_C1_x_t1, + .length = sizeof(sm2_C1_x_t1) + }, + .y = { + .data = sm2_C1_y_t1, + .length = sizeof(sm2_C1_y_t1) + } + }, + .kP = { + .x = { + .data = sm2_kP_x_t1, + .length = sizeof(sm2_kP_x_t1) + }, + .y = { + .data = sm2_kP_y_t1, + .length = sizeof(sm2_kP_y_t1) + } + }, + .pkey = { + .data = sm2_kP_d_t1, + .length = sizeof(sm2_kP_d_t1) + } +}; + static uint8_t fp256_pkey[] = { 0x77, 0x84, 0x35, 0x65, 0x4c, 0x7a, 0x6d, 0xb1, 0x1e, 0x63, 0x0b, 0x41, 0x97, 0x36, 0x04, 0xf4, -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
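The decryption tests above stop at checking [k]P = (x2, y2), which mirrors what the hardware hands back; recovering the plaintext from a full C1 || C2 || C3 ciphertext is again left to software. A matching sketch, reusing SM2_N and the hypothetical sm3_hash()/sm3_kdf() helpers declared in the encryption note earlier:

/*
 * Given [k]P = (x2,y2) returned by the PMD for the C1 carried in
 * cipher, recover msg from C2 and verify C3. Returns 0 on success,
 * -1 if the C3 check fails. Sketch only; no length validation.
 */
static int
sm2_partial_decrypt_finish(const uint8_t x2[SM2_N], const uint8_t y2[SM2_N],
		const uint8_t *cipher, size_t cipher_len, uint8_t *msg)
{
	size_t msg_len = cipher_len - (1 + 2 * SM2_N) - 32;
	const uint8_t *c2 = &cipher[1 + 2 * SM2_N];
	const uint8_t *c3 = c2 + msg_len;
	uint8_t buf[2 * SM2_N + msg_len];	/* VLA, sketch only */
	uint8_t c3_check[32];
	size_t i;

	/* msg = C2 xor KDF(x2 || y2, msg_len) */
	memcpy(buf, x2, SM2_N);
	memcpy(buf + SM2_N, y2, SM2_N);
	sm3_kdf(buf, 2 * SM2_N, msg, msg_len);
	for (i = 0; i < msg_len; i++)
		msg[i] ^= c2[i];

	/* C3 must equal SM3(x2 || msg || y2) */
	memcpy(buf + SM2_N, msg, msg_len);
	memcpy(buf + SM2_N + msg_len, y2, SM2_N);
	sm3_hash(buf, 2 * SM2_N + msg_len, c3_check);
	return memcmp(c3_check, c3, 32) == 0 ? 0 : -1;
}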
* RE: [EXTERNAL] [PATCH v5 4/4] app/test: add test sm2 C1/Kp test cases 2024-10-09 13:01 ` [PATCH v5 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal @ 2024-10-11 12:16 ` Akhil Goyal 0 siblings, 0 replies; 42+ messages in thread From: Akhil Goyal @ 2024-10-11 12:16 UTC (permalink / raw) To: Arkadiusz Kusztal, dev; +Cc: brian.dooley > This commit adds tests cases to be used when C1 or kP elliptic > curve points need to be computed. > > Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> > --- > app/test/test_cryptodev_asym.c | 138 > ++++++++++++++++++++++++++++- > app/test/test_cryptodev_sm2_test_vectors.h | 112 ++++++++++++++++++++++- > 2 files changed, 246 insertions(+), 4 deletions(-) > > diff --git a/app/test/test_cryptodev_asym.c b/app/test/test_cryptodev_asym.c > index f0b5d38543..6fc7e87c1f 100644 > --- a/app/test/test_cryptodev_asym.c > +++ b/app/test/test_cryptodev_asym.c > @@ -2635,6 +2635,8 @@ test_sm2_sign(void) > asym_op->sm2.k.data = input_params.k.data; > asym_op->sm2.k.length = input_params.k.length; > } > + asym_op->sm2.k.data = input_params.k.data; > + asym_op->sm2.k.length = input_params.k.length; > > /* Init out buf */ > asym_op->sm2.r.data = output_buf_r; > @@ -3184,7 +3186,7 @@ static int send_one(void) > ticks++; > if (ticks >= DEQ_TIMEOUT) { > RTE_LOG(ERR, USER1, > - "line %u FAILED: Cannot dequeue the crypto op > on device %d", > + "line %u FAILED: Cannot dequeue the crypto op > on device, timeout %d", > __LINE__, params->valid_devs[0]); Unnecessary and incorrect change. > return TEST_FAILED; > } > @@ -3489,6 +3491,132 @@ kat_rsa_decrypt_crt(const void *data) > return 0; > } > > +static int > +test_sm2_partial_encryption(const void *data) > +{ > + struct rte_crypto_asym_xform xform = { 0 }; > + const uint8_t dev_id = params->valid_devs[0]; > + const struct crypto_testsuite_sm2_params *test_vector = data; > + uint8_t result_C1_x1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_C1_y1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; > + struct rte_cryptodev_asym_capability_idx idx; > + const struct rte_cryptodev_asymmetric_xform_capability *capa; > + > + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; > + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); > + if (capa == NULL) > + return TEST_SKIPPED; > + if (!rte_cryptodev_asym_xform_capability_check_opcap(capa, > + RTE_CRYPTO_ASYM_OP_ENCRYPT, > RTE_CRYPTO_SM2_PARTIAL)) { > + return TEST_SKIPPED; > + } > + > + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; > + xform.ec.curve_id = RTE_CRYPTO_EC_GROUP_SM2; > + xform.ec.q = test_vector->pubkey; > + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT; > + self->op->asym->sm2.k = test_vector->k; > + if (rte_cryptodev_asym_session_create(dev_id, &xform, > + params->session_mpool, &self->sess) < 0) { > + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", > + __LINE__); > + return TEST_FAILED; > + } > + rte_crypto_op_attach_asym_session(self->op, self->sess); > + > + self->op->asym->sm2.C1.x.data = result_C1_x1; > + self->op->asym->sm2.C1.y.data = result_C1_y1; > + self->op->asym->sm2.kP.x.data = result_kP_x1; > + self->op->asym->sm2.kP.y.data = result_kP_y1; > + TEST_ASSERT_SUCCESS(send_one(), > + "Failed to process crypto op"); > + > + debug_hexdump(stdout, "C1[x]", self->op->asym->sm2.C1.x.data, > + self->op->asym->sm2.C1.x.length); > + debug_hexdump(stdout, "C1[y]", self->op->asym->sm2.C1.y.data, > + self->op->asym->sm2.C1.y.length); > + debug_hexdump(stdout, 
"kP[x]", self->op->asym->sm2.kP.x.data, > + self->op->asym->sm2.kP.x.length); > + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, > + self->op->asym->sm2.kP.y.length); > + > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.x.data, > + self->op->asym->sm2.C1.x.data, > + test_vector->C1.x.length, > + "Incorrect value of C1[x]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.y.data, > + self->op->asym->sm2.C1.y.data, > + test_vector->C1.y.length, > + "Incorrect value of C1[y]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, > + self->op->asym->sm2.kP.x.data, > + test_vector->kP.x.length, > + "Incorrect value of kP[x]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, > + self->op->asym->sm2.kP.y.data, > + test_vector->kP.y.length, > + "Incorrect value of kP[y]\n"); > + > + return TEST_SUCCESS; > +} > + > +static int > +test_sm2_partial_decryption(const void *data) > +{ > + struct rte_crypto_asym_xform xform = {}; > + const uint8_t dev_id = params->valid_devs[0]; > + const struct crypto_testsuite_sm2_params *test_vector = data; > + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; > + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; > + struct rte_cryptodev_asym_capability_idx idx; > + const struct rte_cryptodev_asymmetric_xform_capability *capa; > + > + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; > + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); > + if (capa == NULL) > + return TEST_SKIPPED; > + if (!rte_cryptodev_asym_xform_capability_check_opcap(capa, > + RTE_CRYPTO_ASYM_OP_DECRYPT, > RTE_CRYPTO_SM2_PARTIAL)) { > + return TEST_SKIPPED; > + } > + > + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; > + xform.ec.pkey = test_vector->pkey; > + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT; > + self->op->asym->sm2.C1 = test_vector->C1; > + > + if (rte_cryptodev_asym_session_create(dev_id, &xform, > + params->session_mpool, &self->sess) < 0) { > + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", > + __LINE__); > + return TEST_FAILED; > + } > + rte_crypto_op_attach_asym_session(self->op, self->sess); > + > + self->op->asym->sm2.kP.x.data = result_kP_x1; > + self->op->asym->sm2.kP.y.data = result_kP_y1; > + TEST_ASSERT_SUCCESS(send_one(), > + "Failed to process crypto op"); > + > + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, > + self->op->asym->sm2.C1.x.length); > + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, > + self->op->asym->sm2.C1.y.length); > + > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, > + self->op->asym->sm2.kP.x.data, > + test_vector->kP.x.length, > + "Incorrect value of kP[x]\n"); > + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, > + self->op->asym->sm2.kP.y.data, > + test_vector->kP.y.length, > + "Incorrect value of kP[y]\n"); > + > + return 0; > +} > + > static struct unit_test_suite cryptodev_openssl_asym_testsuite = { > .suite_name = "Crypto Device OPENSSL ASYM Unit Test Suite", > .setup = testsuite_setup, > @@ -3553,6 +3681,14 @@ static struct unit_test_suite > cryptodev_qat_asym_testsuite = { > .teardown = testsuite_teardown, > .unit_test_cases = { > TEST_CASE_NAMED_WITH_DATA( > + "SM2 encryption - test case 1", > + ut_setup_asym, ut_teardown_asym, > + test_sm2_partial_encryption, &sm2_enc_hw_t1), > + TEST_CASE_NAMED_WITH_DATA( > + "SM2 decryption - test case 1", > + ut_setup_asym, ut_teardown_asym, > + test_sm2_partial_decryption, &sm2_enc_hw_t1), > + TEST_CASE_NAMED_WITH_DATA( > "Modular Exponentiation (mod=128, base=20, exp=3, > res=128)", > ut_setup_asym, 
ut_teardown_asym, > modular_exponentiation, > &modex_test_case_m128_b20_e3), > diff --git a/app/test/test_cryptodev_sm2_test_vectors.h > b/app/test/test_cryptodev_sm2_test_vectors.h > index 41f5f7074a..92f7e77671 100644 > --- a/app/test/test_cryptodev_sm2_test_vectors.h > +++ b/app/test/test_cryptodev_sm2_test_vectors.h > @@ -8,19 +8,125 @@ > #include "rte_crypto_asym.h" > > struct crypto_testsuite_sm2_params { > - rte_crypto_param pubkey_qx; > - rte_crypto_param pubkey_qy; > + union { > + struct { > + rte_crypto_param pubkey_qx; > + rte_crypto_param pubkey_qy; > + }; > + struct rte_crypto_ec_point pubkey; > + }; > rte_crypto_param pkey; > rte_crypto_param k; > rte_crypto_param sign_r; > rte_crypto_param sign_s; > rte_crypto_param id; > - rte_crypto_param cipher; > + union { > + rte_crypto_param cipher; > + struct { > + struct rte_crypto_ec_point C1; > + struct rte_crypto_ec_point kP; It is better to use variable names in lower case - c1 and kp should be fine. > + }; > + }; > rte_crypto_param message; > rte_crypto_param digest; > int curve; > }; > > +uint8_t sm2_enc_pub_x_t1[] = { > + 0x26, 0xf1, 0xf3, 0xef, 0x12, 0x27, 0x85, 0xd1, > + 0x7d, 0x38, 0x70, 0xc2, 0x43, 0x46, 0x50, 0x36, > + 0x3f, 0xdf, 0x4b, 0x2f, 0x45, 0x0e, 0x8e, 0xd1, > + 0xb6, 0x0f, 0xdc, 0x1f, 0xc6, 0xf0, 0x19, 0xab > +}; > +uint8_t sm2_enc_pub_y_t1[] = { > + 0xd9, 0x19, 0x8b, 0xdb, 0xef, 0xa5, 0x84, 0x76, > + 0xec, 0x82, 0x25, 0x12, 0x5b, 0x8c, 0xe3, 0xe1, > + 0x0a, 0x10, 0x0d, 0xc6, 0x97, 0x6c, 0xc1, 0x89, > + 0xd9, 0x6d, 0xa6, 0x88, 0x9e, 0xbc, 0xd3, 0x7a > +}; > +uint8_t sm2_k_t1[] = { > + 0x12, 0x34, 0x56, 0x78, 0xB9, 0x6E, 0x5A, 0xF7, > + 0x0B, 0xD4, 0x80, 0xB4, 0x72, 0x40, 0x9A, 0x9A, > + 0x32, 0x72, 0x57, 0xF1, 0xEB, 0xB7, 0x3F, 0x5B, > + 0x07, 0x33, 0x54, 0xB2, 0x48, 0x66, 0x85, 0x63 > +}; > + > +uint8_t sm2_C1_x_t1[] = { > + 0x15, 0xf6, 0xb7, 0x49, 0x00, 0x39, 0x73, 0x9d, > + 0x5b, 0xb3, 0xd3, 0xe9, 0x1d, 0xe4, 0xc8, 0xbd, > + 0x08, 0xe3, 0x6a, 0x22, 0xff, 0x1a, 0xbf, 0xdc, > + 0x75, 0x6b, 0x12, 0x85, 0x81, 0xc5, 0x8b, 0xcf > +}; > + > +uint8_t sm2_C1_y_t1[] = { > + 0x6a, 0x92, 0xd4, 0xd8, 0x13, 0xec, 0x8f, 0x9a, > + 0x9d, 0xbe, 0x51, 0x47, 0x6f, 0x54, 0xc5, 0x41, > + 0x98, 0xf5, 0x5f, 0x83, 0xce, 0x1c, 0x18, 0x1a, > + 0x48, 0xbd, 0xeb, 0x38, 0x13, 0x67, 0x0d, 0x06 > +}; > + > +uint8_t sm2_kP_x_t1[] = { > + 0x6b, 0xfb, 0x9a, 0xcb, 0xc6, 0xb6, 0x36, 0x31, > + 0x0f, 0xd1, 0xdd, 0x9c, 0x9f, 0x17, 0x5f, 0x3f, > + 0x68, 0x13, 0x96, 0xd2, 0x54, 0x5b, 0xa6, 0x19, > + 0x78, 0x1f, 0x87, 0x3d, 0x81, 0xc3, 0x21, 0x01 > +}; > + > +uint8_t sm2_kP_y_t1[] = { > + 0xa4, 0x08, 0xf3, 0x74, 0x35, 0x51, 0x8c, 0x81, > + 0x06, 0x4c, 0x8f, 0x31, 0x49, 0xe3, 0x5b, 0x4d, > + 0xfc, 0x3d, 0x19, 0xac, 0x7d, 0x07, 0xd0, 0x9a, > + 0x99, 0x5a, 0x25, 0x16, 0x66, 0xff, 0x41, 0x3c > +}; > + > +uint8_t sm2_kP_d_t1[] = { > + 0x6F, 0xCB, 0xA2, 0xEF, 0x9A, 0xE0, 0xAB, 0x90, > + 0x2B, 0xC3, 0xBD, 0xE3, 0xFF, 0x91, 0x5D, 0x44, > + 0xBA, 0x4C, 0xC7, 0x8F, 0x88, 0xE2, 0xF8, 0xE7, > + 0xF8, 0x99, 0x6D, 0x3B, 0x8C, 0xCE, 0xED, 0xEE > +}; Fix mixed case for the arrays defined above. 
> + > +struct crypto_testsuite_sm2_params sm2_enc_hw_t1 = { > + .k = { > + .data = sm2_k_t1, > + .length = sizeof(sm2_k_t1) > + }, > + .pubkey = { > + .x = { > + .data = sm2_enc_pub_x_t1, > + .length = sizeof(sm2_enc_pub_x_t1) > + }, > + .y = { > + .data = sm2_enc_pub_y_t1, > + .length = sizeof(sm2_enc_pub_y_t1) > + } > + }, > + .C1 = { > + .x = { > + .data = sm2_C1_x_t1, > + .length = sizeof(sm2_C1_x_t1) > + }, > + .y = { > + .data = sm2_C1_y_t1, > + .length = sizeof(sm2_C1_y_t1) > + } > + }, > + .kP = { > + .x = { > + .data = sm2_kP_x_t1, > + .length = sizeof(sm2_kP_x_t1) > + }, > + .y = { > + .data = sm2_kP_y_t1, > + .length = sizeof(sm2_kP_y_t1) > + } > + }, > + .pkey = { > + .data = sm2_kP_d_t1, > + .length = sizeof(sm2_kP_d_t1) > + } > +}; > + > static uint8_t fp256_pkey[] = { > 0x77, 0x84, 0x35, 0x65, 0x4c, 0x7a, 0x6d, 0xb1, > 0x1e, 0x63, 0x0b, 0x41, 0x97, 0x36, 0x04, 0xf4, > -- > 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
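For clarity, the naming change requested in the review above would look like this in the test-vector header; the field contents are unchanged and only the C1/kP spellings become c1/kp (the mixed-case array names would follow the same pattern). This is only an illustration of the comment, not necessarily the exact code that was merged.

struct crypto_testsuite_sm2_params {
	union {
		struct {
			rte_crypto_param pubkey_qx;
			rte_crypto_param pubkey_qy;
		};
		struct rte_crypto_ec_point pubkey;
	};
	rte_crypto_param pkey;
	rte_crypto_param k;
	rte_crypto_param sign_r;
	rte_crypto_param sign_s;
	rte_crypto_param id;
	union {
		rte_crypto_param cipher;
		struct {
			struct rte_crypto_ec_point c1;	/* was C1 */
			struct rte_crypto_ec_point kp;	/* was kP */
		};
	};
	rte_crypto_param message;
	rte_crypto_param digest;
	int curve;
};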
* [PATCH v4 3/4] crypto/qat: add sm2 encryption/decryption function 2024-10-08 18:14 ` [PATCH v4 0/4] add ec points to sm2 op Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal @ 2024-10-08 18:14 ` Arkadiusz Kusztal 2024-10-08 18:14 ` [PATCH v4 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal 3 siblings, 0 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 18:14 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal This commit adds SM2 elliptic curve based asymmetric encryption and decryption to the Intel QuickAssist Technology PMD. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- doc/guides/cryptodevs/features/qat.ini | 1 + doc/guides/rel_notes/release_24_11.rst | 4 + drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h | 3 + drivers/common/qat/qat_adf/qat_pke.h | 20 ++++ drivers/crypto/qat/qat_asym.c | 140 +++++++++++++++++++++++- 5 files changed, 162 insertions(+), 6 deletions(-) diff --git a/doc/guides/cryptodevs/features/qat.ini b/doc/guides/cryptodevs/features/qat.ini index f41d29158f..219dd1e011 100644 --- a/doc/guides/cryptodevs/features/qat.ini +++ b/doc/guides/cryptodevs/features/qat.ini @@ -71,6 +71,7 @@ ZUC EIA3 = Y AES CMAC (128) = Y SM3 = Y SM3 HMAC = Y +SM2 = Y ; ; Supported AEAD algorithms of the 'qat' crypto driver. diff --git a/doc/guides/rel_notes/release_24_11.rst b/doc/guides/rel_notes/release_24_11.rst index e0a9aa55a1..e56824310b 100644 --- a/doc/guides/rel_notes/release_24_11.rst +++ b/doc/guides/rel_notes/release_24_11.rst @@ -67,6 +67,10 @@ New Features The new statistics are useful for debugging and profiling. +* **Updated the QuickAssist Technology (QAT) Crypto PMD.** + + * Added SM2 encryption and decryption algorithms. 
+ Removed Items ------------- diff --git a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h index 630c6e1a9b..aa49612ca1 100644 --- a/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h +++ b/drivers/common/qat/qat_adf/icp_qat_fw_mmp_ids.h @@ -1542,6 +1542,9 @@ icp_qat_fw_mmp_ecdsa_verify_gfp_521_input::in in @endlink * @li no output parameters */ +#define PKE_ECSM2_ENCRYPTION 0x25221720 +#define PKE_ECSM2_DECRYPTION 0x201716e6 + #define PKE_LIVENESS 0x00000001 /**< Functionality ID for PKE_LIVENESS * @li 0 input parameter(s) diff --git a/drivers/common/qat/qat_adf/qat_pke.h b/drivers/common/qat/qat_adf/qat_pke.h index f88932a275..ac051e965d 100644 --- a/drivers/common/qat/qat_adf/qat_pke.h +++ b/drivers/common/qat/qat_adf/qat_pke.h @@ -334,4 +334,24 @@ get_sm2_ecdsa_verify_function(void) return qat_function; } +static struct qat_asym_function +get_sm2_encryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_ENCRYPTION, 32 + }; + + return qat_function; +} + +static struct qat_asym_function +get_sm2_decryption_function(void) +{ + struct qat_asym_function qat_function = { + PKE_ECSM2_DECRYPTION, 32 + }; + + return qat_function; +} + #endif diff --git a/drivers/crypto/qat/qat_asym.c b/drivers/crypto/qat/qat_asym.c index e43884e69b..3ce48a1486 100644 --- a/drivers/crypto/qat/qat_asym.c +++ b/drivers/crypto/qat/qat_asym.c @@ -932,6 +932,15 @@ sm2_ecdsa_sign_set_input(struct icp_qat_fw_pke_request *qat_req, qat_req->input_param_count = 3; qat_req->output_param_count = 2; + HEXDUMP("SM2 K test", asym_op->sm2.k.data, + cookie->alg_bytesize); + HEXDUMP("SM2 K", cookie->input_array[0], + cookie->alg_bytesize); + HEXDUMP("SM2 msg", cookie->input_array[1], + cookie->alg_bytesize); + HEXDUMP("SM2 pkey", cookie->input_array[2], + cookie->alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; } @@ -983,6 +992,114 @@ sm2_ecdsa_sign_collect(struct rte_crypto_asym_op *asym_op, } static int +sm2_encryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_encryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(asym_op->sm2.k, qat_func_alignsize, 0); + SET_PKE_LN(xform->ec.q.x, qat_func_alignsize, 1); + SET_PKE_LN(xform->ec.q.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + qat_req->output_param_count = 4; + + HEXDUMP("SM2 K", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("SM2 Q.x", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("SM2 Q.y", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + +static uint8_t +sm2_encryption_collect(struct rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.C1.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.C1.y.data, cookie->output_array[1], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[2], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[3], alg_bytesize); + asym_op->sm2.C1.x.length = alg_bytesize; + asym_op->sm2.C1.y.length = alg_bytesize; + asym_op->sm2.kP.x.length = 
alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("C1[x1]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("C1[y]", cookie->output_array[1], + alg_bytesize); + HEXDUMP("kP[x]", cookie->output_array[2], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[3], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static int +sm2_decryption_set_input(struct icp_qat_fw_pke_request *qat_req, + struct qat_asym_op_cookie *cookie, + const struct rte_crypto_asym_op *asym_op, + const struct rte_crypto_asym_xform *xform) +{ + const struct qat_asym_function qat_function = + get_sm2_decryption_function(); + const uint32_t qat_func_alignsize = + qat_function.bytesize; + + SET_PKE_LN(xform->ec.pkey, qat_func_alignsize, 0); + SET_PKE_LN(asym_op->sm2.C1.x, qat_func_alignsize, 1); + SET_PKE_LN(asym_op->sm2.C1.y, qat_func_alignsize, 2); + + cookie->alg_bytesize = qat_function.bytesize; + cookie->qat_func_alignsize = qat_function.bytesize; + qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id; + qat_req->input_param_count = 3; + qat_req->output_param_count = 2; + + HEXDUMP("d", cookie->input_array[0], + qat_func_alignsize); + HEXDUMP("C1[x]", cookie->input_array[1], + qat_func_alignsize); + HEXDUMP("C1[y]", cookie->input_array[2], + qat_func_alignsize); + + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + + +static uint8_t +sm2_decryption_collect(struct rte_crypto_asym_op *asym_op, + const struct qat_asym_op_cookie *cookie) +{ + uint32_t alg_bytesize = cookie->alg_bytesize; + + rte_memcpy(asym_op->sm2.kP.x.data, cookie->output_array[0], alg_bytesize); + rte_memcpy(asym_op->sm2.kP.y.data, cookie->output_array[1], alg_bytesize); + asym_op->sm2.kP.x.length = alg_bytesize; + asym_op->sm2.kP.y.length = alg_bytesize; + + HEXDUMP("kP[x]", cookie->output_array[0], + alg_bytesize); + HEXDUMP("kP[y]", cookie->output_array[1], + alg_bytesize); + return RTE_CRYPTO_OP_STATUS_SUCCESS; +} + +static int asym_set_input(struct icp_qat_fw_pke_request *qat_req, struct qat_asym_op_cookie *cookie, const struct rte_crypto_asym_op *asym_op, @@ -1014,14 +1131,20 @@ asym_set_input(struct icp_qat_fw_pke_request *qat_req, asym_op, xform); } case RTE_CRYPTO_ASYM_XFORM_SM2: - if (asym_op->sm2.op_type == - RTE_CRYPTO_ASYM_OP_VERIFY) { + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) { + return sm2_encryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) { + return sm2_decryption_set_input(qat_req, cookie, + asym_op, xform); + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) { return sm2_ecdsa_verify_set_input(qat_req, cookie, asym_op, xform); - } else { + } else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_SIGN) { return sm2_ecdsa_sign_set_input(qat_req, cookie, asym_op, xform); } + break; default: QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform"); return -EINVAL; @@ -1113,7 +1236,13 @@ qat_asym_collect_response(struct rte_crypto_op *op, case RTE_CRYPTO_ASYM_XFORM_ECDH: return ecdh_collect(asym_op, cookie); case RTE_CRYPTO_ASYM_XFORM_SM2: - return sm2_ecdsa_sign_collect(asym_op, cookie); + if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) + return sm2_encryption_collect(asym_op, cookie); + else if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) + return sm2_decryption_collect(asym_op, cookie); + else + return sm2_ecdsa_sign_collect(asym_op, cookie); + default: QAT_LOG(ERR, "Not supported xform type"); return RTE_CRYPTO_OP_STATUS_ERROR; @@ -1385,9 +1514,8 @@ qat_asym_session_configure(struct 
rte_cryptodev *dev __rte_unused, case RTE_CRYPTO_ASYM_XFORM_ECDSA: case RTE_CRYPTO_ASYM_XFORM_ECPM: case RTE_CRYPTO_ASYM_XFORM_ECDH: - session_set_ec(qat_session, xform); - break; case RTE_CRYPTO_ASYM_XFORM_SM2: + session_set_ec(qat_session, xform); break; default: ret = -ENOTSUP; -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
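The v4 QAT patch above plugs the new request builders into asym_set_input() and qat_asym_collect_response(); on the application side the partial encrypt op is driven through the normal asym enqueue/dequeue path. A stripped-down sketch is below. It assumes the usual cryptodev setup (device configured and started, op mempool created, an asym session built from the SM2 xform carrying the public key, as in the tests); the function name sm2_partial_encrypt_enqueue() is made up for the example and error handling is minimal.

#include <rte_mempool.h>
#include <rte_crypto.h>
#include <rte_crypto_asym.h>
#include <rte_cryptodev.h>

/*
 * Run one partial SM2 encrypt op: inputs are the scalar k (set per op)
 * and the session holding the SM2 xform; c1/kp carry caller-allocated
 * output buffers for C1 = (x1,y1) and [k]P = (x2,y2). The point data
 * lands in the caller's buffers; lengths are written into the op's copies.
 */
static int
sm2_partial_encrypt_enqueue(uint8_t dev_id, uint16_t qp_id,
		struct rte_mempool *op_mpool, void *sess,
		const rte_crypto_param *k,
		const struct rte_crypto_ec_point *c1,
		const struct rte_crypto_ec_point *kp)
{
	struct rte_crypto_op *op;
	int ret = -1;

	op = rte_crypto_op_alloc(op_mpool, RTE_CRYPTO_OP_TYPE_ASYMMETRIC);
	if (op == NULL)
		return -1;

	op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT;
	op->asym->sm2.k = *k;
	op->asym->sm2.C1 = *c1;
	op->asym->sm2.kP = *kp;
	rte_crypto_op_attach_asym_session(op, sess);

	if (rte_cryptodev_enqueue_burst(dev_id, qp_id, &op, 1) == 1) {
		while (rte_cryptodev_dequeue_burst(dev_id, qp_id, &op, 1) == 0)
			;	/* poll; a real caller would bound this */
		if (op->status == RTE_CRYPTO_OP_STATUS_SUCCESS)
			ret = 0;
	}

	rte_crypto_op_free(op);
	return ret;
}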
* [PATCH v4 4/4] app/test: add test sm2 C1/Kp test cases 2024-10-08 18:14 ` [PATCH v4 0/4] add ec points to sm2 op Arkadiusz Kusztal ` (2 preceding siblings ...) 2024-10-08 18:14 ` [PATCH v4 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal @ 2024-10-08 18:14 ` Arkadiusz Kusztal 3 siblings, 0 replies; 42+ messages in thread From: Arkadiusz Kusztal @ 2024-10-08 18:14 UTC (permalink / raw) To: dev; +Cc: gakhil, brian.dooley, Arkadiusz Kusztal This commit adds tests cases to be used when C1 or kP elliptic curve points need to be computed. Signed-off-by: Arkadiusz Kusztal <arkadiuszx.kusztal@intel.com> --- app/test/test_cryptodev_asym.c | 138 ++++++++++++++++++++++++++++- app/test/test_cryptodev_sm2_test_vectors.h | 112 ++++++++++++++++++++++- 2 files changed, 246 insertions(+), 4 deletions(-) diff --git a/app/test/test_cryptodev_asym.c b/app/test/test_cryptodev_asym.c index d9b260d50e..c1437b52f6 100644 --- a/app/test/test_cryptodev_asym.c +++ b/app/test/test_cryptodev_asym.c @@ -2637,6 +2637,8 @@ test_sm2_sign(void) asym_op->sm2.k.data = input_params.k.data; asym_op->sm2.k.length = input_params.k.length; } + asym_op->sm2.k.data = input_params.k.data; + asym_op->sm2.k.length = input_params.k.length; /* Init out buf */ asym_op->sm2.r.data = output_buf_r; @@ -3192,7 +3194,7 @@ static int send_one(void) ticks++; if (ticks >= DEQ_TIMEOUT) { RTE_LOG(ERR, USER1, - "line %u FAILED: Cannot dequeue the crypto op on device %d", + "line %u FAILED: Cannot dequeue the crypto op on device, timeout %d", __LINE__, params->valid_devs[0]); return TEST_FAILED; } @@ -3497,6 +3499,132 @@ kat_rsa_decrypt_crt(const void *data) return 0; } +static int +test_sm2_partial_encryption(const void *data) +{ + struct rte_crypto_asym_xform xform = { 0 }; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_C1_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_C1_y1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + struct rte_cryptodev_asym_capability_idx idx; + const struct rte_cryptodev_asymmetric_xform_capability *capa; + + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); + if (capa == NULL) + return TEST_SKIPPED; + if (!rte_cryptodev_asym_xform_capability_check_opcap(capa, + RTE_CRYPTO_ASYM_OP_ENCRYPT, RTE_CRYPTO_SM2_PARTIAL)) { + return TEST_SKIPPED; + } + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.curve_id = RTE_CRYPTO_EC_GROUP_SM2; + xform.ec.q = test_vector->pubkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT; + self->op->asym->sm2.k = test_vector->k; + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.C1.x.data = result_C1_x1; + self->op->asym->sm2.C1.y.data = result_C1_y1; + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "C1[x]", self->op->asym->sm2.C1.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "C1[y]", self->op->asym->sm2.C1.y.data, + self->op->asym->sm2.C1.y.length); + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + self->op->asym->sm2.kP.x.length); + 
debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.kP.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.x.data, + self->op->asym->sm2.C1.x.data, + test_vector->C1.x.length, + "Incorrect value of C1[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->C1.y.data, + self->op->asym->sm2.C1.y.data, + test_vector->C1.y.length, + "Incorrect value of C1[y]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return TEST_SUCCESS; +} + +static int +test_sm2_partial_decryption(const void *data) +{ + struct rte_crypto_asym_xform xform = {}; + const uint8_t dev_id = params->valid_devs[0]; + const struct crypto_testsuite_sm2_params *test_vector = data; + uint8_t result_kP_x1[TEST_DATA_SIZE] = { 0 }; + uint8_t result_kP_y1[TEST_DATA_SIZE] = { 0 }; + struct rte_cryptodev_asym_capability_idx idx; + const struct rte_cryptodev_asymmetric_xform_capability *capa; + + idx.type = RTE_CRYPTO_ASYM_XFORM_SM2; + capa = rte_cryptodev_asym_capability_get(dev_id, &idx); + if (capa == NULL) + return TEST_SKIPPED; + if (!rte_cryptodev_asym_xform_capability_check_opcap(capa, + RTE_CRYPTO_ASYM_OP_DECRYPT, RTE_CRYPTO_SM2_PARTIAL)) { + return TEST_SKIPPED; + } + + xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2; + xform.ec.pkey = test_vector->pkey; + self->op->asym->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT; + self->op->asym->sm2.C1 = test_vector->C1; + + if (rte_cryptodev_asym_session_create(dev_id, &xform, + params->session_mpool, &self->sess) < 0) { + RTE_LOG(ERR, USER1, "line %u FAILED: Session creation failed", + __LINE__); + return TEST_FAILED; + } + rte_crypto_op_attach_asym_session(self->op, self->sess); + + self->op->asym->sm2.kP.x.data = result_kP_x1; + self->op->asym->sm2.kP.y.data = result_kP_y1; + TEST_ASSERT_SUCCESS(send_one(), + "Failed to process crypto op"); + + debug_hexdump(stdout, "kP[x]", self->op->asym->sm2.kP.x.data, + self->op->asym->sm2.C1.x.length); + debug_hexdump(stdout, "kP[y]", self->op->asym->sm2.kP.y.data, + self->op->asym->sm2.C1.y.length); + + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.x.data, + self->op->asym->sm2.kP.x.data, + test_vector->kP.x.length, + "Incorrect value of kP[x]\n"); + TEST_ASSERT_BUFFERS_ARE_EQUAL(test_vector->kP.y.data, + self->op->asym->sm2.kP.y.data, + test_vector->kP.y.length, + "Incorrect value of kP[y]\n"); + + return 0; +} + static struct unit_test_suite cryptodev_openssl_asym_testsuite = { .suite_name = "Crypto Device OPENSSL ASYM Unit Test Suite", .setup = testsuite_setup, @@ -3561,6 +3689,14 @@ static struct unit_test_suite cryptodev_qat_asym_testsuite = { .teardown = testsuite_teardown, .unit_test_cases = { TEST_CASE_NAMED_WITH_DATA( + "SM2 encryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_partial_encryption, &sm2_enc_hw_t1), + TEST_CASE_NAMED_WITH_DATA( + "SM2 decryption - test case 1", + ut_setup_asym, ut_teardown_asym, + test_sm2_partial_decryption, &sm2_enc_hw_t1), + TEST_CASE_NAMED_WITH_DATA( "Modular Exponentiation (mod=128, base=20, exp=3, res=128)", ut_setup_asym, ut_teardown_asym, modular_exponentiation, &modex_test_case_m128_b20_e3), diff --git a/app/test/test_cryptodev_sm2_test_vectors.h b/app/test/test_cryptodev_sm2_test_vectors.h index 41f5f7074a..92f7e77671 100644 --- a/app/test/test_cryptodev_sm2_test_vectors.h +++ 
b/app/test/test_cryptodev_sm2_test_vectors.h @@ -8,19 +8,125 @@ #include "rte_crypto_asym.h" struct crypto_testsuite_sm2_params { - rte_crypto_param pubkey_qx; - rte_crypto_param pubkey_qy; + union { + struct { + rte_crypto_param pubkey_qx; + rte_crypto_param pubkey_qy; + }; + struct rte_crypto_ec_point pubkey; + }; rte_crypto_param pkey; rte_crypto_param k; rte_crypto_param sign_r; rte_crypto_param sign_s; rte_crypto_param id; - rte_crypto_param cipher; + union { + rte_crypto_param cipher; + struct { + struct rte_crypto_ec_point C1; + struct rte_crypto_ec_point kP; + }; + }; rte_crypto_param message; rte_crypto_param digest; int curve; }; +uint8_t sm2_enc_pub_x_t1[] = { + 0x26, 0xf1, 0xf3, 0xef, 0x12, 0x27, 0x85, 0xd1, + 0x7d, 0x38, 0x70, 0xc2, 0x43, 0x46, 0x50, 0x36, + 0x3f, 0xdf, 0x4b, 0x2f, 0x45, 0x0e, 0x8e, 0xd1, + 0xb6, 0x0f, 0xdc, 0x1f, 0xc6, 0xf0, 0x19, 0xab +}; +uint8_t sm2_enc_pub_y_t1[] = { + 0xd9, 0x19, 0x8b, 0xdb, 0xef, 0xa5, 0x84, 0x76, + 0xec, 0x82, 0x25, 0x12, 0x5b, 0x8c, 0xe3, 0xe1, + 0x0a, 0x10, 0x0d, 0xc6, 0x97, 0x6c, 0xc1, 0x89, + 0xd9, 0x6d, 0xa6, 0x88, 0x9e, 0xbc, 0xd3, 0x7a +}; +uint8_t sm2_k_t1[] = { + 0x12, 0x34, 0x56, 0x78, 0xB9, 0x6E, 0x5A, 0xF7, + 0x0B, 0xD4, 0x80, 0xB4, 0x72, 0x40, 0x9A, 0x9A, + 0x32, 0x72, 0x57, 0xF1, 0xEB, 0xB7, 0x3F, 0x5B, + 0x07, 0x33, 0x54, 0xB2, 0x48, 0x66, 0x85, 0x63 +}; + +uint8_t sm2_C1_x_t1[] = { + 0x15, 0xf6, 0xb7, 0x49, 0x00, 0x39, 0x73, 0x9d, + 0x5b, 0xb3, 0xd3, 0xe9, 0x1d, 0xe4, 0xc8, 0xbd, + 0x08, 0xe3, 0x6a, 0x22, 0xff, 0x1a, 0xbf, 0xdc, + 0x75, 0x6b, 0x12, 0x85, 0x81, 0xc5, 0x8b, 0xcf +}; + +uint8_t sm2_C1_y_t1[] = { + 0x6a, 0x92, 0xd4, 0xd8, 0x13, 0xec, 0x8f, 0x9a, + 0x9d, 0xbe, 0x51, 0x47, 0x6f, 0x54, 0xc5, 0x41, + 0x98, 0xf5, 0x5f, 0x83, 0xce, 0x1c, 0x18, 0x1a, + 0x48, 0xbd, 0xeb, 0x38, 0x13, 0x67, 0x0d, 0x06 +}; + +uint8_t sm2_kP_x_t1[] = { + 0x6b, 0xfb, 0x9a, 0xcb, 0xc6, 0xb6, 0x36, 0x31, + 0x0f, 0xd1, 0xdd, 0x9c, 0x9f, 0x17, 0x5f, 0x3f, + 0x68, 0x13, 0x96, 0xd2, 0x54, 0x5b, 0xa6, 0x19, + 0x78, 0x1f, 0x87, 0x3d, 0x81, 0xc3, 0x21, 0x01 +}; + +uint8_t sm2_kP_y_t1[] = { + 0xa4, 0x08, 0xf3, 0x74, 0x35, 0x51, 0x8c, 0x81, + 0x06, 0x4c, 0x8f, 0x31, 0x49, 0xe3, 0x5b, 0x4d, + 0xfc, 0x3d, 0x19, 0xac, 0x7d, 0x07, 0xd0, 0x9a, + 0x99, 0x5a, 0x25, 0x16, 0x66, 0xff, 0x41, 0x3c +}; + +uint8_t sm2_kP_d_t1[] = { + 0x6F, 0xCB, 0xA2, 0xEF, 0x9A, 0xE0, 0xAB, 0x90, + 0x2B, 0xC3, 0xBD, 0xE3, 0xFF, 0x91, 0x5D, 0x44, + 0xBA, 0x4C, 0xC7, 0x8F, 0x88, 0xE2, 0xF8, 0xE7, + 0xF8, 0x99, 0x6D, 0x3B, 0x8C, 0xCE, 0xED, 0xEE +}; + +struct crypto_testsuite_sm2_params sm2_enc_hw_t1 = { + .k = { + .data = sm2_k_t1, + .length = sizeof(sm2_k_t1) + }, + .pubkey = { + .x = { + .data = sm2_enc_pub_x_t1, + .length = sizeof(sm2_enc_pub_x_t1) + }, + .y = { + .data = sm2_enc_pub_y_t1, + .length = sizeof(sm2_enc_pub_y_t1) + } + }, + .C1 = { + .x = { + .data = sm2_C1_x_t1, + .length = sizeof(sm2_C1_x_t1) + }, + .y = { + .data = sm2_C1_y_t1, + .length = sizeof(sm2_C1_y_t1) + } + }, + .kP = { + .x = { + .data = sm2_kP_x_t1, + .length = sizeof(sm2_kP_x_t1) + }, + .y = { + .data = sm2_kP_y_t1, + .length = sizeof(sm2_kP_y_t1) + } + }, + .pkey = { + .data = sm2_kP_d_t1, + .length = sizeof(sm2_kP_d_t1) + } +}; + static uint8_t fp256_pkey[] = { 0x77, 0x84, 0x35, 0x65, 0x4c, 0x7a, 0x6d, 0xb1, 0x1e, 0x63, 0x0b, 0x41, 0x97, 0x36, 0x04, 0xf4, -- 2.13.6 ^ permalink raw reply [flat|nested] 42+ messages in thread
end of thread, other threads:[~2024-10-18 12:31 UTC | newest]

Thread overview: 42+ messages
2024-01-29 18:59 [PATCH 1/3] cryptodev: add ec points to sm2 op Arkadiusz Kusztal
2024-01-29 18:59 ` [PATCH 2/3] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal
2024-01-29 18:59 ` [PATCH 3/3] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal
2024-02-01  8:07 ` [EXT] [PATCH 1/3] cryptodev: add ec points to sm2 op Akhil Goyal
2024-02-01 13:25   ` Kusztal, ArkadiuszX
2024-09-24 16:30     ` Akhil Goyal
2024-09-29 17:29 ` [PATCH v2] " Arkadiusz Kusztal
2024-10-01  7:57   ` [EXTERNAL] " Akhil Goyal
2024-10-03 14:39     ` Akhil Goyal
2024-10-07  8:29       ` Kusztal, ArkadiuszX
2024-10-08  6:28 ` [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Arkadiusz Kusztal
2024-10-08  6:28   ` [PATCH v3 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal
2024-10-08 11:27     ` [EXTERNAL] " Akhil Goyal
2024-10-08 11:46       ` Kusztal, ArkadiuszX
2024-10-08 11:49         ` Akhil Goyal
2024-10-08  6:28   ` [PATCH v3 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal
2024-10-08  6:28   ` [PATCH v3 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal
2024-10-08 15:40     ` Dooley, Brian
2024-10-08 11:46   ` [EXTERNAL] [PATCH v3 1/4] cryptodev: add partial sm2 feature flag Akhil Goyal
2024-10-08 11:48     ` Kusztal, ArkadiuszX
2024-10-08 18:14   ` [PATCH v4 0/4] add ec points to sm2 op Arkadiusz Kusztal
2024-10-08 18:14     ` [PATCH v4 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal
2024-10-08 18:14     ` [PATCH v4 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal
2024-10-08 20:46       ` Stephen Hemminger
2024-10-08 21:00         ` Kusztal, ArkadiuszX
2024-10-08 21:09           ` Stephen Hemminger
2024-10-08 21:29             ` Kusztal, ArkadiuszX
2024-10-09 13:01       ` [PATCH v5 0/4] " Arkadiusz Kusztal
2024-10-09 13:01         ` [PATCH v5 1/4] cryptodev: reorder structures in asym crypto header Arkadiusz Kusztal
2024-10-09 21:03           ` [EXTERNAL] " Akhil Goyal
2024-10-09 13:01         ` [PATCH v5 2/4] cryptodev: add ec points to sm2 op Arkadiusz Kusztal
2024-10-09 20:51           ` [EXTERNAL] " Akhil Goyal
2024-10-11 12:17             ` Akhil Goyal
2024-10-17 18:54               ` Kusztal, ArkadiuszX
2024-10-18 12:30                 ` Akhil Goyal
2024-10-09 13:01         ` [PATCH v5 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal
2024-10-09 20:49           ` [EXTERNAL] " Akhil Goyal
2024-10-10  7:49             ` Kusztal, ArkadiuszX
2024-10-09 13:01         ` [PATCH v5 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal
2024-10-11 12:16           ` [EXTERNAL] " Akhil Goyal
2024-10-08 18:14     ` [PATCH v4 3/4] crypto/qat: add sm2 encryption/decryption function Arkadiusz Kusztal
2024-10-08 18:14     ` [PATCH v4 4/4] app/test: add test sm2 C1/Kp test cases Arkadiusz Kusztal