author     Cedric Neveux <cedric.neveux@nxp.com>    2020-02-04 15:27:27 +0100
committer  Jérôme Forissier <jerome@forissier.org>  2020-02-28 10:47:29 +0100
commit     796ea6d867547350b371dfa64a0959f3ee5bc107
tree       dc5bf967f831f22c01d334e95505143935b4b6b2
parent     f5a70e3efb80be4b9bff2c9c811ddc139058e05a
drivers: caam: implement NXP CAAM Driver - RSA
Add the NXP CAAM drivers:
- Asymmetric RSA:
  - Encrypt/Decrypt
  - Sign/Verify
- Mathematical operation XOR (mod N)

Signed-off-by: Cedric Neveux <cedric.neveux@nxp.com>
Acked-by: Jens Wiklander <jens.wiklander@linaro.org>
-rw-r--r-- core/arch/arm/plat-imx/crypto_conf.mk | 25
-rw-r--r-- core/drivers/crypto/caam/acipher/caam_math.c | 216
-rw-r--r-- core/drivers/crypto/caam/acipher/caam_prime.c | 853
-rw-r--r-- core/drivers/crypto/caam/acipher/caam_rsa.c | 1668
-rw-r--r-- core/drivers/crypto/caam/acipher/local.h | 31
-rw-r--r-- core/drivers/crypto/caam/acipher/sub.mk | 4
-rw-r--r-- core/drivers/crypto/caam/caam_ctrl.c | 17
-rw-r--r-- core/drivers/crypto/caam/hal/common/hal_ctrl.c | 21
-rw-r--r-- core/drivers/crypto/caam/hal/common/registers/version_regs.h | 4
-rw-r--r-- core/drivers/crypto/caam/include/caam_acipher.h | 39
-rw-r--r-- core/drivers/crypto/caam/include/caam_desc_ccb_defines.h (renamed from core/drivers/crypto/caam/hal/common/registers/ccb_regs.h) | 12
-rw-r--r-- core/drivers/crypto/caam/include/caam_desc_defines.h | 6
-rw-r--r-- core/drivers/crypto/caam/include/caam_hal_ctrl.h | 15
-rw-r--r-- core/drivers/crypto/caam/include/caam_jr_status.h | 9
-rw-r--r-- core/drivers/crypto/caam/include/caam_trace.h | 27
-rw-r--r-- core/drivers/crypto/caam/include/caam_utils_mem.h | 12
-rw-r--r-- core/drivers/crypto/caam/sub.mk | 2
-rw-r--r-- core/drivers/crypto/caam/utils/utils_mem.c | 20
18 files changed, 2965 insertions, 16 deletions
diff --git a/core/arch/arm/plat-imx/crypto_conf.mk b/core/arch/arm/plat-imx/crypto_conf.mk
index c3dbecf3..bc7ba563 100644
--- a/core/arch/arm/plat-imx/crypto_conf.mk
+++ b/core/arch/arm/plat-imx/crypto_conf.mk
@@ -16,6 +16,7 @@
# DBG_JR BIT32(5) // Job Ring trace
# DBG_RNG BIT32(6) // RNG trace
# DBG_HASH BIT32(7) // Hash trace
+# DBG_RSA BIT32(8) // RSA trace
CFG_DBG_CAAM_TRACE ?= 0x2
CFG_DBG_CAAM_DESC ?= 0x0
CFG_DBG_CAAM_BUF ?= 0x0
@@ -55,7 +56,31 @@ $$(call force, CFG_NXP_CAAM_$$(_var)_DRV, y)
$$(call force, CFG_CRYPTO_DRV_$$(_var), y)
endef
+# Return 'y' if at least one of the variables
+# CFG_NXP_CAAM_xxx_DRV is 'y'
+cryphw-one-enabled = $(call cfg-one-enabled, \
+ $(foreach v,$(1), CFG_NXP_CAAM_$(v)_DRV))
+
# Definition of the HW and Cryto Driver Algorithm supported by all i.MX
$(eval $(call cryphw-enable-drv-hw, HASH))
+ifneq ($(filter y, $(CFG_MX6QP) $(CFG_MX6Q) $(CFG_MX6D) $(CFG_MX6DL) \
+ $(CFG_MX6S) $(CFG_MX6SL) $(CFG_MX6SLL) $(CFG_MX6SX)), y)
+$(eval $(call cryphw-enable-drv-hw, RSA))
+
+# Define the RSA Private Key Format used by the CAAM
+# Format #1: (n, d)
+# Format #2: (p, q, d)
+# Format #3: (p, q, dp, dq, qp)
+CFG_NXP_CAAM_RSA_KEY_FORMAT ?= 3
+
+endif
+
+$(call force, CFG_NXP_CAAM_ACIPHER_DRV, $(call cryphw-one-enabled, RSA))
+
+#
+# Enable Cryptographic Driver interface
+#
+CFG_CRYPTO_DRV_ACIPHER ?= $(CFG_NXP_CAAM_ACIPHER_DRV)
+
endif
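
Since CFG_NXP_CAAM_RSA_KEY_FORMAT is assigned with ?=, a platform configuration or the make command line can override the default format #3. A hypothetical override (not part of this commit) selecting private key format #2 (p, q, d):

    CFG_NXP_CAAM_RSA_KEY_FORMAT = 2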
diff --git a/core/drivers/crypto/caam/acipher/caam_math.c b/core/drivers/crypto/caam/acipher/caam_math.c
new file mode 100644
index 00000000..43aa0c3f
--- /dev/null
+++ b/core/drivers/crypto/caam/acipher/caam_math.c
@@ -0,0 +1,216 @@
+// SPDX-License-Identifier: BSD-2-Clause
+/*
+ * Copyright 2018-2020 NXP
+ *
+ * CAAM Mathematical Operation manager.
+ * Implementation of Mathematical operation using CAAM's MATH function
+ */
+#include <caam_acipher.h>
+#include <caam_common.h>
+#include <caam_hal_ctrl.h>
+#include <caam_jr.h>
+#include <caam_utils_mem.h>
+#include <caam_utils_sgt.h>
+#include <caam_utils_status.h>
+#include <drvcrypt.h>
+#include <drvcrypt_math.h>
+#include <mm/core_memprot.h>
+#include <string.h>
+#include <tee/cache.h>
+
+#include "local.h"
+
+/*
+ * MATH operation A xor B modulo n
+ *
+ * @data [in/out] operation data
+ */
+static TEE_Result do_xor_mod_n(struct drvcrypt_mod_op *data)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+ enum caam_status retstatus = CAAM_FAILURE;
+ struct caam_jobctx jobctx = { };
+ uint32_t *desc = NULL;
+ int realloc = 0;
+ struct caambuf res_align = { };
+ struct caamsgtbuf sgtres = { .sgt_type = false };
+ struct caambuf data_a = { .data = data->a.data,
+ .length = data->a.length };
+ struct caamsgtbuf sgtdata_a = { .sgt_type = false };
+ struct caambuf data_b = { .data = data->b.data,
+ .length = data->b.length };
+ struct caamsgtbuf sgtdata_b = { .sgt_type = false };
+
+ RSA_TRACE("(A xor B) mod n");
+
+ data_a.paddr = virt_to_phys(data_a.data);
+ data_b.paddr = virt_to_phys(data_b.data);
+
+ if (!data_a.paddr || !data_b.paddr)
+ return ret;
+
+ if (!caam_mem_is_cached_buf(data_a.data, data_a.length))
+ data_a.nocache = 1;
+
+ retstatus = caam_sgt_build_block_data(&sgtdata_a, NULL, &data_a);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto out;
+ }
+
+ if (!caam_mem_is_cached_buf(data_b.data, data_b.length))
+ data_b.nocache = 1;
+
+ retstatus = caam_sgt_build_block_data(&sgtdata_b, NULL, &data_b);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto out;
+ }
+
+ /*
+	 * Reallocate the result buffer, if not cache aligned, with a
+	 * maximum size equal to the key modulus (N) size
+ */
+ realloc = caam_set_or_alloc_align_buf(data->result.data, &res_align,
+ data->result.length);
+ if (realloc == -1) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto out;
+ }
+
+ retstatus = caam_sgt_build_block_data(&sgtres, NULL, &res_align);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto out;
+ }
+
+#ifdef CFG_CAAM_64BIT
+#define XOR_OP_DESC_SIZE 14
+#else
+#define XOR_OP_DESC_SIZE 11
+#endif
+ /* Allocate the job descriptor */
+ desc = caam_calloc_desc(XOR_OP_DESC_SIZE);
+ if (!desc) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto out;
+ }
+
+ /* Load in N Modulus Size */
+ caam_desc_init(desc);
+ caam_desc_add_word(desc, DESC_HEADER(0));
+ caam_desc_add_word(desc, LD_IMM(CLASS_1, REG_PKHA_N_SIZE, 4));
+ caam_desc_add_word(desc, data->n.length);
+
+ /* Load in A first value */
+ if (sgtdata_a.sgt_type) {
+ caam_desc_add_word(desc, FIFO_LD_SGT(CLASS_1, PKHA_A, NOACTION,
+ sgtdata_a.length));
+ caam_desc_add_ptr(desc, virt_to_phys(sgtdata_a.sgt));
+
+ caam_sgt_cache_op(TEE_CACHECLEAN, &sgtdata_a);
+ } else {
+ caam_desc_add_word(desc, FIFO_LD(CLASS_1, PKHA_A, NOACTION,
+ sgtdata_a.length));
+ caam_desc_add_ptr(desc, sgtdata_a.buf->paddr);
+
+ if (!sgtdata_a.buf->nocache)
+ cache_operation(TEE_CACHECLEAN, sgtdata_a.buf->data,
+ sgtdata_a.length);
+ }
+
+ /* Load in B second value */
+ if (sgtdata_b.sgt_type) {
+ caam_desc_add_word(desc, FIFO_LD_SGT(CLASS_1, PKHA_B, NOACTION,
+ sgtdata_b.length));
+ caam_desc_add_ptr(desc, virt_to_phys(sgtdata_b.sgt));
+
+ caam_sgt_cache_op(TEE_CACHECLEAN, &sgtdata_b);
+ } else {
+ caam_desc_add_word(desc, FIFO_LD(CLASS_1, PKHA_B, NOACTION,
+ sgtdata_b.length));
+ caam_desc_add_ptr(desc, sgtdata_b.buf->paddr);
+
+ if (!sgtdata_b.buf->nocache)
+ cache_operation(TEE_CACHECLEAN, sgtdata_b.buf->data,
+ sgtdata_b.length);
+ }
+
+ /* Operation B = A xor B mod n */
+ caam_desc_add_word(desc, PKHA_F2M_OP(MOD_ADD_A_B, B));
+
+ /* Store the result */
+ if (sgtres.sgt_type) {
+ caam_desc_add_word(desc, FIFO_ST_SGT(PKHA_B, sgtres.length));
+ caam_desc_add_ptr(desc, virt_to_phys(sgtres.sgt));
+
+ caam_sgt_cache_op(TEE_CACHEFLUSH, &sgtres);
+ } else {
+ caam_desc_add_word(desc, FIFO_ST(PKHA_B, sgtres.length));
+ caam_desc_add_ptr(desc, sgtres.buf->paddr);
+
+ if (!sgtres.buf->nocache)
+ cache_operation(TEE_CACHEFLUSH, sgtres.buf->data,
+ sgtres.length);
+ }
+
+ RSA_DUMPDESC(desc);
+
+ if (!res_align.nocache)
+ cache_operation(TEE_CACHEFLUSH, res_align.data,
+ data->result.length);
+
+ jobctx.desc = desc;
+ retstatus = caam_jr_enqueue(&jobctx, NULL);
+
+ if (retstatus == CAAM_NO_ERROR) {
+ if (!res_align.nocache)
+ cache_operation(TEE_CACHEINVALIDATE, res_align.data,
+ data->result.length);
+
+ if (realloc)
+ memcpy(data->result.data, res_align.data,
+ data->result.length);
+
+ RSA_DUMPBUF("Output", data->result.data, data->result.length);
+ ret = TEE_SUCCESS;
+ } else {
+ RSA_TRACE("CAAM Status 0x%08" PRIx32, jobctx.status);
+ ret = job_status_to_tee_result(jobctx.status);
+ }
+
+out:
+ caam_free_desc(&desc);
+
+ if (realloc == 1)
+ caam_free_buf(&res_align);
+
+ if (sgtdata_a.sgt_type)
+ caam_sgtbuf_free(&sgtdata_a);
+
+ if (sgtdata_b.sgt_type)
+ caam_sgtbuf_free(&sgtdata_b);
+
+ if (sgtres.sgt_type)
+ caam_sgtbuf_free(&sgtres);
+
+ return ret;
+}
+
+/*
+ * Registration of the MATH Driver
+ */
+static const struct drvcrypt_math driver_math = {
+ .xor_mod_n = &do_xor_mod_n,
+};
+
+enum caam_status caam_math_init(vaddr_t ctrl_addr)
+{
+ enum caam_status retstatus = CAAM_FAILURE;
+
+ if (caam_hal_ctrl_pknum(ctrl_addr))
+ if (!drvcrypt_register_math(&driver_math))
+ retstatus = CAAM_NO_ERROR;
+
+ return retstatus;
+}
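
For illustration, a minimal sketch of how a caller could reach the xor_mod_n hook registered above through the generic drvcrypt math interface. The drvcrypt_xor_mod_n() entry point and the exact layout of struct drvcrypt_mod_op are assumptions inferred from the fields used in do_xor_mod_n() (a, b, n and result buffers with data/length members); this is not taken from the commit itself.

    #include <drvcrypt_math.h>
    #include <tee_api_types.h>

    /*
     * Hedged example: XOR two equal-length byte strings through the
     * registered CAAM MATH driver. All operands are assumed to be the
     * size of the modulus N; only n.length is used by do_xor_mod_n().
     */
    static TEE_Result xor_buffers_example(uint8_t *a, uint8_t *b,
                                          uint8_t *out, size_t len)
    {
            struct drvcrypt_mod_op op = { };

            op.n.length = len;
            op.a.data = a;
            op.a.length = len;
            op.b.data = b;
            op.b.length = len;
            op.result.data = out;
            op.result.length = len;

            /* Dispatches to do_xor_mod_n() once the CAAM driver is registered */
            return drvcrypt_xor_mod_n(&op);
    }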
diff --git a/core/drivers/crypto/caam/acipher/caam_prime.c b/core/drivers/crypto/caam/acipher/caam_prime.c
new file mode 100644
index 00000000..686680c3
--- /dev/null
+++ b/core/drivers/crypto/caam/acipher/caam_prime.c
@@ -0,0 +1,853 @@
+// SPDX-License-Identifier: BSD-2-Clause
+/*
+ * Copyright 2018-2020 NXP
+ *
+ * CAAM Prime Numbering.
+ * Implementation of Prime Number functions
+ */
+#include <caam_common.h>
+#include <caam_desc_ccb_defines.h>
+#include <caam_jr.h>
+#include <caam_utils_mem.h>
+#include <kernel/panic.h>
+#include <mm/core_memprot.h>
+#include <string.h>
+#include <tee_api_types.h>
+#include <tee/cache.h>
+
+#include "local.h"
+
+#define RSA_TRY_FAIL 0x42
+#define RETRY_TOO_SMALL 0x2A
+
+#define STATUS_GOOD_Q 0xCA
+
+#define MR_PRIME_SIZE 1536
+
+#define MAX_RETRY_PRIME_GEN 5000
+
+#ifdef CFG_CAAM_64BIT
+#define SETUP_RSA_DESC_ENTRIES 20
+#define GEN_RSA_DESC_ENTRIES 62
+#define CHECK_P_Q_DESC_ENTRIES 32
+#else
+#define SETUP_RSA_DESC_ENTRIES 17
+#define GEN_RSA_DESC_ENTRIES 58
+#define CHECK_P_Q_DESC_ENTRIES 29
+#endif
+
+/*
+ * Predefined const value corresponding to the
+ * operation sqrt(2) * (2 ^ ((nlen / 2) - 1))
+ * Used at step 4.4
+ */
+static const uint8_t sqrt_value[] = {
+ 0xb5, 0x04, 0xf3, 0x33, 0xf9, 0xde, 0x64, 0x84, 0x59, 0x7d, 0x89, 0xb3,
+ 0x75, 0x4a, 0xbe, 0x9f, 0x1d, 0x6f, 0x60, 0xba, 0x89, 0x3b, 0xa8, 0x4c,
+ 0xed, 0x17, 0xac, 0x85, 0x83, 0x33, 0x99, 0x15, 0x4a, 0xfc, 0x83, 0x04,
+ 0x3a, 0xb8, 0xa2, 0xc3, 0xa8, 0xb1, 0xfe, 0x6f, 0xdc, 0x83, 0xdb, 0x39,
+ 0x0f, 0x74, 0xa8, 0x5e, 0x43, 0x9c, 0x7b, 0x4a, 0x78, 0x04, 0x87, 0x36,
+ 0x3d, 0xfa, 0x27, 0x68, 0xd2, 0x20, 0x2e, 0x87, 0x42, 0xaf, 0x1f, 0x4e,
+ 0x53, 0x05, 0x9c, 0x60, 0x11, 0xbc, 0x33, 0x7b, 0xca, 0xb1, 0xbc, 0x91,
+ 0x16, 0x88, 0x45, 0x8a, 0x46, 0x0a, 0xbc, 0x72, 0x2f, 0x7c, 0x4e, 0x33,
+ 0xc6, 0xd5, 0xa8, 0xa3, 0x8b, 0xb7, 0xe9, 0xdc, 0xcb, 0x2a, 0x63, 0x43,
+ 0x31, 0xf3, 0xc8, 0x4d, 0xf5, 0x2f, 0x12, 0x0f, 0x83, 0x6e, 0x58, 0x2e,
+ 0xea, 0xa4, 0xa0, 0x89, 0x90, 0x40, 0xca, 0x4a, 0x81, 0x39, 0x4a, 0xb6,
+ 0xd8, 0xfd, 0x0e, 0xfd, 0xf4, 0xd3, 0xa0, 0x2c, 0xeb, 0xc9, 0x3e, 0x0c,
+ 0x42, 0x64, 0xda, 0xbc, 0xd5, 0x28, 0xb6, 0x51, 0xb8, 0xcf, 0x34, 0x1b,
+ 0x6f, 0x82, 0x36, 0xc7, 0x01, 0x04, 0xdc, 0x01, 0xfe, 0x32, 0x35, 0x2f,
+ 0x33, 0x2a, 0x5e, 0x9f, 0x7b, 0xda, 0x1e, 0xbf, 0xf6, 0xa1, 0xbe, 0x3f,
+ 0xca, 0x22, 0x13, 0x07, 0xde, 0xa0, 0x62, 0x41, 0xf7, 0xaa, 0x81, 0xc2,
+ 0xc1, 0xfc, 0xbd, 0xde, 0xa2, 0xf7, 0xdc, 0x33, 0x18, 0x83, 0x8a, 0x2e,
+ 0xaf, 0xf5, 0xf3, 0xb2, 0xd2, 0x4f, 0x4a, 0x76, 0x3f, 0xac, 0xb8, 0x82,
+ 0xfd, 0xfe, 0x17, 0x0f, 0xd3, 0xb1, 0xf7, 0x80, 0xf9, 0xac, 0xce, 0x41,
+ 0x79, 0x7f, 0x28, 0x05, 0xc2, 0x46, 0x78, 0x5e, 0x92, 0x95, 0x70, 0x23,
+ 0x5f, 0xcf, 0x8f, 0x7b, 0xca, 0x3e, 0xa3, 0x3b, 0x4d, 0x7c, 0x60, 0xa5,
+ 0xe6, 0x33, 0xe3, 0xe1
+};
+
+/*
+ * Speedups for prime searching
+ *
+ * These values are products of small primes. Information about the product
+ * precedes it. These values have been pre-computed by the CAAM design team.
+ *
+ * Per Handbook of Applied Cryptography, Menezes et al, 4.4.1, one can compute
+ * the percentage of non-primes weeded out by checking for small prime factors
+ * in the candidates. In the table below, "highest prime" is used for B, and
+ * "%weeded" is the number of candidates which get through this
+ * sieve. As you can see, even with relatively few primes, there are
+ * diminishing returns to using larger numbers of primes.
+ *
+ * Percentage weeded: 1 - 1.12/ln B
+ *
+ * These can be used to compute GCD(prime, smallprime) before the Miller
+ * Rabin; this will weed out those candidates with 'small' primes before doing
+ * the costly modular exponentiation inside of Miller-Rabin. (If the result is
+ * not one, then the candidate has as a factor at least one of the small primes
+ * in the product).
+ *
+ * So, where is the sweet spot for the size of the product versus the size of
+ * the candidate? Does it depend upon the size of the PKHA multiplier? Hunt
+ * time for primes takes a long time to actually compute, and what are the
+ * stats for percentage of candidates that might be weeded out? If not many,
+ * then there is an extra computation.
+ */
+struct smallprime {
+ const size_t length;
+ const uint8_t *data;
+};
+
+/* sizes | #primes | highest prime | %weeded */
+/* bits / bytes | | */
+/* 64 / 8 | 15 | 53 | 72 */
+static const uint8_t smallprime_8[] = {
+ 0xe2, 0x21, 0xf9, 0x7c, 0x30, 0xe9, 0x4e, 0x1d,
+};
+
+/* 128 / 16 | 25 | 101 | 76 */
+static const uint8_t smallprime_16[] = {
+ 0x57, 0x97, 0xd4, 0x7c, 0x51, 0x68, 0x15, 0x49, 0xd7, 0x34, 0xe4, 0xfc,
+ 0x4c, 0x3e, 0xaf, 0x7f,
+};
+
+/* 256 / 32 | 43 | 193 | 79 */
+static const uint8_t smallprime_32[] = {
+ 0xdb, 0xf0, 0x5b, 0x6f, 0x56, 0x54, 0xb3, 0xc0, 0xf5, 0x24, 0x35, 0x51,
+ 0x43, 0x95, 0x86, 0x88, 0x9f, 0x15, 0x58, 0x87, 0x81, 0x9a, 0xed, 0x2a,
+ 0xc0, 0x5b, 0x93, 0x35, 0x2b, 0xe9, 0x86, 0x77,
+};
+
+/* 384 / 48 | 59 | 281 | 80 */
+static const uint8_t smallprime_48[] = {
+ 0x50, 0x12, 0x01, 0xcc, 0x51, 0xa4, 0x92, 0xa5, 0x44, 0xd3, 0x90, 0x0a,
+ 0xd4, 0xf8, 0xb3, 0x2a, 0x20, 0x3c, 0x85, 0x84, 0x06, 0xa4, 0x45, 0x7c,
+ 0xab, 0x0b, 0x4f, 0x80, 0x5a, 0xb1, 0x8a, 0xc6, 0xeb, 0x95, 0x72, 0xac,
+ 0x6e, 0x93, 0x94, 0xfa, 0x52, 0x2b, 0xff, 0xb6, 0xf4, 0x4a, 0xf2, 0xf3,
+};
+
+/* 512 / 64 | 74 | 379 | 81 */
+static const uint8_t smallprime_64[] = {
+ 0x10, 0x6a, 0xa9, 0xfb, 0x76, 0x46, 0xfa, 0x6e, 0xb0, 0x81, 0x3c, 0x28,
+ 0xc5, 0xd5, 0xf0, 0x9f, 0x07, 0x7e, 0xc3, 0xba, 0x23, 0x8b, 0xfb, 0x99,
+ 0xc1, 0xb6, 0x31, 0xa2, 0x03, 0xe8, 0x11, 0x87, 0x23, 0x3d, 0xb1, 0x17,
+ 0xcb, 0xc3, 0x84, 0x05, 0x6e, 0xf0, 0x46, 0x59, 0xa4, 0xa1, 0x1d, 0xe4,
+ 0x9f, 0x7e, 0xcb, 0x29, 0xba, 0xda, 0x8f, 0x98, 0x0d, 0xec, 0xec, 0xe9,
+ 0x2e, 0x30, 0xc4, 0x8f,
+};
+
+/* 576 / 72 | 81 | 421 | 82 */
+static const uint8_t smallprime_72[] = {
+ 0x01, 0x85, 0xdb, 0xeb, 0x2b, 0x8b, 0x11, 0xd3, 0x76, 0x33, 0xe9, 0xdc,
+ 0x1e, 0xec, 0x54, 0x15, 0x65, 0xc6, 0xce, 0x84, 0x31, 0xd2, 0x27, 0xee,
+ 0x28, 0xf0, 0x32, 0x8a, 0x60, 0xc9, 0x01, 0x18, 0xae, 0x03, 0x1c, 0xc5,
+ 0xa7, 0x81, 0xc8, 0x24, 0xd1, 0xf1, 0x6d, 0x25, 0xf4, 0xf0, 0xcc, 0xcf,
+ 0xf3, 0x5e, 0x97, 0x45, 0x79, 0x07, 0x2e, 0xc8, 0xca, 0xf1, 0xac, 0x8e,
+ 0xef, 0xd5, 0x56, 0x6f, 0xa1, 0x5f, 0xb9, 0x4f, 0xe3, 0x4f, 0x5d, 0x37,
+};
+
+/* 768 / 96 | 103 | 569 | 82 */
+static const uint8_t smallprime_96[] = {
+ 0x25, 0xea, 0xc8, 0x9f, 0x8d, 0x4d, 0xa3, 0x38, 0x33, 0x7b, 0x49, 0x85,
+ 0x0d, 0x2d, 0x14, 0x89, 0x26, 0x63, 0x17, 0x7b, 0x40, 0x10, 0xaf, 0x3d,
+ 0xd2, 0x3e, 0xeb, 0x0b, 0x22, 0x8f, 0x38, 0x32, 0xff, 0xce, 0xe2, 0xe5,
+ 0xcb, 0xd1, 0xac, 0xc9, 0x8f, 0x47, 0xf2, 0x51, 0x87, 0x33, 0x80, 0xae,
+ 0x10, 0xf0, 0xff, 0xdd, 0x8e, 0x60, 0x2f, 0xfa, 0x21, 0x0f, 0x41, 0xf6,
+ 0x69, 0xa1, 0x57, 0x0a, 0x93, 0xc1, 0x58, 0xc1, 0xa9, 0xa8, 0x22, 0x7f,
+ 0xf8, 0x1a, 0x90, 0xc5, 0x63, 0x0e, 0x9c, 0x44, 0x84, 0x5c, 0x75, 0x5c,
+ 0x7d, 0xf3, 0x5a, 0x7d, 0x43, 0x0c, 0x67, 0x9a, 0x11, 0x57, 0x56, 0x55,
+};
+
+/* 1024 / 128 | 130 | 739 | 83 */
+static const uint8_t smallprime_128[] = {
+ 0x02, 0xc8, 0x5f, 0xf8, 0x70, 0xf2, 0x4b, 0xe8, 0x0f, 0x62, 0xb1, 0xba,
+ 0x6c, 0x20, 0xbd, 0x72, 0xb8, 0x37, 0xef, 0xdf, 0x12, 0x12, 0x06, 0xd8,
+ 0x7d, 0xb5, 0x6b, 0x7d, 0x69, 0xfa, 0x4c, 0x02, 0x1c, 0x10, 0x7c, 0x3c,
+ 0xa2, 0x06, 0xfe, 0x8f, 0xa7, 0x08, 0x0e, 0xf5, 0x76, 0xef, 0xfc, 0x82,
+ 0xf9, 0xb1, 0x0f, 0x57, 0x50, 0x65, 0x6b, 0x77, 0x94, 0xb1, 0x6a, 0xfd,
+ 0x70, 0x99, 0x6e, 0x91, 0xae, 0xf6, 0xe0, 0xad, 0x15, 0xe9, 0x1b, 0x07,
+ 0x1a, 0xc9, 0xb2, 0x4d, 0x98, 0xb2, 0x33, 0xad, 0x86, 0xee, 0x05, 0x55,
+ 0x18, 0xe5, 0x8e, 0x56, 0x63, 0x8e, 0xf1, 0x8b, 0xac, 0x5c, 0x74, 0xcb,
+ 0x35, 0xbb, 0xb6, 0xe5, 0xda, 0xe2, 0x78, 0x3d, 0xd1, 0xc0, 0xce, 0x7d,
+ 0xec, 0x4f, 0xc7, 0x0e, 0x51, 0x86, 0xd4, 0x11, 0xdf, 0x36, 0x36, 0x8f,
+ 0x06, 0x1a, 0xa3, 0x60, 0x11, 0xf3, 0x01, 0x79,
+};
+
+/* 1088 / 184 | 136 | 787 | 83 */
+static const uint8_t smallprime_184[] = {
+ 0x16, 0xaf, 0x5c, 0x18, 0xa2, 0xbe, 0xf8, 0xef, 0xf2, 0x27, 0x83, 0x32,
+ 0x18, 0x2d, 0x0f, 0xbf, 0x00, 0x38, 0xcc, 0x20, 0x51, 0x48, 0xb8, 0x3d,
+ 0x06, 0xe3, 0xd7, 0xd9, 0x32, 0x82, 0x8b, 0x18, 0xe1, 0x1e, 0x09, 0x40,
+ 0x28, 0xc7, 0xea, 0xed, 0xa3, 0x39, 0x50, 0x17, 0xe0, 0x7d, 0x8a, 0xe9,
+ 0xb5, 0x94, 0x06, 0x04, 0x51, 0xd0, 0x5f, 0x93, 0x08, 0x4c, 0xb4, 0x81,
+ 0x66, 0x3c, 0x94, 0xc6, 0xff, 0x98, 0x0d, 0xde, 0xcc, 0xdb, 0x42, 0xad,
+ 0x37, 0x09, 0x7f, 0x41, 0xa7, 0x83, 0x7f, 0xc9, 0x5a, 0xfe, 0x3f, 0x18,
+ 0xad, 0x76, 0xf2, 0x34, 0x83, 0xae, 0x94, 0x2e, 0x0f, 0x0c, 0x0b, 0xc6,
+ 0xe4, 0x00, 0x16, 0x12, 0x31, 0x89, 0x87, 0x2b, 0xe5, 0x8f, 0x6d, 0xfc,
+ 0x23, 0x9c, 0xa2, 0x8f, 0xb0, 0xcf, 0xbf, 0x96, 0x4c, 0x8f, 0x27, 0xce,
+ 0x05, 0xd6, 0xc7, 0x7a, 0x01, 0xf9, 0xd3, 0x32, 0x36, 0xc9, 0xd4, 0x42,
+ 0xad, 0x69, 0xed, 0x33,
+};
+
+/* 1536 / 192 | 182 | 1093 | 84 */
+static const uint8_t smallprime_192[] = {
+ 0x02, 0x1b, 0xf9, 0x49, 0x70, 0x91, 0xb8, 0xc3, 0x68, 0xcc, 0x7c, 0x8e,
+ 0x00, 0xc1, 0x99, 0x0c, 0x60, 0x27, 0x48, 0x1b, 0x79, 0x21, 0x5a, 0xc8,
+ 0xa7, 0x51, 0x77, 0x49, 0xa2, 0x15, 0x13, 0x77, 0x9a, 0x99, 0x3d, 0x29,
+ 0x58, 0xfc, 0xb4, 0x9a, 0x73, 0x68, 0x02, 0x92, 0x68, 0x52, 0x79, 0x94,
+ 0xc6, 0xcc, 0x19, 0x28, 0xad, 0xd4, 0x12, 0x95, 0x96, 0x76, 0x5f, 0x4c,
+ 0xc3, 0x14, 0x1a, 0x04, 0x4e, 0xb1, 0xd6, 0x15, 0x78, 0x88, 0x16, 0x67,
+ 0x57, 0xd8, 0x61, 0x87, 0x81, 0x81, 0x30, 0x62, 0x03, 0x22, 0x67, 0x98,
+ 0x7d, 0xf0, 0xd4, 0x71, 0x9c, 0xd3, 0x8f, 0x1b, 0x70, 0x85, 0xfc, 0xa5,
+ 0x33, 0x4b, 0xe3, 0xa6, 0x00, 0x3a, 0x3c, 0xe7, 0xe1, 0x9a, 0xba, 0x55,
+ 0x3e, 0x80, 0xcc, 0x5a, 0xe4, 0x06, 0x0e, 0xff, 0x6e, 0x18, 0x06, 0x66,
+ 0x1d, 0xa5, 0xee, 0xb7, 0xd1, 0x42, 0xd3, 0xb2, 0xe4, 0x07, 0x39, 0xf1,
+ 0x44, 0x3d, 0xee, 0x3a, 0x19, 0x86, 0x37, 0xf0, 0x3c, 0x06, 0x28, 0x45,
+ 0xea, 0xff, 0x3f, 0xf2, 0x7e, 0xa3, 0x8d, 0x93, 0x44, 0xd8, 0xa9, 0x02,
+ 0x22, 0x47, 0x2d, 0xf0, 0x7d, 0xfb, 0x5c, 0x9c, 0x8a, 0xda, 0x77, 0xcd,
+ 0x0d, 0x5b, 0x94, 0xef, 0xf0, 0x21, 0xe0, 0x2e, 0x30, 0x7d, 0x08, 0x01,
+ 0x03, 0x12, 0xd5, 0x7c, 0xb5, 0xd9, 0x75, 0x76, 0x46, 0x97, 0x84, 0x2d,
+};
+
+/* 2048 / 256 | 232 | 1471 | 85 */
+static const uint8_t smallprime_256[] = {
+ 0x24, 0x65, 0xa7, 0xbd, 0x85, 0x01, 0x1e, 0x1c, 0x9e, 0x05, 0x27, 0x92,
+ 0x9f, 0xff, 0x26, 0x8c, 0x82, 0xef, 0x7e, 0xfa, 0x41, 0x68, 0x63, 0xba,
+ 0xa5, 0xac, 0xdb, 0x09, 0x71, 0xdb, 0xa0, 0xcc, 0xac, 0x3e, 0xe4, 0x99,
+ 0x93, 0x45, 0x02, 0x9f, 0x2c, 0xf8, 0x10, 0xb9, 0x9e, 0x40, 0x6a, 0xac,
+ 0x5f, 0xce, 0x5d, 0xd6, 0x9d, 0x1c, 0x71, 0x7d, 0xae, 0xa5, 0xd1, 0x8a,
+ 0xb9, 0x13, 0xf4, 0x56, 0x50, 0x56, 0x79, 0xbc, 0x91, 0xc5, 0x7d, 0x46,
+ 0xd9, 0x88, 0x88, 0x57, 0x86, 0x2b, 0x36, 0xe2, 0xed, 0xe2, 0xe4, 0x73,
+ 0xc1, 0xf0, 0xab, 0x35, 0x9d, 0xa2, 0x52, 0x71, 0xaf, 0xfe, 0x15, 0xff,
+ 0x24, 0x0e, 0x29, 0x9d, 0x0b, 0x04, 0xf4, 0xcd, 0x0e, 0x4d, 0x7c, 0x0e,
+ 0x47, 0xb1, 0xa7, 0xba, 0x00, 0x7d, 0xe8, 0x9a, 0xae, 0x84, 0x8f, 0xd5,
+ 0xbd, 0xcd, 0x7f, 0x98, 0x15, 0x56, 0x4e, 0xb0, 0x60, 0xae, 0x14, 0xf1,
+ 0x9c, 0xb5, 0x0c, 0x29, 0x1f, 0x0b, 0xbd, 0x8e, 0xd1, 0xc4, 0xc7, 0xf8,
+ 0xfc, 0x5f, 0xba, 0x51, 0x66, 0x20, 0x01, 0x93, 0x9b, 0x53, 0x2d, 0x92,
+ 0xda, 0xc8, 0x44, 0xa8, 0x43, 0x1d, 0x40, 0x0c, 0x83, 0x2d, 0x03, 0x9f,
+ 0x5f, 0x90, 0x0b, 0x27, 0x8a, 0x75, 0x21, 0x9c, 0x29, 0x86, 0x14, 0x0c,
+ 0x79, 0x04, 0x5d, 0x77, 0x59, 0x54, 0x08, 0x54, 0xc3, 0x15, 0x04, 0xdc,
+ 0x56, 0xf1, 0xdf, 0x5e, 0xeb, 0xe7, 0xbe, 0xe4, 0x47, 0x65, 0x8b, 0x91,
+ 0x7b, 0xf6, 0x96, 0xd6, 0x92, 0x7f, 0x2e, 0x24, 0x28, 0xfb, 0xeb, 0x34,
+ 0x0e, 0x51, 0x5c, 0xb9, 0x83, 0x5d, 0x63, 0x87, 0x1b, 0xe8, 0xbb, 0xe0,
+ 0x9c, 0xf1, 0x34, 0x45, 0x79, 0x9f, 0x2e, 0x67, 0x78, 0x81, 0x51, 0x57,
+ 0x1a, 0x93, 0xb4, 0xc1, 0xee, 0xe5, 0x5d, 0x1b, 0x90, 0x72, 0xe0, 0xb2,
+ 0xf5, 0xc4, 0x60, 0x7f,
+};
+
+/* 3072 / 384 | 326 | 2179 | 85 */
+static const uint8_t smallprime_384[] = {
+ 0x00, 0x4d, 0xc2, 0x0e, 0x27, 0x31, 0x51, 0x23, 0xfd, 0xab, 0xcd, 0x18,
+ 0xca, 0x81, 0x2e, 0xe0, 0xee, 0x44, 0x49, 0x23, 0x87, 0x38, 0x9e, 0xd6,
+ 0xc9, 0x16, 0x97, 0x95, 0x89, 0x65, 0xed, 0xc5, 0x3d, 0x89, 0x13, 0xa8,
+ 0xe6, 0xec, 0x7f, 0x83, 0x6a, 0x8b, 0xd6, 0x03, 0x7e, 0x57, 0xed, 0x0c,
+ 0x69, 0x30, 0xef, 0x26, 0x49, 0x0d, 0xc3, 0x5d, 0x05, 0xd0, 0x98, 0xa4,
+ 0x66, 0xad, 0xf8, 0x17, 0x9f, 0x82, 0x99, 0x69, 0xd1, 0x39, 0x55, 0x8f,
+ 0x16, 0xe9, 0x8b, 0x3f, 0x76, 0xfc, 0x90, 0x62, 0xc1, 0x57, 0x25, 0xce,
+ 0x09, 0x88, 0xfa, 0xed, 0xca, 0x96, 0x6a, 0x6b, 0x92, 0x5f, 0x9b, 0x9c,
+ 0x67, 0x03, 0x43, 0xea, 0x7e, 0x84, 0x20, 0x65, 0xbd, 0x26, 0xf2, 0xbf,
+ 0x29, 0x90, 0x4f, 0xa7, 0xf4, 0x9f, 0x33, 0x49, 0x28, 0x96, 0x33, 0x73,
+ 0xba, 0x08, 0x95, 0x96, 0x51, 0x3d, 0xac, 0xa7, 0x39, 0x28, 0xcf, 0x30,
+ 0x5a, 0xdf, 0x8c, 0x24, 0x6e, 0x1d, 0x99, 0xa2, 0x42, 0xd9, 0x23, 0x56,
+ 0x23, 0xc4, 0x9a, 0xf2, 0x91, 0x45, 0x06, 0xc9, 0x11, 0x21, 0x5e, 0x1e,
+ 0x49, 0xaf, 0x84, 0x80, 0x3e, 0xd9, 0xa2, 0xca, 0x05, 0x51, 0x72, 0x1f,
+ 0xe6, 0x31, 0x9b, 0xf2, 0x38, 0xc0, 0x8a, 0xae, 0x6f, 0xd5, 0x01, 0x54,
+ 0x03, 0xd9, 0xe5, 0x55, 0x09, 0xee, 0x31, 0xc9, 0x60, 0x12, 0xf9, 0x08,
+ 0x35, 0x18, 0x5f, 0x31, 0xcb, 0xd2, 0xe4, 0x89, 0x83, 0x3c, 0x1d, 0x54,
+ 0x62, 0xfa, 0x80, 0x53, 0x59, 0x04, 0x86, 0x7b, 0x2c, 0x94, 0x5e, 0x9a,
+ 0x0c, 0x2f, 0x7a, 0xa3, 0x6e, 0x0a, 0xc0, 0xeb, 0x9b, 0xb4, 0xc1, 0x1b,
+ 0xf5, 0x80, 0xcf, 0x0d, 0x6d, 0x2a, 0x49, 0xed, 0x1a, 0x2d, 0x74, 0xca,
+ 0xe0, 0xf4, 0xc3, 0xad, 0xff, 0x61, 0xd6, 0x48, 0xca, 0x6a, 0x12, 0x08,
+ 0x58, 0xf4, 0xab, 0xb3, 0xb3, 0x12, 0x07, 0xcf, 0x9b, 0x7c, 0x2f, 0xda,
+ 0x74, 0xf7, 0x72, 0x2b, 0x14, 0x99, 0x17, 0x87, 0x5a, 0xac, 0x9d, 0x61,
+ 0x53, 0xc9, 0x71, 0x13, 0xfc, 0xd3, 0x74, 0xaf, 0x93, 0xdd, 0x3f, 0xa2,
+ 0x1a, 0x7d, 0xe5, 0x1f, 0x1a, 0x70, 0xc6, 0x31, 0xba, 0x6c, 0x92, 0x26,
+ 0x1e, 0x89, 0x54, 0x1a, 0xa4, 0x71, 0x41, 0xf4, 0x4e, 0x07, 0x5a, 0x1c,
+ 0x52, 0x2a, 0xe5, 0x81, 0x60, 0xda, 0xc8, 0x70, 0xdf, 0xbd, 0x86, 0x06,
+ 0xe4, 0xec, 0xa0, 0x89, 0x2a, 0xe5, 0x1c, 0x87, 0x34, 0xf5, 0xb7, 0x71,
+ 0x2b, 0xcd, 0x3d, 0xe3, 0x32, 0x5e, 0xc2, 0x5f, 0x07, 0xd4, 0xef, 0x94,
+ 0x33, 0x94, 0xd5, 0xe7, 0xb3, 0x84, 0x10, 0x05, 0xa3, 0xbd, 0x1a, 0x3e,
+ 0x4d, 0x27, 0x06, 0x1d, 0x54, 0xd2, 0x44, 0x58, 0x24, 0xf8, 0x51, 0x17,
+ 0xd0, 0xf6, 0x97, 0x12, 0x84, 0xa8, 0xc9, 0x7a, 0x42, 0x50, 0xb9, 0x9b,
+};
+
+/* 4096 / 512 | 417 | 2887 | 86 */
+static const uint8_t smallprime_512[] = {
+ 0x09, 0x62, 0x07, 0xfc, 0xcb, 0x19, 0xd6, 0x75, 0x8e, 0x37, 0x4b, 0xee,
+ 0x6c, 0x37, 0x09, 0xaf, 0x0a, 0x54, 0xa9, 0x82, 0xbf, 0x90, 0x14, 0xe4,
+ 0x50, 0xb7, 0x48, 0x18, 0x13, 0xb7, 0x30, 0x5b, 0x4c, 0x25, 0xf0, 0xe2,
+ 0xea, 0x6e, 0x2b, 0x56, 0xf9, 0x1e, 0x59, 0x92, 0x14, 0x2d, 0x21, 0x6e,
+ 0xae, 0xb2, 0xec, 0xe0, 0x05, 0xfa, 0x0d, 0x18, 0xef, 0xeb, 0x78, 0xef,
+ 0xc3, 0x41, 0xf3, 0x1f, 0x78, 0x3e, 0xe4, 0x4a, 0xc5, 0xef, 0x5d, 0xfe,
+ 0x35, 0x57, 0x91, 0x28, 0x21, 0x06, 0x15, 0x6c, 0x64, 0xd1, 0x67, 0xa5,
+ 0x42, 0x1c, 0xfe, 0xc3, 0x3c, 0xbb, 0xd3, 0x88, 0x38, 0x0b, 0xe8, 0x54,
+ 0x14, 0x9f, 0xb6, 0x5c, 0x08, 0xe7, 0x9c, 0xd0, 0x4e, 0xc4, 0x8b, 0x45,
+ 0x62, 0x8e, 0xe6, 0x7f, 0x5c, 0x6f, 0xb0, 0x18, 0x18, 0xfa, 0x1f, 0xf7,
+ 0x32, 0x24, 0x0c, 0x0b, 0xb1, 0xc7, 0xfe, 0xc1, 0x4c, 0x48, 0x23, 0x4c,
+ 0x6f, 0xc3, 0xe0, 0x75, 0x76, 0x4f, 0x63, 0xc0, 0x26, 0x83, 0x61, 0x83,
+ 0x1d, 0x89, 0x60, 0xf2, 0x4b, 0x23, 0x7e, 0x96, 0xc2, 0xca, 0xba, 0x4c,
+ 0x1a, 0x21, 0x23, 0xff, 0x33, 0xa4, 0x9b, 0xca, 0x39, 0x49, 0xe8, 0xab,
+ 0xad, 0xde, 0x06, 0xda, 0xc5, 0x70, 0x3d, 0x16, 0xdb, 0x76, 0x77, 0xdf,
+ 0x2b, 0x0c, 0xe2, 0xc7, 0x84, 0x85, 0xeb, 0xd5, 0xe6, 0x9b, 0xd8, 0x0a,
+ 0x18, 0x48, 0xa9, 0xfe, 0x28, 0x9c, 0xa2, 0xba, 0x66, 0x4a, 0x68, 0x7b,
+ 0x3f, 0x05, 0x40, 0x15, 0x6e, 0x67, 0xae, 0x67, 0x69, 0xc0, 0x9e, 0x11,
+ 0xce, 0x56, 0x73, 0x57, 0xf5, 0xa5, 0x76, 0xa4, 0x8e, 0xed, 0xd9, 0x63,
+ 0x35, 0xe6, 0x28, 0x77, 0xc7, 0x3a, 0x65, 0x40, 0x8b, 0x71, 0x48, 0x4e,
+ 0xd0, 0xf1, 0x1d, 0x20, 0xd5, 0x1e, 0x8e, 0x54, 0x67, 0xa1, 0xe4, 0xc0,
+ 0x9b, 0xf7, 0x29, 0xba, 0x16, 0x9f, 0xcf, 0xdb, 0xa8, 0xb5, 0x5c, 0x4c,
+ 0x5b, 0x68, 0x2f, 0xaa, 0x28, 0x71, 0x9b, 0x9f, 0x49, 0xbf, 0x36, 0x2d,
+ 0x9f, 0x03, 0xee, 0x6b, 0xde, 0x79, 0x01, 0xe9, 0x40, 0xe2, 0x49, 0xb4,
+ 0x1c, 0x93, 0xb9, 0xab, 0x05, 0x4a, 0xbc, 0xab, 0x10, 0x9a, 0xf1, 0x2a,
+ 0xa6, 0x53, 0x5e, 0xd8, 0xf6, 0x23, 0xab, 0xfd, 0x31, 0x2a, 0xaa, 0x08,
+ 0x4a, 0x74, 0x8f, 0x86, 0x53, 0x83, 0xbc, 0xe3, 0x15, 0xdc, 0x0d, 0x45,
+ 0xcb, 0x89, 0x50, 0x8d, 0xec, 0xa9, 0x3b, 0xda, 0x22, 0xf0, 0xe7, 0x7a,
+ 0x4f, 0xea, 0xa2, 0xa7, 0x90, 0xe0, 0x0e, 0x5a, 0xda, 0x9b, 0xbb, 0x9a,
+ 0xe7, 0xd5, 0xfb, 0x63, 0x54, 0xa2, 0x52, 0xda, 0x7d, 0xc2, 0x6e, 0x6a,
+ 0xc2, 0xd7, 0xa6, 0x42, 0xea, 0xbf, 0x48, 0x12, 0xe6, 0x4a, 0xe1, 0x95,
+ 0xbf, 0x29, 0xcc, 0x9e, 0xe0, 0x25, 0x84, 0xb7, 0x74, 0xdc, 0xb1, 0x12,
+ 0x91, 0x57, 0xbf, 0x52, 0x43, 0x8f, 0xb7, 0xb7, 0xcd, 0x6a, 0x78, 0x24,
+ 0xa7, 0x41, 0x8b, 0xcc, 0x65, 0x83, 0x05, 0x8e, 0xc2, 0xf0, 0x69, 0x28,
+ 0xe4, 0x42, 0x62, 0x37, 0x98, 0xb5, 0x03, 0xf6, 0x75, 0x1d, 0xce, 0xe2,
+ 0xc0, 0x1f, 0x39, 0xac, 0xb0, 0xfb, 0x47, 0x8f, 0x6e, 0x8b, 0x16, 0xa3,
+ 0x0f, 0xe8, 0x21, 0x9b, 0x8e, 0x67, 0x04, 0xc7, 0x26, 0xb6, 0x03, 0xe1,
+ 0x00, 0x09, 0xf6, 0x77, 0x76, 0x46, 0x51, 0x41, 0x57, 0x0d, 0x4b, 0x4c,
+ 0x2a, 0x30, 0xdb, 0x84, 0x02, 0x6f, 0x93, 0x4b, 0x81, 0xf0, 0xd5, 0xe9,
+ 0x85, 0xc9, 0x75, 0xd6, 0xa9, 0x07, 0x5a, 0x41, 0xd4, 0x17, 0xc6, 0xd9,
+ 0x93, 0xcb, 0x49, 0x73, 0xcb, 0xe5, 0x12, 0xa6, 0x7d, 0xb3, 0x1f, 0x6a,
+ 0xec, 0x8c, 0xc3, 0xe9, 0xe5, 0xeb, 0xdc, 0x1e, 0xb7, 0xb4, 0x74, 0x54,
+ 0x51, 0x52, 0xa1, 0x56, 0xd5, 0xac, 0x58, 0x7d,
+};
+
+static const struct smallprime smallprimes[] = {
+ { .data = smallprime_8, .length = sizeof(smallprime_8) },
+ { .data = smallprime_16, .length = sizeof(smallprime_16) },
+ { .data = smallprime_32, .length = sizeof(smallprime_32) },
+ { .data = smallprime_48, .length = sizeof(smallprime_48) },
+ { .data = smallprime_64, .length = sizeof(smallprime_64) },
+ { .data = smallprime_72, .length = sizeof(smallprime_72) },
+ { .data = smallprime_96, .length = sizeof(smallprime_96) },
+ { .data = smallprime_128, .length = sizeof(smallprime_128) },
+ { .data = smallprime_184, .length = sizeof(smallprime_184) },
+ { .data = smallprime_192, .length = sizeof(smallprime_192) },
+ { .data = smallprime_256, .length = sizeof(smallprime_256) },
+ { .data = smallprime_384, .length = sizeof(smallprime_384) },
+ { .data = smallprime_512, .length = sizeof(smallprime_512) },
+};
+
+/*
+ * Search the predefined small prime matching the given size in bytes
+ *
+ * @size Size in bytes
+ * @prime [out] Output predefined small prime
+ */
+static void search_smallprime(size_t size, struct caambuf *prime)
+{
+ size_t nb_elem = ARRAY_SIZE(smallprimes);
+ size_t idx = 0;
+ size_t psize = 0;
+
+ for (; idx < nb_elem; idx++) {
+ psize = smallprimes[idx].length;
+
+ if (psize == size) {
+ /* Found a predefined prime */
+ RSA_TRACE("Found prime idx %zu", idx);
+ prime->data = (uint8_t *)smallprimes[idx].data;
+ prime->length = psize;
+ prime->paddr = virt_to_phys(prime->data);
+ break;
+ }
+ }
+}
+
+/*
+ * Build the descriptor preparing the CAAM global variables used during the
+ * prime generation
+ *
+ * @desc [out] Descriptor built
+ * @data Prime generation data
+ * @small_prime Pre-generated small prime value
+ * @desc_prime Physical address of the prime generator descriptor
+ */
+static enum caam_status do_desc_setup(uint32_t *desc, struct prime_data *data,
+ const struct caambuf *small_prime,
+ const paddr_t desc_prime)
+{
+ /*
+ * Referring to FIPS.186-4, B.3.3 (step 4.7)
+ * Maximum tries = 5 * (nlen / 2)
+	 * Where nlen is the RSA security length in bits
+ */
+ caam_desc_init(desc);
+ caam_desc_add_word(desc, DESC_HEADER(0));
+
+ caam_desc_add_word(desc, MATH(ADD, IMM_DATA, ZERO, SOL, 4));
+ caam_desc_add_word(desc, 5 * (data->key_size / 2));
+
+ /*
+ * Referring to FIPS.186-4, Table C.2
+	 * Get the number of Miller-Rabin test iterations as a function
+	 * of the prime number size
+ */
+ caam_desc_add_word(desc, MATH(ADD, IMM_DATA, ZERO, SIL, 4));
+ if (data->p->length > (MR_PRIME_SIZE / 8))
+ caam_desc_add_word(desc, 0x4);
+ else
+ caam_desc_add_word(desc, 0x5);
+
+ /*
+ * Preload PKHA A2 with the sqrt_value array (step 4.4)
+ * Do it once, not at each loop
+ */
+ caam_desc_add_word(desc, FIFO_LD(CLASS_1, PKHA_A2, NOACTION,
+ data->p->length));
+ caam_desc_add_ptr(desc, virt_to_phys((void *)sqrt_value));
+
+ if (data->era >= 8 && small_prime->paddr) {
+ /*
+		 * Preload PKHA B2 with the predefined small prime
+		 * (preload only the requested prime size)
+		 *
+		 * Before Era 8, the PRIME TEST function overwrites PKHA B2,
+		 * hence on Era < 8 PKHA B2 must be reloaded before each new
+		 * prime attempt after a PRIME TEST
+ */
+ caam_desc_add_word(desc, FIFO_LD(CLASS_1, PKHA_B2, NOACTION,
+ small_prime->length));
+ caam_desc_add_ptr(desc, small_prime->paddr);
+ }
+
+ /* Set the High order bit used to turn on MSB in prime candidate */
+ caam_desc_add_word(desc, MATHI_OP1(SHIFT_L, ONE, 0x3F, REG2, 8));
+
+ /* Load PKHA N Size with the prime size */
+ caam_desc_add_word(desc, LD_IMM(CLASS_1, REG_PKHA_N_SIZE, 4));
+ caam_desc_add_word(desc, data->p->length);
+
+ /*
+	 * Set the maximum number of tries in case the generated value
+	 * is too small. This limit prevents the prime number generation
+	 * from locking up the system
+ */
+ caam_desc_add_word(desc, MATH(ADD, ZERO, IMM_DATA, DPOVRD, 4));
+ caam_desc_add_word(desc, MAX_RETRY_PRIME_GEN);
+
+ /* Jump to the next descriptor desc */
+ caam_desc_add_word(desc, JUMP_NOTLOCAL(CLASS_NO, ALL_COND_TRUE,
+ JMP_COND(NONE)));
+ caam_desc_add_ptr(desc, desc_prime);
+
+ RSA_DUMPDESC(desc);
+ cache_operation(TEE_CACHECLEAN, (void *)sqrt_value, data->p->length);
+
+ return CAAM_NO_ERROR;
+}
+
+/*
+ * Build the descriptor generating a prime
+ *
+ * @desc [out] Descriptor built
+ * @data Prime generation data
+ * @small_prime Pre-generated small prime value
+ * @do_prime_q Generate Prime Q
+ * @desc_next Physical address of the next descriptor (can be NULL)
+ */
+static void do_desc_prime(uint32_t *desc, struct prime_data *data,
+ const struct caambuf *small_prime, bool do_prime_q,
+ const paddr_t desc_next)
+{
+ uint32_t desclen = 0;
+ uint32_t retry_too_small = 0;
+ uint32_t retry_new_number = 0;
+ uint32_t retry_new_mr_failed = 0;
+ uint32_t retry_mr_test = 0;
+
+ caam_desc_init(desc);
+ caam_desc_add_word(desc, DESC_HEADER(0));
+
+	/* Set up the retry counter = MAX (counting down) */
+ caam_desc_add_word(desc, MATH(ADD, SOL, ZERO, VSOL, 4));
+
+ retry_new_mr_failed = caam_desc_get_len(desc);
+ if (data->era < 8 && small_prime->paddr) {
+ /*
+		 * Preload PKHA B2 with the predefined small prime
+		 * (preload only the requested prime size)
+ */
+ caam_desc_add_word(desc, FIFO_LD(CLASS_1, PKHA_B2, NOACTION,
+ small_prime->length));
+ caam_desc_add_ptr(desc, small_prime->paddr);
+ }
+
+ retry_new_number = caam_desc_get_len(desc);
+	/* Decrement the retry counter */
+ caam_desc_add_word(desc, MATH(SUB, VSOL, ONE, VSOL, 4));
+ /* Exceed retry count - exit with RSA_TRY_FAIL error */
+ caam_desc_add_word(desc,
+ HALT_USER(ALL_COND_TRUE, MATH_N, RSA_TRY_FAIL));
+
+ retry_too_small = caam_desc_get_len(desc);
+ /* Check internal limit on random value generation */
+ caam_desc_add_word(desc, MATH(SUB, DPOVRD, ONE, DPOVRD, 4));
+ caam_desc_add_word(desc,
+ HALT_USER(ALL_COND_TRUE, MATH_Z, RETRY_TOO_SMALL));
+
+ /*
+ * Step 4.2 - Obtain a string p of (nlen/2) bits
+ * Step 4.3 - if (p is not odd) then p = p + 1
+ */
+ /* Generate 16 random bytes load into DECO fifo */
+ caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_NFIFO, 4));
+ caam_desc_add_word(desc, NFIFO_PAD(DECO, NFIFO_LC1, MSG, RND, 16));
+
+ /* Get the DECO Input fifo 8 MSB and force on high bit */
+ caam_desc_add_word(desc, MATH(OR, REG2, IFIFO, REG0, 8));
+ /* Get the DECO Input fifo 8 LSB and force it be be odd */
+	/* Get the DECO Input fifo 8 LSB and force it to be odd */
+ /* Move the MSB and LSB into IFIFO */
+ caam_desc_add_word(desc, MOVE(MATH_REG0, IFIFO, 0, 16));
+ /* Send the 8 MSB into PKHA N */
+ caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_NFIFO, 4));
+ caam_desc_add_word(desc, NFIFO_NOPAD(C1, 0, IFIFO, PKHA_N, 8));
+
+ /*
+ * Generate the "middle" random bytes and start them
+ * on their way into PKHA N
+ */
+ caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_NFIFO, 8));
+ caam_desc_add_word(desc, NFIFO_PAD(C1, 0, PKHA_N, RND, 0));
+ caam_desc_add_word(desc, data->p->length - 16);
+
+ /* And send the 8 LSB into PKHA N */
+ caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_NFIFO, 4));
+ caam_desc_add_word(desc, NFIFO_NOPAD(C1, NFIFO_FC1, IFIFO, PKHA_N, 8));
+
+ /*
+	 * Step 4.4 - if (prime < sqrt(2) * (2^((nlen / 2) - 1)))
+ * ==> retry_too_small
+ */
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(A2, B0));
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(B0, A0));
+ caam_desc_add_word(desc, PKHA_OP(MOD_AMODN, A));
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(A2, B0));
+ caam_desc_add_word(desc, PKHA_F2M_OP(MOD_ADD_A_B, B));
+
+ desclen = caam_desc_get_len(desc);
+ caam_desc_add_word(desc, JUMP_CNO_LOCAL(ANY_COND_FALSE,
+ JMP_COND(PKHA_IS_ZERO),
+ retry_too_small - desclen));
+
+ /*
+	 * Step 4.5 - Compute GCD(prime - 1, e) and test that it equals 1,
+	 * else try another candidate
+ */
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(N0, A0));
+ caam_desc_add_word(desc, FIFO_LD_IMM(CLASS_1, PKHA_B, NOACTION, 1));
+ caam_desc_add_word(desc, 0x01);
+ caam_desc_add_word(desc, PKHA_F2M_OP(MOD_ADD_A_B, B));
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(B0, N0));
+
+ caam_desc_add_word(desc,
+ FIFO_LD(CLASS_1, PKHA_A, NOACTION, data->e->length));
+ caam_desc_add_ptr(desc, data->e->paddr);
+ caam_desc_add_word(desc, PKHA_OP(GCD_A_N, B));
+
+ desclen = caam_desc_get_len(desc);
+ caam_desc_add_word(desc,
+ JUMP_CNO_LOCAL(ANY_COND_FALSE, JMP_COND(PKHA_GCD_1),
+ retry_new_number - desclen));
+
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(N0, A0));
+ caam_desc_add_word(desc, FIFO_LD_IMM(CLASS_1, PKHA_B, NOACTION, 1));
+ caam_desc_add_word(desc, 0x01);
+ caam_desc_add_word(desc, PKHA_F2M_OP(MOD_ADD_A_B, B));
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(B0, N0));
+
+ /*
+ * Step 4.5.1 - test primality
+ */
+ if (small_prime->paddr) {
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(B2, A0));
+ caam_desc_add_word(desc, PKHA_OP(GCD_A_N, B));
+ desclen = caam_desc_get_len(desc);
+ caam_desc_add_word(desc,
+ JUMP_CNO_LOCAL(ANY_COND_FALSE,
+ JMP_COND(PKHA_GCD_1),
+ retry_new_number - desclen));
+ }
+
+	/* Generate a random 'Miller-Rabin seed' of the prime size */
+	/* Load the number of Miller-Rabin test iterations */
+ caam_desc_add_word(desc, MATH(ADD, SIL, ZERO, VSIL, 4));
+ retry_mr_test = caam_desc_get_len(desc);
+ caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_NFIFO, 8));
+ caam_desc_add_word(desc, NFIFO_PAD(C1, NFIFO_FC1, PKHA_A, RND, 0));
+ caam_desc_add_word(desc, data->p->length);
+ caam_desc_add_word(desc, FIFO_LD_IMM(CLASS_1, PKHA_B, NOACTION, 1));
+ caam_desc_add_word(desc, 0x01);
+ caam_desc_add_word(desc, PKHA_OP(MR_PRIMER_TEST, B));
+
+ desclen = caam_desc_get_len(desc);
+ caam_desc_add_word(desc, JUMP_CNO_LOCAL(ANY_COND_FALSE,
+ JMP_COND(PKHA_IS_PRIME),
+ retry_new_mr_failed - desclen));
+ caam_desc_add_word(desc, MATH(SUB, VSIL, ONE, VSIL, 4));
+
+ desclen = caam_desc_get_len(desc);
+ caam_desc_add_word(desc,
+ JUMP_CNO_LOCAL(ALL_COND_FALSE,
+ JMP_COND(MATH_N) | JMP_COND(MATH_Z),
+ retry_mr_test - desclen));
+
+ /* Save prime generated */
+ caam_desc_add_word(desc, FIFO_ST(PKHA_N, data->p->length));
+
+ if (do_prime_q)
+ caam_desc_add_ptr(desc, data->q->paddr);
+ else
+ caam_desc_add_ptr(desc, data->p->paddr);
+
+ if (desc_next) {
+ /* Jump to the next descriptor desc */
+ caam_desc_add_word(desc, JUMP_NOTLOCAL(CLASS_NO, ALL_COND_TRUE,
+ JMP_COND(NONE)));
+ caam_desc_add_ptr(desc, desc_next);
+ }
+
+ RSA_DUMPDESC(desc);
+}
+
+/*
+ * Build the descriptor to check that primes p and q are not too close.
+ * Check the upper 100 bits with operation:
+ * |p - q| <= 2^(nlen/2-100)
+ *
+ * @desc [out] Descriptor built
+ * @p Prime P
+ * @max_n Max N built with 0xFFFF...
+ * @desc_new_q Physical address to generate a new Q value
+ */
+static void do_checks_primes(uint32_t *desc, const struct caambuf *p,
+ const struct caambuf *max_n,
+ const paddr_t desc_new_q)
+{
+ const uint8_t check_len = 16; /* Check 128 bits */
+
+ caam_desc_init(desc);
+ caam_desc_add_word(desc, DESC_HEADER(0));
+
+ /* Load prime p */
+ caam_desc_add_word(desc, FIFO_LD(CLASS_1, PKHA_B, NOACTION, p->length));
+ caam_desc_add_ptr(desc, p->paddr);
+
+ /* Retrieve Q from PKHA N, previously computed */
+ caam_desc_add_word(desc, PKHA_CPY_SSIZE(N0, A0));
+
+ /* Calculate p - q, need a modulus of size prime p filled with 0xFF */
+ caam_desc_add_word(desc,
+ FIFO_LD(CLASS_1, PKHA_N, NOACTION, max_n->length));
+ caam_desc_add_ptr(desc, max_n->paddr);
+
+ /* PKHA_B = p - q */
+ caam_desc_add_word(desc, PKHA_OP(MOD_SUB_A_B, B));
+
+ /* Unload PKHA register B to output Data FIFO */
+ caam_desc_add_word(desc, LD_NOCLASS_IMM(REG_CHA_CTRL, 4));
+ caam_desc_add_word(desc, CCTRL_ULOAD_PKHA_B);
+
+ /* Get the first 128 bits in MATH 0 */
+ caam_desc_add_word(desc, MOVE_WAIT(OFIFO, MATH_REG0, 0, check_len));
+
+ /*
+ * We now need to trash the rest of the result.
+ * We started with 128, 192, or 256 bytes in the OFIFO before we moved
+ * check_len bytes into MATH registers.
+ */
+ if (p->length > 128 + check_len) {
+ caam_desc_add_word(desc, MOVE(OFIFO, C1_CTX_REG, 0, check_len));
+ caam_desc_add_word(desc, MOVE(OFIFO, C1_CTX_REG, 0,
+ (p->length - 128 - check_len)));
+ } else if (p->length > check_len) {
+ caam_desc_add_word(desc, MOVE(OFIFO, C1_CTX_REG, 0,
+ (p->length - check_len)));
+ }
+
+ /*
+ * In MATH registers we have the p - q value modulo 0xFFFFF...
+	 * Check whether the upper 100 bits are all zeros or all ones,
+	 * meaning q is too close to p
+ */
+ /* Check first 64 bits if not 0's check if 1's */
+ caam_desc_add_word(desc, MATH(ADD, ZERO, REG0, REG0, 8));
+ caam_desc_add_word(desc,
+ JUMP_CNO_LOCAL(ANY_COND_FALSE, JMP_COND(MATH_Z), 6));
+ /* First 64 bits are 0's, check next 36 bits */
+ caam_desc_add_word(desc, MATH(AND, REG1, IMM_DATA, REG1, 8));
+ caam_desc_add_word(desc, UINT32_MAX);
+ caam_desc_add_word(desc, 0xF0000000);
+
+ /* Next 36 bits are 0 */
+ caam_desc_add_word(desc,
+ JUMP_CNO_LOCAL(ALL_COND_TRUE, JMP_COND(MATH_Z), 10));
+ /* Exit status GOOD Q */
+ caam_desc_add_word(desc, HALT_USER(ALL_COND_TRUE, NONE, STATUS_GOOD_Q));
+
+ /* Check if 100 bits are 1's */
+ caam_desc_add_word(desc, MATH(ADD, ONE, REG0, REG0, 8));
+ /* Not all 1's exit status GOOD Q */
+ caam_desc_add_word(desc,
+ HALT_USER(ANY_COND_FALSE, MATH_Z, STATUS_GOOD_Q));
+ /* First 64 bits are 1's, check next 36 bits */
+ caam_desc_add_word(desc, MATH(AND, REG1, IMM_DATA, REG1, 8));
+ caam_desc_add_word(desc, UINT32_MAX);
+ caam_desc_add_word(desc, SHIFT_U32(0xF, 28));
+
+	/* Use only 4 bytes of immediate data even if the operation is 8 bytes */
+ caam_desc_add_word(desc, MATH(ADD, REG1, IMM_DATA, REG1, 8) | MATH_IFB);
+ caam_desc_add_word(desc, SHIFT_U32(1, 28));
+
+ /* Not all 1's exit status GOOD Q */
+ caam_desc_add_word(desc,
+ HALT_USER(ANY_COND_FALSE, MATH_Z, STATUS_GOOD_Q));
+
+ if (desc_new_q) {
+ caam_desc_add_word(desc, JUMP_NOTLOCAL(CLASS_NO, ALL_COND_TRUE,
+ JMP_COND(NONE)));
+ caam_desc_add_ptr(desc, desc_new_q);
+ }
+
+ RSA_DUMPDESC(desc);
+}
+
+enum caam_status caam_prime_gen(struct prime_data *data)
+{
+ enum caam_status retstatus = CAAM_FAILURE;
+ struct caambuf small_prime = { };
+ struct caambuf max_n = { };
+ struct caam_jobctx jobctx = { };
+ uint32_t *all_descs = NULL;
+ uint32_t *desc_p = NULL;
+ uint32_t *desc_q = NULL;
+ uint32_t *desc_check_p_q = NULL;
+ paddr_t paddr_desc_p = 0;
+ paddr_t paddr_desc_q = 0;
+ paddr_t paddr_desc_check_p_q = 0;
+ size_t size_all_descs = 0;
+
+ /* Allocate the job used to prepare the operation */
+ if (data->q) {
+ size_all_descs = SETUP_RSA_DESC_ENTRIES +
+ GEN_RSA_DESC_ENTRIES * 2 +
+ CHECK_P_Q_DESC_ENTRIES;
+
+ retstatus = caam_calloc_buf(&max_n, data->p->length + 1);
+ if (retstatus != CAAM_NO_ERROR)
+ goto end_gen_prime;
+
+		/* Fill max_n with 0xFF... to perform the P and Q closeness check */
+ memset(max_n.data, UINT8_MAX, max_n.length);
+ cache_operation(TEE_CACHECLEAN, max_n.data, max_n.length);
+ } else {
+ size_all_descs = SETUP_RSA_DESC_ENTRIES + GEN_RSA_DESC_ENTRIES;
+ }
+
+ all_descs = caam_calloc_desc(size_all_descs);
+ if (!all_descs) {
+ retstatus = CAAM_OUT_MEMORY;
+ goto end_gen_prime;
+ }
+
+ /* Descriptor Prime P */
+ desc_p = all_descs + SETUP_RSA_DESC_ENTRIES;
+ paddr_desc_p = virt_to_phys(desc_p);
+ if (!paddr_desc_p) {
+ retstatus = CAAM_FAILURE;
+ goto end_gen_prime;
+ }
+
+ /*
+	 * Search for a predefined prime in the small primes list. If no
+	 * small prime is found in the list, continue anyway, but the
+	 * resulting prime will probably not be as strong
+ */
+ search_smallprime(data->p->length, &small_prime);
+
+ RSA_TRACE("Do prime of %zu bytes (security len %zu bits) (ERA=%" PRId8
+ ")",
+ data->p->length, data->key_size, data->era);
+
+ retstatus = do_desc_setup(all_descs, data, &small_prime, paddr_desc_p);
+
+ if (data->q) {
+ /* Descriptor Prime Q */
+ desc_q = desc_p + GEN_RSA_DESC_ENTRIES;
+ paddr_desc_q =
+ paddr_desc_p + DESC_SZBYTES(GEN_RSA_DESC_ENTRIES);
+
+ /* Descriptor Check Primes P & Q */
+ desc_check_p_q = desc_q + GEN_RSA_DESC_ENTRIES;
+ paddr_desc_check_p_q =
+ paddr_desc_q + DESC_SZBYTES(GEN_RSA_DESC_ENTRIES);
+
+		/* Generate Primes P and Q, then check that Q is not too close to P */
+ do_desc_prime(desc_p, data, &small_prime, false, paddr_desc_q);
+
+ do_desc_prime(desc_q, data, &small_prime, true,
+ paddr_desc_check_p_q);
+
+ do_checks_primes(desc_check_p_q, data->p, &max_n, paddr_desc_q);
+ } else {
+ do_desc_prime(desc_p, data, &small_prime, false, 0);
+ }
+
+ cache_operation(TEE_CACHECLEAN, small_prime.data, data->p->length);
+ cache_operation(TEE_CACHECLEAN, data->e->data, data->e->length);
+ cache_operation(TEE_CACHEFLUSH, data->p->data, data->p->length);
+
+ if (data->q)
+ cache_operation(TEE_CACHEFLUSH, data->q->data, data->q->length);
+
+ jobctx.desc = all_descs;
+
+ cache_operation(TEE_CACHECLEAN, (void *)all_descs,
+ DESC_SZBYTES(size_all_descs));
+
+ retstatus = caam_jr_enqueue(&jobctx, NULL);
+
+ if (data->q && retstatus == CAAM_JOB_STATUS) {
+ /*
+ * Expect to have a retstatus == CAAM_JOB_STATUS, where
+ * job status == STATUS_GOOD_Q
+ */
+ RSA_TRACE("Check Prime Q Status 0x%08" PRIx32, jobctx.status);
+
+ if (JRSTA_GET_HALT_USER(jobctx.status) == STATUS_GOOD_Q) {
+ cache_operation(TEE_CACHEINVALIDATE, data->p->data,
+ data->p->length);
+ cache_operation(TEE_CACHEINVALIDATE, data->q->data,
+ data->q->length);
+
+ RSA_DUMPBUF("Prime P", data->p->data, data->p->length);
+ RSA_DUMPBUF("Prime Q", data->q->data, data->q->length);
+ retstatus = CAAM_NO_ERROR;
+ goto end_gen_prime;
+ }
+ } else if (retstatus == CAAM_NO_ERROR && !data->q) {
+ cache_operation(TEE_CACHEINVALIDATE, data->p->data,
+ data->p->length);
+
+ RSA_DUMPBUF("Prime", data->p->data, data->p->length);
+
+ retstatus = CAAM_NO_ERROR;
+ goto end_gen_prime;
+ }
+
+ RSA_TRACE("Prime Status 0x%08" PRIx32, jobctx.status);
+ retstatus = CAAM_FAILURE;
+
+end_gen_prime:
+ caam_free_desc(&all_descs);
+ caam_free_buf(&max_n);
+
+ return retstatus;
+}
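
As a quick, hedged sanity check (not part of the commit) of the "%weeded" column in the small-prime tables of caam_prime.c above, the estimate "Percentage weeded: 1 - 1.12/ln B" can be evaluated for the first product, whose highest prime is B = 53; it reproduces the 72% table entry. A standalone C snippet to verify it:

    #include <math.h>
    #include <stdio.h>

    int main(void)
    {
            /*
             * Estimate from Handbook of Applied Cryptography 4.4.1: the
             * fraction of candidates rejected by a GCD against the product
             * of all primes up to B, here B = 53 (the 64-bit product).
             */
            double weeded = 1.0 - 1.12 / log(53.0); /* log() is the natural log */

            printf("weeded fraction for B = 53: %.2f\n", weeded); /* prints ~0.72 */
            return 0;
    }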
diff --git a/core/drivers/crypto/caam/acipher/caam_rsa.c b/core/drivers/crypto/caam/acipher/caam_rsa.c
new file mode 100644
index 00000000..60d73a22
--- /dev/null
+++ b/core/drivers/crypto/caam/acipher/caam_rsa.c
@@ -0,0 +1,1668 @@
+// SPDX-License-Identifier: BSD-2-Clause
+/*
+ * Copyright 2018-2020 NXP
+ *
+ * CAAM RSA manager.
+ * Implementation of RSA functions
+ */
+#include <caam_acipher.h>
+#include <caam_common.h>
+#include <caam_hal_ctrl.h>
+#include <caam_io.h>
+#include <caam_jr.h>
+#include <caam_utils_mem.h>
+#include <caam_utils_sgt.h>
+#include <caam_utils_status.h>
+#include <drvcrypt.h>
+#include <drvcrypt_acipher.h>
+#include <drvcrypt_math.h>
+#include <mm/core_memprot.h>
+#include <string.h>
+#include <tee/cache.h>
+#include <tee/tee_cryp_utl.h>
+
+#include "local.h"
+
+/*
+ * Definition of the maximum bits of Exponent e
+ * Refer to sp800-56b
+ */
+#define MAX_BITS_EXP_E 256
+
+/*
+ * Define the maximum number of entries in a descriptor as a
+ * function of the encrypt/decrypt operation and the private key format
+ */
+#ifdef CFG_CAAM_64BIT
+#define MAX_DESC_ENC (8 + 4)
+#define MAX_DESC_DEC_1 (7 + 2 + 4)
+#define MAX_DESC_DEC_2 (11 + 2 + 7)
+#define MAX_DESC_DEC_3 (13 + 2 + 10)
+/* Define the maximum number of entries in the RSA Finish Key descriptor */
+#define MAX_DESC_KEY_FINISH 24
+#else
+#define MAX_DESC_ENC 8
+#define MAX_DESC_DEC_1 (7 + 2)
+#define MAX_DESC_DEC_2 (11 + 2)
+#define MAX_DESC_DEC_3 (13 + 2)
+/* Define the maximum number of entries in the RSA Finish Key descriptor */
+#define MAX_DESC_KEY_FINISH 15
+#endif /* CFG_CAAM_64BIT */
+
+static TEE_Result do_caam_encrypt(struct drvcrypt_rsa_ed *rsa_data,
+ uint32_t operation);
+static TEE_Result do_caam_decrypt(struct drvcrypt_rsa_ed *rsa_data,
+ uint32_t operation);
+
+/*
+ * Definition of the local RSA keypair
+ * Public Key Format: (n, e)
+ * Private Key Format #1: (n, d)
+ * Private Key Format #2: (p, q, d)
+ * Private Key Format #3: (p, q, dp, dq, qp)
+ */
+struct caam_rsa_keypair {
+ uint8_t format; /* Define the Private Key Format (1, 2 or 3) */
+ struct caambuf n; /* Modulus [n = p * q] */
+ struct caambuf e; /* Public Exponent 65537 <= e < 2^256 */
+ struct caambuf d; /* Private Exponent [d = 1/e mod LCM(p-1, q-1)] */
+ struct caambuf p; /* Private Prime p */
+ struct caambuf q; /* Private Prime q */
+ struct caambuf dp; /* Private [dp = d mod (p-1)] */
+ struct caambuf dq; /* Private [dq = d mod (q-1)] */
+ struct caambuf qp; /* Private [qp = 1/q mod p] */
+};
+
+/* CAAM Era version */
+static uint8_t caam_era;
+
+/*
+ * Free local RSA keypair
+ *
+ * @key RSA keypair
+ */
+static void do_keypair_free(struct caam_rsa_keypair *key)
+{
+ caam_free_buf(&key->e);
+ caam_free_buf(&key->n);
+ caam_free_buf(&key->d);
+
+ if (key->format > 1 && key->p.data) {
+ key->p.length += key->q.length;
+ caam_free_buf(&key->p);
+ }
+
+ if (key->format > 2 && key->dp.data) {
+ key->dp.length += key->dq.length + key->qp.length;
+ caam_free_buf(&key->dp);
+ }
+}
+
+/*
+ * Convert Crypto RSA Key to local RSA Public Key
+ * Ensure the key is pushed to physical memory
+ *
+ * @outkey [out] Output keypair in local format
+ * @inkey Input key in TEE Crypto format
+ */
+static enum caam_status do_keypub_conv(struct caam_rsa_keypair *outkey,
+ const struct rsa_public_key *inkey)
+{
+ enum caam_status retstatus = CAAM_FAILURE;
+
+ RSA_TRACE("RSA Convert Public Key size N=%zu",
+ crypto_bignum_num_bytes(inkey->n));
+
+ retstatus = caam_calloc_align_buf(&outkey->e,
+ crypto_bignum_num_bytes(inkey->e));
+ if (retstatus != CAAM_NO_ERROR)
+ goto exit_conv;
+
+ crypto_bignum_bn2bin(inkey->e, outkey->e.data);
+ cache_operation(TEE_CACHECLEAN, outkey->e.data, outkey->e.length);
+
+ retstatus = caam_calloc_align_buf(&outkey->n,
+ crypto_bignum_num_bytes(inkey->n));
+ if (retstatus != CAAM_NO_ERROR)
+ goto exit_conv;
+
+ crypto_bignum_bn2bin(inkey->n, outkey->n.data);
+ cache_operation(TEE_CACHECLEAN, outkey->n.data, outkey->n.length);
+
+ return CAAM_NO_ERROR;
+
+exit_conv:
+ do_keypair_free(outkey);
+
+ return CAAM_OUT_MEMORY;
+}
+
+/*
+ * Convert Crypto RSA Key additional fields of the key format #3
+ * Optional fields (dp, dq, qp)
+ *
+ * @outkey [out] Output keypair in local format
+ * @inkey Input key in TEE Crypto format
+ */
+static enum caam_status do_keypair_conv_f3(struct caam_rsa_keypair *outkey,
+ const struct rsa_keypair *inkey)
+{
+ enum caam_status retstatus = CAAM_FAILURE;
+ size_t size_p = 0;
+ size_t size_q = 0;
+ size_t size_dp = 0;
+ size_t size_dq = 0;
+ size_t size_qp = 0;
+
+ size_p = outkey->p.length;
+ size_q = outkey->q.length;
+ size_dp = crypto_bignum_num_bytes(inkey->dp);
+ size_dq = crypto_bignum_num_bytes(inkey->dq);
+ size_qp = crypto_bignum_num_bytes(inkey->qp);
+
+	/* Check that the dp, dq and qp sizes do not exceed the p and q sizes */
+ if (size_dp > size_p || size_dq > size_q || size_qp > size_p)
+ return CAAM_FAILURE;
+
+ /*
+	 * If any of the parameters dp, dq or qp is not filled,
+	 * return immediately. This is not an error.
+ */
+ if (!size_dp || !size_dq || !size_qp)
+ return CAAM_NO_ERROR;
+
+ /*
+ * CAAM is assuming that:
+	 * - dp and qp are the same size as p
+	 * - dq is the same size as q
+	 *
+	 * Because the computed dp, dq and qp can be smaller
+	 * than assumed above, force the dp, dq and qp
+	 * buffer sizes.
+ */
+ /* Allocate one buffer for the 3 fields */
+ retstatus =
+ caam_calloc_align_buf(&outkey->dp, size_p + size_q + size_p);
+ if (retstatus != CAAM_NO_ERROR)
+ return CAAM_OUT_MEMORY;
+
+ /* Field dp */
+ outkey->dp.length = size_p;
+
+ /*
+ * Ensure buffer is copied starting with 0's
+ * if size_dp != size_p
+ */
+ crypto_bignum_bn2bin(inkey->dp, outkey->dp.data + size_p - size_dp);
+
+ /* Field dq */
+ outkey->dq.data = outkey->dp.data + size_p;
+ outkey->dq.length = size_q;
+ outkey->dq.paddr = outkey->dp.paddr + size_p;
+
+ /*
+ * Ensure buffer is copied starting with 0's
+ * if size_dq != size_q
+ */
+ crypto_bignum_bn2bin(inkey->dq, outkey->dq.data + size_q - size_dq);
+
+ /* Field qp */
+ outkey->qp.data = outkey->dq.data + size_q;
+ outkey->qp.length = size_p;
+ outkey->qp.paddr = outkey->dq.paddr + size_q;
+
+ /*
+ * Ensure buffer is copied starting with 0's
+ * if size_qp != size_p
+ */
+ crypto_bignum_bn2bin(inkey->qp, outkey->qp.data + size_p - size_qp);
+
+ /* Push fields value to the physical memory */
+ cache_operation(TEE_CACHECLEAN, outkey->dp.data,
+ outkey->dp.length + outkey->dq.length +
+ outkey->qp.length);
+
+	outkey->format = 3;
+
+ return CAAM_NO_ERROR;
+}
+
+/*
+ * Convert Crypto RSA Key additional fields of the key format #2
+ * Optional fields (p, q)
+ *
+ * @outkey [out] Output keypair in local format
+ * @inkey Input key in TEE Crypto format
+ */
+static enum caam_status do_keypair_conv_f2(struct caam_rsa_keypair *outkey,
+ const struct rsa_keypair *inkey)
+{
+ enum caam_status retstatus = CAAM_FAILURE;
+ size_t size_p = 0;
+ size_t size_q = 0;
+
+ size_p = crypto_bignum_num_bytes(inkey->p);
+ size_q = crypto_bignum_num_bytes(inkey->q);
+
+ /*
+	 * If Prime P or Prime Q is not filled, return
+	 * immediately. This is not an error.
+	 */
+	if (!size_p || !size_q)
+ return CAAM_NO_ERROR;
+
+ /* Allocate one buffer for both */
+ retstatus = caam_calloc_align_buf(&outkey->p, size_p + size_q);
+ if (retstatus != CAAM_NO_ERROR)
+ return CAAM_OUT_MEMORY;
+
+ /* Field Prime p */
+ outkey->p.length = size_p;
+ crypto_bignum_bn2bin(inkey->p, outkey->p.data);
+
+ /* Field Prime q */
+ outkey->q.data = outkey->p.data + size_p;
+ outkey->q.length = size_q;
+ outkey->q.paddr = outkey->p.paddr + size_p;
+
+ crypto_bignum_bn2bin(inkey->q, outkey->q.data);
+
+ /* Push fields value to the physical memory */
+ cache_operation(TEE_CACHECLEAN, outkey->p.data, size_p + size_q);
+
+ outkey->format = 2;
+
+ if (CFG_NXP_CAAM_RSA_KEY_FORMAT > 2) {
+ retstatus = do_keypair_conv_f3(outkey, inkey);
+ RSA_TRACE("do_keypair_conv_f3 returned 0x%" PRIx32, retstatus);
+ }
+
+ return retstatus;
+}
+
+/*
+ * Convert Crypto RSA Key to local RSA Keypair Key
+ * Ensure the key is pushed to physical memory
+ * The exponent e is not converted since it is not used for decryption
+ *
+ * @outkey [out] Output keypair in local format
+ * @inkey Input key in TEE Crypto format
+ */
+static enum caam_status do_keypair_conv(struct caam_rsa_keypair *outkey,
+ const struct rsa_keypair *inkey)
+{
+ enum caam_status retstatus = CAAM_FAILURE;
+
+ RSA_TRACE("RSA Convert Keypair size N=%zu",
+ crypto_bignum_num_bytes(inkey->n));
+
+ /* Mandatory fields are n and d => Private Key Format #1 */
+ retstatus = caam_calloc_align_buf(&outkey->n,
+ crypto_bignum_num_bytes(inkey->n));
+ if (retstatus != CAAM_NO_ERROR)
+ return retstatus;
+
+ crypto_bignum_bn2bin(inkey->n, outkey->n.data);
+ cache_operation(TEE_CACHECLEAN, outkey->n.data, outkey->n.length);
+
+ retstatus = caam_calloc_align_buf(&outkey->d,
+ crypto_bignum_num_bytes(inkey->d));
+ if (retstatus != CAAM_NO_ERROR)
+ return retstatus;
+
+ crypto_bignum_bn2bin(inkey->d, outkey->d.data);
+ cache_operation(TEE_CACHECLEAN, outkey->d.data, outkey->d.length);
+
+ outkey->format = 1;
+
+ if (CFG_NXP_CAAM_RSA_KEY_FORMAT > 1) {
+ retstatus = do_keypair_conv_f2(outkey, inkey);
+ RSA_TRACE("do_keypair_conv_f2 returned 0x%" PRIx32, retstatus);
+ }
+
+ return retstatus;
+}
+
+/*
+ * Allocate a RSA keypair
+ *
+ * @key Keypair
+ * @size_bits Key size in bits
+ */
+static TEE_Result do_allocate_keypair(struct rsa_keypair *key, size_t size_bits)
+{
+ RSA_TRACE("Allocate Keypair of %zu bits", size_bits);
+
+ /* Initialize all input key fields to 0 */
+ memset(key, 0, sizeof(*key));
+
+ /* Allocate the Public Exponent to maximum size */
+ key->e = crypto_bignum_allocate(MAX_BITS_EXP_E);
+ if (!key->e)
+ goto err_alloc_keypair;
+
+ /* Allocate the Private Exponent [d = 1/e mod LCM(p-1, q-1)] */
+ key->d = crypto_bignum_allocate(size_bits);
+ if (!key->d)
+ goto err_alloc_keypair;
+
+ /* Allocate the Modulus (size_bits) [n = p * q] */
+ key->n = crypto_bignum_allocate(size_bits);
+ if (!key->n)
+ goto err_alloc_keypair;
+
+ /* Allocate the prime number p of size (size_bits / 2) */
+ key->p = crypto_bignum_allocate(size_bits / 2);
+ if (!key->p)
+ goto err_alloc_keypair;
+
+ /* Allocate the prime number q of size (size_bits / 2) */
+ key->q = crypto_bignum_allocate(size_bits / 2);
+ if (!key->q)
+ goto err_alloc_keypair;
+
+ /* Allocate dp (size_bits / 2) [d mod (p-1)] */
+ key->dp = crypto_bignum_allocate(size_bits / 2);
+ if (!key->dp)
+ goto err_alloc_keypair;
+
+ /* Allocate dq (size_bits / 2) [d mod (q-1)] */
+ key->dq = crypto_bignum_allocate(size_bits / 2);
+ if (!key->dq)
+ goto err_alloc_keypair;
+
+ /* Allocate qp (size_bits / 2) [1/q mod p] */
+ key->qp = crypto_bignum_allocate(size_bits / 2);
+ if (!key->qp)
+ goto err_alloc_keypair;
+
+ return TEE_SUCCESS;
+
+err_alloc_keypair:
+ RSA_TRACE("Allocation error");
+
+ crypto_bignum_free(key->e);
+ crypto_bignum_free(key->d);
+ crypto_bignum_free(key->n);
+ crypto_bignum_free(key->p);
+ crypto_bignum_free(key->q);
+ crypto_bignum_free(key->dp);
+ crypto_bignum_free(key->dq);
+ crypto_bignum_free(key->qp);
+
+ return TEE_ERROR_OUT_OF_MEMORY;
+}
+
+/*
+ * Allocate a RSA public key
+ *
+ * @key Public Key
+ * @size_bits Key size in bits
+ */
+static TEE_Result do_allocate_publickey(struct rsa_public_key *key,
+ size_t size_bits)
+{
+ RSA_TRACE("Allocate Public Key of %zu bits", size_bits);
+
+ /* Initialize all input key fields to 0 */
+ memset(key, 0, sizeof(*key));
+
+ /* Allocate the Public Exponent to maximum size */
+ key->e = crypto_bignum_allocate(MAX_BITS_EXP_E);
+ if (!key->e)
+ goto err_alloc_publickey;
+
+ /* Allocate the Modulus (size_bits) [n = p * q] */
+ key->n = crypto_bignum_allocate(size_bits);
+ if (!key->n)
+ goto err_alloc_publickey;
+
+ return TEE_SUCCESS;
+
+err_alloc_publickey:
+ RSA_TRACE("Allocation error");
+
+ crypto_bignum_free(key->e);
+ crypto_bignum_free(key->n);
+
+ return TEE_ERROR_OUT_OF_MEMORY;
+}
+
+/*
+ * Free a RSA public key
+ *
+ * @key Public Key
+ */
+static void do_free_publickey(struct rsa_public_key *key)
+{
+ crypto_bignum_free(key->e);
+ crypto_bignum_free(key->n);
+}
+
+/*
+ * Output the RSA keypair format #3 additional fields in big number objects
+ *
+ * @key [out] Keypair
+ * @genkey Local RSA keypair generated by CAAM
+ */
+static TEE_Result gen_keypair_get_f3(struct rsa_keypair *key,
+ struct caam_rsa_keypair *genkey)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+
+ cache_operation(TEE_CACHEINVALIDATE, genkey->dp.data,
+ genkey->dp.length + genkey->dq.length +
+ genkey->qp.length);
+
+ RSA_DUMPBUF("dp", genkey->dp.data, genkey->dp.length);
+ RSA_DUMPBUF("dq", genkey->dq.data, genkey->dq.length);
+ RSA_DUMPBUF("qp", genkey->qp.data, genkey->qp.length);
+
+ ret = crypto_bignum_bin2bn(genkey->dp.data, genkey->dp.length, key->dp);
+ if (ret != TEE_SUCCESS)
+ return ret;
+
+ ret = crypto_bignum_bin2bn(genkey->dq.data, genkey->dq.length, key->dq);
+ if (ret != TEE_SUCCESS)
+ return ret;
+
+ ret = crypto_bignum_bin2bn(genkey->qp.data, genkey->qp.length, key->qp);
+ return ret;
+}
+
+/*
+ * Output the RSA keypair format 2 additional fields in big number object
+ *
+ * @key [out] Keypair
+ * @genkey Generated CAAM RSA keypair
+ */
+static TEE_Result gen_keypair_get_f2(struct rsa_keypair *key,
+ struct caam_rsa_keypair *genkey)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+
+ cache_operation(TEE_CACHEINVALIDATE, genkey->p.data,
+ genkey->p.length + genkey->q.length);
+
+ ret = crypto_bignum_bin2bn(genkey->p.data, genkey->p.length, key->p);
+ if (ret != TEE_SUCCESS)
+ return ret;
+
+ ret = crypto_bignum_bin2bn(genkey->q.data, genkey->q.length, key->q);
+
+ if (ret == TEE_SUCCESS && genkey->format > 2)
+ ret = gen_keypair_get_f3(key, genkey);
+
+ return ret;
+}
+
+/*
+ * Generate an RSA keypair
+ *
+ * @key [out] Keypair
+ * @key_size Key size in bits
+ */
+static TEE_Result do_gen_keypair(struct rsa_keypair *key, size_t key_size)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+ enum caam_status retstatus = CAAM_FAILURE;
+ struct caam_rsa_keypair genkey = { };
+ size_t size_d = 0;
+ size_t size_n = 0;
+ size_t size_d_gen = 0;
+ struct caam_jobctx jobctx = { };
+ uint32_t *desc = 0;
+ uint32_t desclen = 0;
+ struct prime_data prime = { };
+
+ RSA_TRACE("Generate Keypair of %zu bits", key_size);
+
+ genkey.format = CFG_NXP_CAAM_RSA_KEY_FORMAT;
+
+ /* Allocate the job used to prepare the operation */
+ desc = caam_calloc_desc(MAX_DESC_KEY_FINISH);
+ if (!desc) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_gen_keypair;
+ }
+
+ /* First allocate primes p and q in one buffer */
+ retstatus = caam_calloc_align_buf(&genkey.p, key_size / 8);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_gen_keypair;
+ }
+
+ /* Prepare q */
+ genkey.p.length /= 2;
+ genkey.q.data = genkey.p.data + genkey.p.length;
+ genkey.q.length = genkey.p.length;
+ genkey.q.paddr = genkey.p.paddr + genkey.p.length;
+
+ /* Allocate Public exponent to a caam buffer */
+ retstatus = caam_calloc_buf(&genkey.e, crypto_bignum_num_bytes(key->e));
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_gen_keypair;
+ }
+
+ /*
+ * Allocate d and n in one buffer.
+ * Size of d is (key_size + 1) bits - Add a 32 bits word to
+ * retrieve the length of d generated by CAAM RSA Finalize Key
+ */
+ size_d = sizeof(uint32_t) + key_size / 8 + 1;
+ size_n = key_size / 8;
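+ /*
+ * For example, with key_size = 2048 bits: size_d = 4 + 256 + 1 = 261
+ * bytes (length word plus d) and size_n = 256 bytes, i.e. a single
+ * 517-byte buffer shared by d and n.
+ */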
+
+ retstatus = caam_calloc_align_buf(&genkey.d, size_d + size_n);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_gen_keypair;
+ }
+
+ genkey.d.length = size_d;
+ genkey.n.data = genkey.d.data + size_d;
+ genkey.n.length = size_n;
+ genkey.n.paddr = genkey.d.paddr + size_d;
+
+ if (genkey.format > 2) {
+ /* Allocate dp, dq and qp in one buffer */
+ retstatus = caam_calloc_align_buf(&genkey.dp,
+ ((key_size / 8) / 2) * 3);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_gen_keypair;
+ }
+
+ genkey.dp.length /= 3;
+ /* Prepare dq and qp */
+ genkey.dq.data = genkey.dp.data + genkey.dp.length;
+ genkey.dq.length = genkey.dp.length;
+ genkey.dq.paddr = genkey.dp.paddr + genkey.dp.length;
+
+ genkey.qp.data = genkey.dq.data + genkey.dq.length;
+ genkey.qp.length = genkey.dq.length;
+ genkey.qp.paddr = genkey.dq.paddr + genkey.dq.length;
+ }
+
+ crypto_bignum_bn2bin(key->e, genkey.e.data);
+
+ prime.era = caam_era;
+ prime.key_size = key_size;
+ prime.e = &genkey.e;
+ prime.p = &genkey.p;
+ prime.q = &genkey.q;
+
+ /* Generate prime p and q */
+ retstatus = caam_prime_gen(&prime);
+ RSA_TRACE("Generate Prime P and Q returned 0x%" PRIx32, retstatus);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto exit_gen_keypair;
+ }
+
+ caam_desc_init(desc);
+ caam_desc_add_word(desc, DESC_HEADER(0));
+
+ caam_desc_add_word(desc, 0);
+ caam_desc_add_word(desc, PDB_RSA_KEY_P_SIZE(genkey.p.length));
+ caam_desc_add_word(desc, PDB_RSA_KEY_N_SIZE(genkey.n.length) |
+ PDB_RSA_KEY_E_SIZE(genkey.e.length));
+
+ caam_desc_add_ptr(desc, genkey.p.paddr);
+ caam_desc_add_ptr(desc, genkey.q.paddr);
+ caam_desc_add_ptr(desc, genkey.e.paddr);
+ caam_desc_add_ptr(desc, genkey.n.paddr);
+ caam_desc_add_ptr(desc, genkey.d.paddr + sizeof(uint32_t));
+ caam_desc_add_ptr(desc, genkey.d.paddr);
+
+ if (genkey.format > 2) {
+ caam_desc_add_ptr(desc, genkey.dp.paddr);
+ caam_desc_add_ptr(desc, genkey.dq.paddr);
+ caam_desc_add_ptr(desc, genkey.qp.paddr);
+ caam_desc_add_word(desc, RSA_FINAL_KEY(ALL));
+
+ cache_operation(TEE_CACHEFLUSH, genkey.dp.data,
+ genkey.dp.length + genkey.dq.length +
+ genkey.qp.length);
+
+ } else {
+ caam_desc_add_word(desc, RSA_FINAL_KEY(N_D));
+ }
+
+ desclen = caam_desc_get_len(desc);
+ caam_desc_update_hdr(desc, DESC_HEADER_IDX(desclen, desclen - 1));
+
+ jobctx.desc = desc;
+ RSA_DUMPDESC(desc);
+
+ cache_operation(TEE_CACHECLEAN, genkey.e.data, genkey.e.length);
+ cache_operation(TEE_CACHEFLUSH, genkey.p.data,
+ genkey.p.length + genkey.q.length);
+ cache_operation(TEE_CACHEFLUSH, genkey.d.data,
+ genkey.d.length + genkey.n.length);
+
+ retstatus = caam_jr_enqueue(&jobctx, NULL);
+
+ if (retstatus == CAAM_NO_ERROR) {
+ cache_operation(TEE_CACHEINVALIDATE, genkey.d.data,
+ genkey.d.length + genkey.n.length);
+
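+ /*
+ * The RSA Finalize Key protocol stores the generated length of d
+ * in the 32-bit word prepended to the d buffer; only its two least
+ * significant bytes are read here (little-endian).
+ */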
+ size_d_gen = genkey.d.data[0] + (genkey.d.data[1] << 8);
+
+ RSA_TRACE("D size %zu", size_d_gen);
+ RSA_DUMPBUF("N", genkey.n.data, genkey.n.length);
+ RSA_DUMPBUF("D", genkey.d.data + sizeof(uint32_t), size_d_gen);
+
+ ret = crypto_bignum_bin2bn(genkey.n.data, genkey.n.length,
+ key->n);
+ if (ret != TEE_SUCCESS)
+ goto exit_gen_keypair;
+
+ ret = crypto_bignum_bin2bn(genkey.d.data + sizeof(uint32_t),
+ size_d_gen, key->d);
+ if (ret != TEE_SUCCESS)
+ goto exit_gen_keypair;
+
+ if (genkey.format > 1)
+ ret = gen_keypair_get_f2(key, &genkey);
+ } else {
+ RSA_TRACE("CAAM Status 0x%08" PRIx32, jobctx.status);
+ ret = job_status_to_tee_result(jobctx.status);
+ }
+
+exit_gen_keypair:
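+ /*
+ * d and n share a single allocation: merge the lengths back and
+ * clear n so the shared buffer is released only once through d.
+ */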
+ genkey.d.length += genkey.n.length;
+ genkey.n.data = NULL;
+ do_keypair_free(&genkey);
+
+ caam_free_desc(&desc);
+
+ return ret;
+}
+
+/*
+ * RSA EME-OAEP Decoding operation
+ * Refer to chapter 7.1.2 (Decryption operation) of the PKCS #1 v2.1
+ * specification
+ *
+ * @rsa_data [in/out] RSA Data to decode
+ */
+static TEE_Result do_oaep_decoding(struct drvcrypt_rsa_ed *rsa_data)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+ enum caam_status retstatus = CAAM_FAILURE;
+ struct caambuf DB = { };
+ struct caambuf lHash = { };
+ struct caambuf seed = { };
+ struct caambuf dbMask = { };
+ struct caambuf maskedDB = { };
+ struct caambuf maskedSeed = { };
+ struct caambuf EM = { };
+ size_t db_size = 0;
+ size_t b01_idx = 0;
+ struct drvcrypt_rsa_mgf mgf_data = { };
+ struct drvcrypt_rsa_ed dec_data = { };
+ struct drvcrypt_mod_op mod_op = { };
+
+ RSA_TRACE("RSA OAEP Decoding");
+
+ /*
+ * First Decryption of the Cipher to an EM of modulus size
+ */
+ retstatus = caam_calloc_align_buf(&EM, rsa_data->key.n_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_decrypt;
+ }
+
+ memcpy(&dec_data, rsa_data, sizeof(dec_data));
+ dec_data.message.data = EM.data;
+ dec_data.message.length = EM.length;
+
+ ret = do_caam_decrypt(&dec_data, RSA_DECRYPT(NO));
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_decrypt;
+
+ RSA_DUMPBUF("EM", EM.data, EM.length);
+
+ /*
+ * DB = lHash' || PS || 0x01 || M
+ * DB length = k - hLen - 1
+ *
+ * PS is a 0's buffer of length k - mLen - 2hLen - 2
+ *
+ * k is the key modulus length
+ * hLen is the Hash digest length
+ * mLen is the input RSA message length
+ */
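+ /*
+ * For example, with a 2048-bit key (k = 256) and SHA-256 (hLen = 32),
+ * DB is 256 - 32 - 1 = 223 bytes.
+ */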
+ /* Calculate the DB size */
+ db_size = rsa_data->key.n_size - rsa_data->digest_size - 1;
+ RSA_TRACE("DB is %zu bytes", db_size);
+
+ /* Allocate the DB buffer */
+ retstatus = caam_calloc_align_buf(&DB, db_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_decrypt;
+ }
+
+ /*
+ * Step a
+ * Generate the lHash
+ */
+ /* Allocate the lHash buffer */
+ retstatus = caam_calloc_align_buf(&lHash, rsa_data->digest_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_decrypt;
+ }
+
+ RSA_TRACE("Hash the RSA Label of %zu bytes", rsa_data->label.length);
+ ret = tee_hash_createdigest(rsa_data->hash_algo, rsa_data->label.data,
+ rsa_data->label.length, lHash.data,
+ lHash.length);
+ RSA_TRACE("Hash the RSA Label returned 0x%08" PRIx32, ret);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_decrypt;
+
+ RSA_DUMPBUF("lHash", lHash.data, lHash.length);
+
+ /* Allocate the seed buffer */
+ retstatus = caam_calloc_align_buf(&seed, rsa_data->digest_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_decrypt;
+ }
+
+ /* Allocate the dbMask buffer */
+ retstatus = caam_calloc_align_buf(&dbMask, db_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_decrypt;
+ }
+
+ /*
+ * Step b
+ * Split the EM string
+ * EM = Y || maskedSeed || maskedDB
+ *
+ * Where:
+ * Y size = 1 byte
+ * maskedSeed size = hLen
+ * maskedDB size = k - hLen - 1 bytes
+ *
+ * k is the key modulus length
+ * hLen is the Hash digest length
+ * mLen is the input RSA message length
+ *
+ * Note: Y (the first byte of EM) is expected to be 0x00 and is
+ * skipped when splitting the string
+ */
+ maskedSeed.data = &EM.data[1];
+ maskedSeed.length = rsa_data->digest_size;
+ maskedSeed.paddr = EM.paddr + sizeof(uint8_t);
+
+ maskedDB.data = &EM.data[1 + rsa_data->digest_size];
+ maskedDB.length = dbMask.length;
+ maskedDB.paddr = EM.paddr + sizeof(uint8_t) + rsa_data->digest_size;
+
+ /*
+ * Step c
+ * Generate a Mask of the maskedDB
+ * seedMask = MGF(maskedDB, k - hLen - 1)
+ *
+ * Note: Use same buffer for seed and seedMask
+ */
+ mgf_data.hash_algo = rsa_data->hash_algo;
+ mgf_data.digest_size = rsa_data->digest_size;
+ mgf_data.seed.data = maskedDB.data;
+ mgf_data.seed.length = maskedDB.length;
+ mgf_data.mask.data = seed.data;
+ mgf_data.mask.length = seed.length;
+
+ ret = rsa_data->mgf(&mgf_data);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_decrypt;
+
+ /*
+ * Step d
+ * seed = maskedSeed xor seedMask
+ *
+ * Note: Use same buffer for seed and seedMask
+ */
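+ /*
+ * drvcrypt_xor_mod_n() uses the CAAM MATH XOR (mod N) function as a
+ * plain byte-wise XOR of the two operands; only the operand length
+ * (mod_op.n.length) has to be set.
+ */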
+ mod_op.n.length = seed.length;
+ mod_op.a.data = maskedSeed.data;
+ mod_op.a.length = maskedSeed.length;
+ mod_op.b.data = seed.data;
+ mod_op.b.length = seed.length;
+ mod_op.result.data = seed.data;
+ mod_op.result.length = seed.length;
+
+ ret = drvcrypt_xor_mod_n(&mod_op);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_decrypt;
+
+ RSA_DUMPBUF("Seed", seed.data, seed.length);
+
+ /*
+ * Step e
+ * Generate a Mask of the seed value
+ * dbMask = MGF(seed, k - hLen - 1)
+ */
+ mgf_data.seed.data = seed.data;
+ mgf_data.seed.length = seed.length;
+ mgf_data.mask.data = dbMask.data;
+ mgf_data.mask.length = dbMask.length;
+
+ ret = rsa_data->mgf(&mgf_data);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_decrypt;
+
+ /*
+ * Step f
+ * DB = maskedDB xor dbMask
+ */
+ mod_op.n.length = DB.length;
+ mod_op.a.data = maskedDB.data;
+ mod_op.a.length = maskedDB.length;
+ mod_op.b.data = dbMask.data;
+ mod_op.b.length = dbMask.length;
+ mod_op.result.data = DB.data;
+ mod_op.result.length = DB.length;
+
+ ret = drvcrypt_xor_mod_n(&mod_op);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_decrypt;
+
+ RSA_DUMPBUF("DB", DB.data, DB.length);
+
+ /*
+ * Step g
+ * Check the DB generated
+ * DB = lHash' || PS || 0x01 || M
+ *
+ * Error if:
+ * - lHash' != lHash (First step - Hash the Label)
+ * - byte 0x01 between PS and M is not present
+ */
+ /* Check Hash values */
+ if (memcmp(DB.data, lHash.data, lHash.length)) {
+ RSA_TRACE("Hash error");
+ ret = TEE_ERROR_BAD_PARAMETERS;
+ goto exit_oaep_decrypt;
+ }
+
+ /* Find the byte 0x01 separating PS and M */
+ for (b01_idx = rsa_data->digest_size;
+ b01_idx < db_size && !DB.data[b01_idx]; b01_idx++)
+ ;
+
+ if (b01_idx == db_size) {
+ RSA_TRACE("byte 0x01 not present");
+ ret = TEE_ERROR_BAD_PARAMETERS;
+ goto exit_oaep_decrypt;
+ }
+
+ rsa_data->message.length = DB.length - b01_idx - 1;
+ memcpy(rsa_data->message.data, &DB.data[b01_idx + 1],
+ rsa_data->message.length);
+
+ RSA_DUMPBUF("Message decrypted", rsa_data->message.data,
+ rsa_data->message.length);
+ ret = TEE_SUCCESS;
+
+exit_oaep_decrypt:
+ caam_free_buf(&EM);
+ caam_free_buf(&DB);
+ caam_free_buf(&seed);
+ caam_free_buf(&dbMask);
+ caam_free_buf(&lHash);
+
+ return ret;
+}
+
+/*
+ * RSA EME-OAEP Encoding operation
+ * Refer to chapter 7.1.1 (Encryption operation) of the PKCS #1 v2.1
+ * specification
+ *
+ * @rsa_data [in/out] RSA Data to encode
+ */
+static TEE_Result do_oaep_encoding(struct drvcrypt_rsa_ed *rsa_data)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+ enum caam_status retstatus;
+ struct caambuf DB = { };
+ struct caambuf lHash = { };
+ struct caambuf seed = { };
+ struct caambuf dbMask = { };
+ struct caambuf maskedDB = { };
+ struct caambuf maskedSeed = { };
+ struct caambuf EM = { };
+ size_t db_size = 0;
+ size_t ps_size = 0;
+ struct drvcrypt_rsa_mgf mgf_data = { };
+ struct drvcrypt_rsa_ed enc_data = { };
+ struct drvcrypt_mod_op mod_op = { };
+
+ RSA_TRACE("RSA OAEP Encoding");
+
+ /*
+ * DB = lHash || PS || 0x01 || M
+ * DB length = k - hLen - 1
+ *
+ * PS is a 0's buffer of length k - mLen - 2hLen - 2
+ *
+ * k is the key modulus length
+ * hLen is the Hash digest length
+ * mLen is the input RSA message length
+ */
+ /* Calculate the DB size */
+ db_size = rsa_data->key.n_size - rsa_data->digest_size - 1;
+ RSA_TRACE("DB is %zu bytes", db_size);
+
+ /* Allocate the DB buffer */
+ retstatus = caam_calloc_align_buf(&DB, db_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_encrypt;
+ }
+
+ /*
+ * Step a
+ * Generate the lHash
+ */
+ lHash.length = rsa_data->digest_size;
+ lHash.data = DB.data;
+
+ RSA_TRACE("Hash the RSA Label of %zu bytes", rsa_data->label.length);
+ ret = tee_hash_createdigest(rsa_data->hash_algo, rsa_data->label.data,
+ rsa_data->label.length, lHash.data,
+ lHash.length);
+ RSA_TRACE("Hash the RSA Label returned 0x%08" PRIx32, ret);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_encrypt;
+ RSA_DUMPBUF("lHash", lHash.data, lHash.length);
+
+ /*
+ * Step b
+ * Add PS 0's
+ * Note: DB is already filled with 0's at the allocation
+ */
+ ps_size = rsa_data->key.n_size - rsa_data->message.length -
+ 2 * rsa_data->digest_size - 2;
+ RSA_TRACE("PS is %zu bytes", ps_size);
+
+ /*
+ * Step c
+ * Set the value 0x01 after the lHash and the PS
+ * Concatenate result with input message
+ */
+ DB.data[lHash.length + ps_size] = 0x01;
+ memcpy(&DB.data[lHash.length + ps_size + 1], rsa_data->message.data,
+ rsa_data->message.length);
+
+ RSA_DUMPBUF("DB", DB.data, DB.length);
+
+ /*
+ * Step d
+ * Generate a random seed of hLen
+ */
+ /* Allocate the seed buffer */
+ retstatus = caam_calloc_align_buf(&seed, rsa_data->digest_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_encrypt;
+ }
+
+ /* Allocate the dbMask buffer */
+ retstatus = caam_calloc_align_buf(&dbMask, db_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_encrypt;
+ }
+
+ ret = crypto_rng_read(seed.data, seed.length);
+ RSA_TRACE("Get seed of %zu bytes (ret = 0x%08" PRIx32 ")", seed.length,
+ ret);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_encrypt;
+
+ RSA_DUMPBUF("Seed", seed.data, seed.length);
+
+ /*
+ * Step e
+ * Generate a Mask of the seed value
+ * dbMask = MGF(seed, k - hLen - 1)
+ */
+ mgf_data.hash_algo = rsa_data->hash_algo;
+ mgf_data.digest_size = rsa_data->digest_size;
+ mgf_data.seed.data = seed.data;
+ mgf_data.seed.length = seed.length;
+ mgf_data.mask.data = dbMask.data;
+ mgf_data.mask.length = dbMask.length;
+
+ ret = rsa_data->mgf(&mgf_data);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_encrypt;
+
+ /*
+ * Step f
+ * maskedDB = DB xor dbMask
+ */
+ retstatus = caam_calloc_align_buf(&EM, rsa_data->key.n_size);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_oaep_encrypt;
+ }
+
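+ /*
+ * EM = 0x00 || maskedSeed || maskedDB, hence maskedSeed starts at
+ * offset 1 and maskedDB at offset 1 + hLen in the EM buffer.
+ */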
+ maskedDB.data = &EM.data[1 + rsa_data->digest_size];
+ maskedDB.length = dbMask.length;
+ maskedDB.paddr = EM.paddr + sizeof(uint8_t) + rsa_data->digest_size;
+
+ mod_op.n.length = maskedDB.length;
+ mod_op.a.data = DB.data;
+ mod_op.a.length = DB.length;
+ mod_op.b.data = dbMask.data;
+ mod_op.b.length = dbMask.length;
+ mod_op.result.data = maskedDB.data;
+ mod_op.result.length = maskedDB.length;
+
+ ret = drvcrypt_xor_mod_n(&mod_op);
+ if (ret != TEE_SUCCESS) {
+ ret = TEE_ERROR_GENERIC;
+ goto exit_oaep_encrypt;
+ }
+
+ /*
+ * Step g
+ * Generate a Mask of the maskedDB
+ * seedMask = MGF(maskedDB, hLen)
+ *
+ * Note: Use same buffer for seedMask and maskedSeed
+ */
+ maskedSeed.data = &EM.data[1];
+ maskedSeed.length = rsa_data->digest_size;
+ maskedSeed.paddr = EM.paddr + sizeof(uint8_t);
+
+ mgf_data.seed.data = maskedDB.data;
+ mgf_data.seed.length = maskedDB.length;
+ mgf_data.mask.data = maskedSeed.data;
+ mgf_data.mask.length = maskedSeed.length;
+ ret = rsa_data->mgf(&mgf_data);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_encrypt;
+
+ /*
+ * Step h
+ * maskedSeed = seed xor seedMask
+ */
+ mod_op.n.length = maskedSeed.length;
+ mod_op.a.data = seed.data;
+ mod_op.a.length = seed.length;
+ mod_op.b.data = maskedSeed.data;
+ mod_op.b.length = maskedSeed.length;
+ mod_op.result.data = maskedSeed.data;
+ mod_op.result.length = maskedSeed.length;
+
+ ret = drvcrypt_xor_mod_n(&mod_op);
+ if (ret != TEE_SUCCESS)
+ goto exit_oaep_encrypt;
+
+ RSA_DUMPBUF("EM", EM.data, EM.length);
+
+ /*
+ * Last Encryption of the EM of modulus size to Cipher
+ */
+ memcpy(&enc_data, rsa_data, sizeof(enc_data));
+
+ enc_data.message.data = EM.data;
+ enc_data.message.length = EM.length;
+
+ ret = do_caam_encrypt(&enc_data, RSA_ENCRYPT(NO));
+
+exit_oaep_encrypt:
+ caam_free_buf(&DB);
+ caam_free_buf(&seed);
+ caam_free_buf(&dbMask);
+ caam_free_buf(&EM);
+
+ return ret;
+}
+
+/*
+ * CAAM RSA Encryption of the input message to a cipher
+ *
+ * @rsa_data [in/out] RSA Data to encrypt
+ * @operation CAAM RSA Encryption operation
+ */
+static TEE_Result do_caam_encrypt(struct drvcrypt_rsa_ed *rsa_data,
+ uint32_t operation)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+ enum caam_status retstatus = CAAM_FAILURE;
+ struct caam_rsa_keypair key = { };
+ paddr_t paddr_msg = 0;
+ struct caambuf msg_tmp = { };
+ struct caamsgtbuf sgtmsg = { .sgt_type = false };
+ int realloc = 0;
+ struct caambuf cipher_align = { };
+ struct caamsgtbuf sgtcipher = { .sgt_type = false };
+ paddr_t paddr_cipher = 0;
+ struct caam_jobctx jobctx = { };
+ uint32_t *desc = NULL;
+ uint32_t desclen = 0;
+ uint32_t pdb_sgt_flags = 0;
+
+ RSA_TRACE("RSA Encrypt mode %d", rsa_data->rsa_id);
+
+ /* Allocate the job descriptor */
+ desc = caam_calloc_desc(MAX_DESC_ENC);
+ if (!desc) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_encrypt;
+ }
+
+ /*
+ * Convert TEE rsa key type to CAAM rsa key type
+ * Push key value to memory
+ */
+ retstatus = do_keypub_conv(&key, rsa_data->key.key);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto exit_encrypt;
+ }
+
+ /*
+ * ReAllocate the cipher result buffer with a maximum size
+ * of the Key Modulus's size (N) if not cache aligned
+ */
+ realloc = caam_set_or_alloc_align_buf(rsa_data->cipher.data,
+ &cipher_align, key.n.length);
+ if (realloc == -1) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_encrypt;
+ }
+
+ retstatus = caam_sgt_build_block_data(&sgtcipher, NULL, &cipher_align);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto exit_encrypt;
+ }
+
+ if (sgtcipher.sgt_type) {
+ pdb_sgt_flags |= PDB_RSA_ENC_SGT_G;
+ paddr_cipher = virt_to_phys(sgtcipher.sgt);
+ caam_sgt_cache_op(TEE_CACHEFLUSH, &sgtcipher);
+ } else {
+ paddr_cipher = sgtcipher.buf->paddr;
+ if (!sgtcipher.buf->nocache)
+ cache_operation(TEE_CACHEFLUSH, sgtcipher.buf->data,
+ sgtcipher.length);
+ }
+
+ /* Prepare the input message CAAM descriptor entry */
+ msg_tmp.data = rsa_data->message.data;
+ msg_tmp.length = rsa_data->message.length;
+ msg_tmp.paddr = virt_to_phys(rsa_data->message.data);
+ if (!caam_mem_is_cached_buf(rsa_data->message.data,
+ rsa_data->message.length))
+ msg_tmp.nocache = 1;
+
+ retstatus = caam_sgt_build_block_data(&sgtmsg, NULL, &msg_tmp);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto exit_encrypt;
+ }
+
+ if (sgtmsg.sgt_type) {
+ pdb_sgt_flags |= PDB_RSA_ENC_SGT_F;
+ paddr_msg = virt_to_phys(sgtmsg.sgt);
+ caam_sgt_cache_op(TEE_CACHECLEAN, &sgtmsg);
+ } else {
+ paddr_msg = sgtmsg.buf->paddr;
+ if (!sgtmsg.buf->nocache)
+ cache_operation(TEE_CACHECLEAN, sgtmsg.buf->data,
+ sgtmsg.length);
+ }
+
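+ /*
+ * Build the RSA Encrypt protocol descriptor:
+ * PDB = { E/N sizes, F (message), G (cipher), N, E, F length }
+ * followed by the protocol operation word.
+ */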
+ caam_desc_init(desc);
+ caam_desc_add_word(desc, DESC_HEADER(0));
+ caam_desc_add_word(desc, PDB_RSA_ENC_E_SIZE(key.e.length) |
+ PDB_RSA_ENC_N_SIZE(key.n.length) |
+ pdb_sgt_flags);
+ caam_desc_add_ptr(desc, paddr_msg);
+ caam_desc_add_ptr(desc, paddr_cipher);
+ caam_desc_add_ptr(desc, key.n.paddr);
+ caam_desc_add_ptr(desc, key.e.paddr);
+ caam_desc_add_word(desc, PDB_RSA_ENC_F_SIZE(rsa_data->message.length));
+ caam_desc_add_word(desc, operation);
+
+ /* Set the descriptor Header with length */
+ desclen = caam_desc_get_len(desc);
+ caam_desc_update_hdr(desc, DESC_HEADER_IDX(desclen, desclen - 1));
+ RSA_DUMPDESC(desc);
+
+ jobctx.desc = desc;
+ retstatus = caam_jr_enqueue(&jobctx, NULL);
+
+ if (retstatus == CAAM_NO_ERROR) {
+ if (!cipher_align.nocache)
+ cache_operation(TEE_CACHEINVALIDATE, cipher_align.data,
+ cipher_align.length);
+
+ if (realloc == 1)
+ memcpy(rsa_data->cipher.data, cipher_align.data,
+ cipher_align.length);
+
+ rsa_data->cipher.length = cipher_align.length;
+
+ RSA_DUMPBUF("Output", rsa_data->cipher.data,
+ rsa_data->cipher.length);
+ ret = TEE_SUCCESS;
+ } else {
+ RSA_TRACE("CAAM Status 0x%08" PRIx32, jobctx.status);
+ ret = job_status_to_tee_result(jobctx.status);
+ }
+
+exit_encrypt:
+ caam_free_desc(&desc);
+ do_keypair_free(&key);
+
+ if (realloc == 1)
+ caam_free_buf(&cipher_align);
+
+ if (sgtmsg.sgt_type)
+ caam_sgtbuf_free(&sgtmsg);
+
+ if (sgtcipher.sgt_type)
+ caam_sgtbuf_free(&sgtcipher);
+
+ return ret;
+}
+
+/*
+ * CAAM RSA Decryption of the input cipher to a message
+ *
+ * @rsa_data [in/out] RSA Data to decrypt
+ * @operation CAAM RSA Decryption operation
+ */
+static TEE_Result do_caam_decrypt(struct drvcrypt_rsa_ed *rsa_data,
+ uint32_t operation)
+{
+ TEE_Result ret = TEE_ERROR_GENERIC;
+ enum caam_status retstatus = CAAM_FAILURE;
+ struct caam_rsa_keypair key = { };
+ struct caambuf cipher_tmp = { };
+ struct caamsgtbuf sgtcipher = { .sgt_type = false };
+ paddr_t paddr_cipher = 0;
+ int realloc = 0;
+ struct caambuf msg_align = { };
+ struct caamsgtbuf sgtmsg = { .sgt_type = false };
+ paddr_t paddr_msg = 0;
+ struct caam_jobctx jobctx = { };
+ uint32_t *desc = NULL;
+ uint32_t desclen = 0;
+ uint32_t pdb_sgt_flags = 0;
+ struct caambuf size_msg = { };
+ struct caambuf tmp = { };
+
+ RSA_TRACE("RSA Decrypt mode %d", rsa_data->rsa_id);
+
+ /*
+ * Convert TEE rsa key type to CAAM rsa key type
+ * Push key value to memory
+ */
+ retstatus = do_keypair_conv(&key, rsa_data->key.key);
+ if (retstatus != CAAM_NO_ERROR) {
+ RSA_TRACE("do_keypair_conv returned 0x%" PRIx32, retstatus);
+ ret = TEE_ERROR_GENERIC;
+ goto exit_decrypt;
+ }
+
+ /*
+ * Allocate the temporary result buffer with a maximum size
+ * of the Key Modulus's size (N)
+ */
+ if (rsa_data->message.length < key.n.length) {
+ retstatus = caam_alloc_align_buf(&msg_align, key.n.length);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_decrypt;
+ }
+
+ realloc = 1;
+ } else {
+ realloc = caam_set_or_alloc_align_buf(rsa_data->message.data,
+ &msg_align, key.n.length);
+ if (realloc == (-1)) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_decrypt;
+ }
+ }
+
+ retstatus = caam_sgt_build_block_data(&sgtmsg, NULL, &msg_align);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto exit_decrypt;
+ }
+
+ if (sgtmsg.sgt_type) {
+ pdb_sgt_flags |= PDB_RSA_DEC_SGT_F;
+ paddr_msg = virt_to_phys(sgtmsg.sgt);
+ caam_sgt_cache_op(TEE_CACHEFLUSH, &sgtmsg);
+ } else {
+ paddr_msg = sgtmsg.buf->paddr;
+ if (!sgtmsg.buf->nocache)
+ cache_operation(TEE_CACHEFLUSH, sgtmsg.buf->data,
+ sgtmsg.length);
+ }
+
+ /* Allocate the returned computed size when PKCS V1.5 */
+ if (operation == RSA_DECRYPT(PKCS_V1_5)) {
+ retstatus = caam_alloc_align_buf(&size_msg, 4);
+ if (retstatus != CAAM_NO_ERROR)
+ goto exit_decrypt;
+
+ cache_operation(TEE_CACHEFLUSH, size_msg.data, size_msg.length);
+ }
+
+ /* Prepare the input cipher CAAM descriptor entry */
+ cipher_tmp.data = rsa_data->cipher.data;
+ cipher_tmp.length = rsa_data->cipher.length;
+ cipher_tmp.paddr = virt_to_phys(rsa_data->cipher.data);
+ if (!caam_mem_is_cached_buf(rsa_data->cipher.data,
+ rsa_data->cipher.length))
+ cipher_tmp.nocache = 1;
+
+ retstatus = caam_sgt_build_block_data(&sgtcipher, NULL, &cipher_tmp);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_GENERIC;
+ goto exit_decrypt;
+ }
+
+ if (sgtcipher.sgt_type) {
+ pdb_sgt_flags |= PDB_RSA_DEC_SGT_G;
+ paddr_cipher = virt_to_phys(sgtcipher.sgt);
+ caam_sgt_cache_op(TEE_CACHECLEAN, &sgtcipher);
+ } else {
+ paddr_cipher = sgtcipher.buf->paddr;
+ if (!sgtcipher.buf->nocache)
+ cache_operation(TEE_CACHECLEAN, sgtcipher.buf->data,
+ sgtcipher.length);
+ }
+
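+ /*
+ * Private Key format 1 uses (n, d), format 2 uses (d, p, q) and
+ * format 3 uses the CRT form (p, q, dp, dq, qp). Formats 2 and 3
+ * also need two temporary buffers (of p and q size) for the CAAM.
+ */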
+ /* Allocate the job descriptor as a function of the Private Key format */
+ switch (key.format) {
+ case 1:
+ desc = caam_calloc_desc(MAX_DESC_DEC_1);
+ if (!desc) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_decrypt;
+ }
+ break;
+
+ case 2:
+ case 3:
+ if (key.format == 2)
+ desc = caam_calloc_desc(MAX_DESC_DEC_2);
+ else
+ desc = caam_calloc_desc(MAX_DESC_DEC_3);
+
+ if (!desc) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_decrypt;
+ }
+ /* Allocate temporary buffers used by the CAAM */
+ retstatus =
+ caam_alloc_align_buf(&tmp, key.p.length + key.q.length);
+ if (retstatus != CAAM_NO_ERROR) {
+ ret = TEE_ERROR_OUT_OF_MEMORY;
+ goto exit_decrypt;
+ }
+
+ cache_operation(TEE_CACHEFLUSH, tmp.data, tmp.length);
+ break;
+
+ default:
+ ret = TEE_ERROR_GENERIC;
+ goto exit_decrypt;
+ }
+
+ caam_desc_init(desc);
+ caam_desc_add_word(desc, DESC_HEADER(0));
+
+ /* Build the descriptor as a function of the Private Key format */
+ switch (key.format) {
+ case 1:
+ caam_desc_add_word(desc,
+ PDB_RSA_DEC_D_SIZE(key.d.length) |
+ PDB_RSA_DEC_N_SIZE(key.n.length) |
+ pdb_sgt_flags);
+ caam_desc_add_ptr(desc, paddr_cipher);
+ caam_desc_add_ptr(desc, paddr_msg);
+ caam_desc_add_ptr(desc, key.n.paddr);
+ caam_desc_add_ptr(desc, key.d.paddr);
+
+ break;
+
+ case 2:
+ caam_desc_add_word(desc,
+ PDB_RSA_DEC_D_SIZE(key.d.length) |
+ PDB_RSA_DEC_N_SIZE(key.n.length) |
+ pdb_sgt_flags);
+ caam_desc_add_ptr(desc, paddr_cipher);
+ caam_desc_add_ptr(desc, paddr_msg);
+ caam_desc_add_ptr(desc, key.d.paddr);
+ caam_desc_add_ptr(desc, key.p.paddr);
+ caam_desc_add_ptr(desc, key.q.paddr);
+ caam_desc_add_ptr(desc, tmp.paddr);
+ caam_desc_add_ptr(desc, tmp.paddr + key.p.length);
+ caam_desc_add_word(desc,
+ PDB_RSA_DEC_Q_SIZE(key.q.length) |
+ PDB_RSA_DEC_P_SIZE(key.p.length));
+ break;
+
+ case 3:
+ caam_desc_add_word(desc, PDB_RSA_DEC_N_SIZE(key.n.length) |
+ pdb_sgt_flags);
+ caam_desc_add_ptr(desc, paddr_cipher);
+ caam_desc_add_ptr(desc, paddr_msg);
+ caam_desc_add_ptr(desc, key.qp.paddr);
+ caam_desc_add_ptr(desc, key.p.paddr);
+ caam_desc_add_ptr(desc, key.q.paddr);
+ caam_desc_add_ptr(desc, key.dp.paddr);
+ caam_desc_add_ptr(desc, key.dq.paddr);
+ caam_desc_add_ptr(desc, tmp.paddr);
+ caam_desc_add_ptr(desc, tmp.paddr + key.p.length);
+ caam_desc_add_word(desc,
+ PDB_RSA_DEC_Q_SIZE(key.q.length) |
+ PDB_RSA_DEC_P_SIZE(key.p.length));
+ break;
+
+ default:
+ ret = TEE_ERROR_GENERIC;
+ goto exit_decrypt;
+ }
+
+ /* Set the Decryption operation type */
+ caam_desc_add_word(desc, operation | PROT_RSA_DEC_KEYFORM(key.format));
+
+ if (operation == RSA_DECRYPT(PKCS_V1_5)) {
+ /* Get the PKCS #1 v1.5 message length generated */
+ caam_desc_add_word(desc,
+ ST_NOIMM_OFF(CLASS_DECO, REG_MATH0, 4, 4));
+ caam_desc_add_ptr(desc, size_msg.paddr);
+ /* Set the descriptor Header with length */
+ desclen = caam_desc_get_len(desc);
+#ifdef CFG_CAAM_64BIT
+ caam_desc_update_hdr(desc,
+ DESC_HEADER_IDX(desclen, desclen - 1 - 3));
+#else
+ caam_desc_update_hdr(desc,
+ DESC_HEADER_IDX(desclen, desclen - 1 - 2));
+#endif /* CFG_CAAM_64BIT */
+ } else {
+ desclen = caam_desc_get_len(desc);
+ /* Set the descriptor Header with length */
+ caam_desc_update_hdr(desc,
+ DESC_HEADER_IDX(desclen, desclen - 1));
+ }
+
+ RSA_DUMPDESC(desc);
+
+ cache_operation(TEE_CACHECLEAN, rsa_data->cipher.data,
+ rsa_data->cipher.length);
+
+ jobctx.desc = desc;
+ retstatus = caam_jr_enqueue(&jobctx, NULL);
+
+ if (retstatus == CAAM_NO_ERROR) {
+ if (!msg_align.nocache)
+ cache_operation(TEE_CACHEINVALIDATE, msg_align.data,
+ msg_align.length);
+
+ if (operation == RSA_DECRYPT(NO)) {
+ if (rsa_data->rsa_id == DRVCRYPT_RSA_NOPAD) {
+ struct caambuf outmsg = {
+ .data = rsa_data->message.data,
+ .length = rsa_data->message.length
+ };
+ caam_mem_cpy_ltrim_buf(&outmsg, &msg_align);
+ rsa_data->message.length = outmsg.length;
+ } else if (realloc == 1) {
+ rsa_data->message.length =
+ MIN(key.n.length,
+ rsa_data->message.length);
+ memcpy(rsa_data->message.data, msg_align.data,
+ rsa_data->message.length);
+ }
+ } else {
+ /* PKCS 1 v1.5 */
+ cache_operation(TEE_CACHEINVALIDATE, size_msg.data,
+ size_msg.length);
+
+ rsa_data->message.length =
+ caam_read_val32(size_msg.data);
+ if (realloc == 1)
+ memcpy(rsa_data->message.data, msg_align.data,
+ rsa_data->message.length);
+ }
+
+ RSA_DUMPBUF("Output", rsa_data->message.data,
+ rsa_data->message.length);
+ ret = TEE_SUCCESS;
+ } else {
+ RSA_TRACE("CAAM Status 0x%08" PRIx32, jobctx.status);
+ ret = job_status_to_tee_result(jobctx.status);
+ }
+
+exit_decrypt:
+ caam_free_desc(&desc);
+ do_keypair_free(&key);
+ caam_free_buf(&size_msg);
+
+ if (realloc == 1)
+ caam_free_buf(&msg_align);
+
+ if (sgtmsg.sgt_type)
+ caam_sgtbuf_free(&sgtmsg);
+
+ if (sgtcipher.sgt_type)
+ caam_sgtbuf_free(&sgtcipher);
+
+ caam_free_buf(&tmp);
+
+ return ret;
+}
+
+/*
+ * RSA Encryption
+ *
+ * @rsa_data [in/out] RSA Data to encrypt / Cipher resulting
+ */
+static TEE_Result do_encrypt(struct drvcrypt_rsa_ed *rsa_data)
+{
+ TEE_Result ret = TEE_ERROR_NOT_IMPLEMENTED;
+
+ switch (rsa_data->rsa_id) {
+ case DRVCRYPT_RSA_NOPAD:
+ case DRVCRYPT_RSASSA_PKCS_V1_5:
+ case DRVCRYPT_RSASSA_PSS:
+ ret = do_caam_encrypt(rsa_data, RSA_ENCRYPT(NO));
+ break;
+
+ case DRVCRYPT_RSA_PKCS_V1_5:
+ ret = do_caam_encrypt(rsa_data, RSA_ENCRYPT(PKCS_V1_5));
+ break;
+
+ case DRVCRYPT_RSA_OAEP:
+ ret = do_oaep_encoding(rsa_data);
+ break;
+
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/*
+ * RSA Decryption
+ *
+ * @rsa_data [in/out] RSA Data to decrypt / Message resulting
+ */
+static TEE_Result do_decrypt(struct drvcrypt_rsa_ed *rsa_data)
+{
+ TEE_Result ret = TEE_ERROR_NOT_IMPLEMENTED;
+
+ switch (rsa_data->rsa_id) {
+ case DRVCRYPT_RSA_NOPAD:
+ case DRVCRYPT_RSASSA_PKCS_V1_5:
+ case DRVCRYPT_RSASSA_PSS:
+ ret = do_caam_decrypt(rsa_data, RSA_DECRYPT(NO));
+ break;
+
+ case DRVCRYPT_RSA_PKCS_V1_5:
+ ret = do_caam_decrypt(rsa_data, RSA_DECRYPT(PKCS_V1_5));
+ break;
+
+ case DRVCRYPT_RSA_OAEP:
+ ret = do_oaep_decoding(rsa_data);
+ break;
+
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/*
+ * Registration of the RSA Driver
+ */
+static const struct drvcrypt_rsa driver_rsa = {
+ .alloc_keypair = &do_allocate_keypair,
+ .alloc_publickey = &do_allocate_publickey,
+ .free_publickey = &do_free_publickey,
+ .gen_keypair = &do_gen_keypair,
+ .encrypt = &do_encrypt,
+ .decrypt = &do_decrypt,
+ .optional.ssa_sign = NULL,
+ .optional.ssa_verify = NULL,
+};
+
+enum caam_status caam_rsa_init(vaddr_t ctrl_addr)
+{
+ enum caam_status retstatus = CAAM_FAILURE;
+
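+ /* Register the RSA driver only if the CAAM has a Public Key module */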
+ if (caam_hal_ctrl_pknum(ctrl_addr)) {
+ caam_era = caam_hal_ctrl_era(ctrl_addr);
+ RSA_TRACE("CAAM Era %d", caam_era);
+
+ if (!drvcrypt_register_rsa(&driver_rsa))
+ retstatus = CAAM_NO_ERROR;
+ }
+
+ return retstatus;
+}
diff --git a/core/drivers/crypto/caam/acipher/local.h b/core/drivers/crypto/caam/acipher/local.h
new file mode 100644
index 00000000..7a2c7cbe
--- /dev/null
+++ b/core/drivers/crypto/caam/acipher/local.h
@@ -0,0 +1,31 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright 2018-2020 NXP
+ *
+ * CAAM Asymmetric Cipher local header.
+ */
+#ifndef __LOCAL_H__
+#define __LOCAL_H__
+
+#include "caam_common.h"
+
+/*
+ * Prime generator structure
+ */
+struct prime_data {
+ uint8_t era; /* CAAM Era version */
+ size_t key_size; /* Key size in bits */
+ struct caambuf *e; /* Key exponent e */
+ struct caambuf *p; /* Prime p */
+ struct caambuf *q; /* Prime q (can be NULL if only p is requested) */
+};
+
+/*
+ * Generate a Prime Number
+ * Algorithm based on Chapter B.3.3 of the FIPS 186-4 specification
+ *
+ * @data [in/out] Prime generation data
+ */
+enum caam_status caam_prime_gen(struct prime_data *data);
+
+#endif /* __LOCAL_H__ */
diff --git a/core/drivers/crypto/caam/acipher/sub.mk b/core/drivers/crypto/caam/acipher/sub.mk
new file mode 100644
index 00000000..ca4c4b3f
--- /dev/null
+++ b/core/drivers/crypto/caam/acipher/sub.mk
@@ -0,0 +1,4 @@
+incdirs-y += ../include
+
+srcs-$(CFG_NXP_CAAM_RSA_DRV) += caam_rsa.c
+srcs-y += caam_prime.c caam_math.c
diff --git a/core/drivers/crypto/caam/caam_ctrl.c b/core/drivers/crypto/caam/caam_ctrl.c
index e4993c19..7c35ce9d 100644
--- a/core/drivers/crypto/caam/caam_ctrl.c
+++ b/core/drivers/crypto/caam/caam_ctrl.c
@@ -1,9 +1,10 @@
// SPDX-License-Identifier: BSD-2-Clause
/*
- * Copyright 2017-2019 NXP
+ * Copyright 2017-2020 NXP
*
* Brief CAAM Global Controller.
*/
+#include <caam_acipher.h>
#include <caam_common.h>
#include <caam_hal_cfg.h>
#include <caam_hal_clk.h>
@@ -57,6 +58,20 @@ static TEE_Result crypto_driver_init(void)
goto exit_init;
}
+ /* Initialize the MATH Module */
+ retstatus = caam_math_init(jrcfg.base);
+ if (retstatus != CAAM_NO_ERROR) {
+ retresult = TEE_ERROR_GENERIC;
+ goto exit_init;
+ }
+
+ /* Initialize the RSA Module */
+ retstatus = caam_rsa_init(jrcfg.base);
+ if (retstatus != CAAM_NO_ERROR) {
+ retresult = TEE_ERROR_GENERIC;
+ goto exit_init;
+ }
+
/* Everything is OK, register the Power Management handler */
caam_pwr_init();
diff --git a/core/drivers/crypto/caam/hal/common/hal_ctrl.c b/core/drivers/crypto/caam/hal/common/hal_ctrl.c
index 42e1fe71..cf94d08b 100644
--- a/core/drivers/crypto/caam/hal/common/hal_ctrl.c
+++ b/core/drivers/crypto/caam/hal/common/hal_ctrl.c
@@ -1,6 +1,6 @@
// SPDX-License-Identifier: BSD-2-Clause
/*
- * Copyright 2018-2019 NXP
+ * Copyright 2018-2020 NXP
*
* Brief CAAM Controller Hardware Abstration Layer.
* Implementation of primitives to access HW.
@@ -38,3 +38,22 @@ uint8_t caam_hal_ctrl_hash_limit(vaddr_t baseaddr)
return UINT8_MAX;
}
+
+uint8_t caam_hal_ctrl_pknum(vaddr_t baseaddr)
+{
+ uint32_t val = 0;
+
+ val = io_caam_read32(baseaddr + CHANUM_LS);
+
+ return GET_CHANUM_LS_PKNUM(val);
+}
+
+uint8_t caam_hal_ctrl_era(vaddr_t baseaddr)
+{
+ uint32_t val = 0;
+
+ /* Read the CCB Version ID register to get the CAAM Era */
+ val = io_caam_read32(baseaddr + CCBVID);
+
+ return GET_CCBVID_CAAM_ERA(val);
+}
diff --git a/core/drivers/crypto/caam/hal/common/registers/version_regs.h b/core/drivers/crypto/caam/hal/common/registers/version_regs.h
index 63d8eb07..de2a2f68 100644
--- a/core/drivers/crypto/caam/hal/common/registers/version_regs.h
+++ b/core/drivers/crypto/caam/hal/common/registers/version_regs.h
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright 2017-2019 NXP
+ * Copyright 2017-2020 NXP
*
* Brief Version Registers.
*/
@@ -38,6 +38,8 @@
#define GET_CHANUM_MS_JRNUM(val) (((val) & BM_CHANUM_MS_JRNUM) >> 28)
#define CHANUM_LS 0x0FF4
+#define BM_CHANUM_LS_PKNUM SHIFT_U32(0xF, 28)
+#define GET_CHANUM_LS_PKNUM(val) (((val) & BM_CHANUM_LS_PKNUM) >> 28)
#define BM_CHANUM_LS_MDNUM SHIFT_U32(0xF, 12)
#define GET_CHANUM_LS_MDNUM(val) (((val) & BM_CHANUM_LS_MDNUM) >> 12)
diff --git a/core/drivers/crypto/caam/include/caam_acipher.h b/core/drivers/crypto/caam/include/caam_acipher.h
new file mode 100644
index 00000000..1064c7a1
--- /dev/null
+++ b/core/drivers/crypto/caam/include/caam_acipher.h
@@ -0,0 +1,39 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright 2018-2020 NXP
+ *
+ * Brief CAAM Asymmetric Cipher manager header.
+ */
+#ifndef __CAAM_ACIPHER_H__
+#define __CAAM_ACIPHER_H__
+
+#include <caam_common.h>
+
+#ifdef CFG_NXP_CAAM_RSA_DRV
+/*
+ * Initialize the RSA module
+ *
+ * @ctrl_addr Controller base address
+ */
+enum caam_status caam_rsa_init(vaddr_t ctrl_addr);
+#else
+static inline enum caam_status caam_rsa_init(vaddr_t ctrl_addr __unused)
+{
+ return CAAM_NO_ERROR;
+}
+#endif /* CFG_NXP_CAAM_RSA_DRV */
+
+#ifdef CFG_NXP_CAAM_ACIPHER_DRV
+/*
+ * Initialize the MATH module
+ *
+ * @ctrl_addr Controller base address
+ */
+enum caam_status caam_math_init(vaddr_t ctrl_addr);
+#else
+static inline enum caam_status caam_math_init(vaddr_t ctrl_addr __unused)
+{
+ return CAAM_NO_ERROR;
+}
+#endif /* CFG_NXP_CAAM_ACIPHER_DRV */
+#endif /* __CAAM_ACIPHER_H__ */
diff --git a/core/drivers/crypto/caam/hal/common/registers/ccb_regs.h b/core/drivers/crypto/caam/include/caam_desc_ccb_defines.h
index b9565bf3..02b3ace6 100644
--- a/core/drivers/crypto/caam/hal/common/registers/ccb_regs.h
+++ b/core/drivers/crypto/caam/include/caam_desc_ccb_defines.h
@@ -1,14 +1,14 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright 2018-2019 NXP
+ * Copyright 2018-2020 NXP
*
- * Brief CCB Registers
+ * Brief Define the CCB Registers to use in the CAAM descriptor
*/
-#ifndef __CCB_REGS_H__
-#define __CCB_REGS_H__
+#ifndef __CAAM_DESC_CCB_DEFINES_H__
+#define __CAAM_DESC_CCB_DEFINES_H__
/* CCB CHA Control Register */
-#define CCTRL_ULOAD_PKHA_B BIT32(27)
+#define CCTRL_ULOAD_PKHA_B BIT32(27)
/* CCB NFIFO */
#define NFIFO_CLASS(cla) SHIFT_U32(NFIFO_CLASS_##cla & 0x3, 30)
@@ -48,4 +48,4 @@
(NFIFO_CLASS(cla) | (options) | NFIFO_STYPE(src) | NFIFO_DTYPE(data) | \
NFIFO_PTYPE(ZERO) | NFIFO_DATA_LENGTH(len))
-#endif /* __CCB_REGS_H__ */
+#endif /* __CAAM_DESC_CCB_DEFINES_H__ */
diff --git a/core/drivers/crypto/caam/include/caam_desc_defines.h b/core/drivers/crypto/caam/include/caam_desc_defines.h
index 1416c4f4..9d8b5f82 100644
--- a/core/drivers/crypto/caam/include/caam_desc_defines.h
+++ b/core/drivers/crypto/caam/include/caam_desc_defines.h
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright 2018-2019 NXP
+ * Copyright 2018-2020 NXP
*
* Brief CAAM Descriptor defines.
*/
@@ -604,11 +604,15 @@
* Descriptor Protocol Data Block
*/
/* RSA Encryption */
+#define PDB_RSA_ENC_SGT_F SHIFT_U32(1, 31)
+#define PDB_RSA_ENC_SGT_G SHIFT_U32(1, 30)
#define PDB_RSA_ENC_E_SIZE(len) SHIFT_U32((len) & 0xFFF, 12)
#define PDB_RSA_ENC_N_SIZE(len) SHIFT_U32((len) & 0xFFF, 0)
#define PDB_RSA_ENC_F_SIZE(len) SHIFT_U32((len) & 0xFFF, 0)
/* RSA Decryption */
+#define PDB_RSA_DEC_SGT_G SHIFT_U32(1, 31)
+#define PDB_RSA_DEC_SGT_F SHIFT_U32(1, 30)
#define PDB_RSA_DEC_D_SIZE(len) SHIFT_U32((len) & 0xFFF, 12)
#define PDB_RSA_DEC_N_SIZE(len) SHIFT_U32((len) & 0xFFF, 0)
#define PDB_RSA_DEC_Q_SIZE(len) SHIFT_U32((len) & 0xFFF, 12)
diff --git a/core/drivers/crypto/caam/include/caam_hal_ctrl.h b/core/drivers/crypto/caam/include/caam_hal_ctrl.h
index 289628aa..cce9e813 100644
--- a/core/drivers/crypto/caam/include/caam_hal_ctrl.h
+++ b/core/drivers/crypto/caam/include/caam_hal_ctrl.h
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright 2018-2019 NXP
+ * Copyright 2018-2020 NXP
*
* Brief CAAM Controller Hardware Abstration Layer header.
*/
@@ -31,4 +31,17 @@ uint8_t caam_hal_ctrl_jrnum(vaddr_t baseaddr);
*/
uint8_t caam_hal_ctrl_hash_limit(vaddr_t baseaddr);
+/*
+ * Returns the number of Public Key module supported
+ *
+ * @baseaddr Controller base address
+ */
+uint8_t caam_hal_ctrl_pknum(vaddr_t baseaddr);
+
+/*
+ * Returns the CAAM Era
+ *
+ * @baseaddr Controller base address
+ */
+uint8_t caam_hal_ctrl_era(vaddr_t baseaddr);
#endif /* __CAAM_HAL_CTRL_H__ */
diff --git a/core/drivers/crypto/caam/include/caam_jr_status.h b/core/drivers/crypto/caam/include/caam_jr_status.h
index b145a043..1c43ab97 100644
--- a/core/drivers/crypto/caam/include/caam_jr_status.h
+++ b/core/drivers/crypto/caam/include/caam_jr_status.h
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright 2018-2019 NXP
+ * Copyright 2018-2020 NXP
*
* Brief CAAM Job Ring Status definition header.
*/
@@ -27,4 +27,11 @@
#define JRSTA_CCB_ERRID_HW SHIFT_U32(0xB, 0)
#define JRSTA_DECO_ERRID_FORMAT SHIFT_U32(0x88, 0)
+/* Return the Halt User status else 0 if not a Jump Halt User */
+#define JRSTA_GET_HALT_USER(status) \
+ (__extension__({ \
+ __typeof__(status) _status = (status); \
+ JRSTA_SRC_GET(_status) == JRSTA_SRC(JMP_HALT_USER) ? \
+ _status & UINT8_MAX : \
+ 0; }))
#endif /* __CAAM_JR_STATUS_H__ */
diff --git a/core/drivers/crypto/caam/include/caam_trace.h b/core/drivers/crypto/caam/include/caam_trace.h
index eab36a8c..49188cf8 100644
--- a/core/drivers/crypto/caam/include/caam_trace.h
+++ b/core/drivers/crypto/caam/include/caam_trace.h
@@ -33,6 +33,7 @@
#define DBG_TRACE_JR BIT32(5) /* Job Ring trace */
#define DBG_TRACE_RNG BIT32(6) /* RNG trace */
#define DBG_TRACE_HASH BIT32(7) /* Hash trace */
+#define DBG_TRACE_RSA BIT32(8) /* RSA trace */
/* HAL */
#if CAAM_DBG_TRACE(HAL)
@@ -126,6 +127,29 @@
#define HASH_DUMPBUF(...)
#endif
+/* RSA */
+#if CAAM_DBG_TRACE(RSA)
+#define RSA_TRACE DRV_TRACE
+#if CAAM_DBG_DESC(RSA)
+#define RSA_DUMPDESC(desc) \
+ do { \
+ RSA_TRACE("RSA Descriptor"); \
+ DRV_DUMPDESC(desc); \
+ } while (0)
+#else
+#define RSA_DUMPDESC(desc)
+#endif
+#if CAAM_DBG_BUF(RSA)
+#define RSA_DUMPBUF DRV_DUMPBUF
+#else
+#define RSA_DUMPBUF(...)
+#endif
+#else
+#define RSA_TRACE(...)
+#define RSA_DUMPDESC(desc)
+#define RSA_DUMPBUF(...)
+#endif
+
#if (TRACE_LEVEL >= TRACE_DEBUG)
#define DRV_TRACE(...) \
trace_printf(__func__, __LINE__, TRACE_DEBUG, true, __VA_ARGS__)
@@ -136,8 +160,7 @@
__typeof__(buf) _buf = (buf); \
__typeof__(len) _len = (len); \
\
- DRV_TRACE("%s @0x%" PRIxPTR ": %zu", title, (uintptr_t)_buf, \
- _len); \
+ DRV_TRACE("%s @%p : %zu", title, _buf, _len); \
dhex_dump(NULL, 0, 0, _buf, _len); \
} while (0)
diff --git a/core/drivers/crypto/caam/include/caam_utils_mem.h b/core/drivers/crypto/caam/include/caam_utils_mem.h
index 19582a0f..4f206db7 100644
--- a/core/drivers/crypto/caam/include/caam_utils_mem.h
+++ b/core/drivers/crypto/caam/include/caam_utils_mem.h
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright 2018-2019 NXP
+ * Copyright 2018-2020 NXP
*
* Brief Memory management utilities.
* Primitive to allocate, free memory.
@@ -139,4 +139,14 @@ int caam_mem_get_pa_area(struct caambuf *buf, struct caambuf **pabufs);
* @size Buffer size
*/
bool caam_mem_is_cached_buf(void *buf, size_t size);
+
+/*
+ * Copy source data into the destination buffer removing non-significant
+ * first zeros (left zeros).
+ * If the whole @src buffer is zero, leave only one zero in the destination.
+ *
+ * @dst [out] Destination buffer
+ * @src Source to copy
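+ *
+ * For example, src = { 0x00, 0x00, 0x12, 0x34 } copied into a large
+ * enough destination gives dst = { 0x12, 0x34 } and dst->length = 2.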
+ */
+void caam_mem_cpy_ltrim_buf(struct caambuf *dst, struct caambuf *src);
#endif /* __CAAM_UTILS_MEM_H__ */
diff --git a/core/drivers/crypto/caam/sub.mk b/core/drivers/crypto/caam/sub.mk
index 6715fa38..5932df2d 100644
--- a/core/drivers/crypto/caam/sub.mk
+++ b/core/drivers/crypto/caam/sub.mk
@@ -9,4 +9,4 @@ srcs-y += caam_jr.c
srcs-y += caam_rng.c
srcs-y += caam_desc.c
subdirs-$(CFG_NXP_CAAM_HASH_DRV) += hash
-
+subdirs-$(CFG_NXP_CAAM_ACIPHER_DRV) += acipher
diff --git a/core/drivers/crypto/caam/utils/utils_mem.c b/core/drivers/crypto/caam/utils/utils_mem.c
index 8acaeb10..fa42b10b 100644
--- a/core/drivers/crypto/caam/utils/utils_mem.c
+++ b/core/drivers/crypto/caam/utils/utils_mem.c
@@ -522,3 +522,23 @@ int caam_mem_get_pa_area(struct caambuf *buf, struct caambuf **out_pabufs)
MEM_TRACE("Nb Physical Area %d", nb_pa_area + 1);
return nb_pa_area + 1;
}
+
+void caam_mem_cpy_ltrim_buf(struct caambuf *dst, struct caambuf *src)
+{
+ size_t offset = 0;
+ size_t cpy_size = 0;
+
+ /* Calculate the offset to start the copy */
+ while (offset < src->length && !src->data[offset])
+ offset++;
+
+ if (offset >= src->length)
+ offset = src->length - 1;
+
+ cpy_size = MIN(dst->length, (src->length - offset));
+ MEM_TRACE("Copy %zu of src %zu bytes (offset = %zu)", cpy_size,
+ src->length, offset);
+ memcpy(dst->data, &src->data[offset], cpy_size);
+
+ dst->length = cpy_size;
+}