// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher	template_u.skcipher

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
	u8 slot;
	enum cc_cpp_alg alg;
};

enum cc_key_type {
	CC_UNPROTECTED_KEY,		/* User key */
	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
	CC_POLICY_PROTECTED_KEY,	/* CPP key */
	CC_INVALID_PROTECTED_KEY	/* Invalid key */
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	enum cc_key_type key_type;
	struct cc_user_key_info user;
	union {
		struct cc_hw_key_info hw;
		struct cc_cpp_key_info cpp;
	};
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->key_type;
}

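/* Check that the key size is legal for the configured flow mode
 * (AES/DES/SM4) and cipher mode. Returns 0 if legal, -EINVAL otherwise.
 */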
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

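/* Check that the request length is legal for the configured mode, e.g.
 * block aligned for the block modes. Returns 0 if legal, -EINVAL otherwise.
 */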
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	default:
		break;
	}
	return -EINVAL;
}

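/* Per-tfm setup: allocate and DMA-map the key staging buffer and, for
 * ESSIV, allocate the software SHA-256 used to derive the second key.
 */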
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return 0;
}

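/* Per-tfm teardown: release the ESSIV hash (if any), then unmap and
 * zero-free the key staging buffer allocated by cc_cipher_init().
 */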
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

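/* Map a HW (FDE) key slot number to its HW descriptor encoding. */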
static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
		return CC_HW_PROTECTED_KEY;
	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
		 slot_num <= CC_LAST_CPP_KEY_SLOT)
		return CC_POLICY_PROTECTED_KEY;
	else
		return CC_INVALID_PROTECTED_KEY;
}

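/* Load a protected key: the "key" here is a cc_hkey_info token naming
 * HW/CPP key slot(s) and the real key size, not actual key material.
 */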
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key %d/%d set.\n",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}

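/* Load a regular user key: validate its size, reject weak DES/XTS keys
 * and copy it into the DMA-mapped staging buffer. For ESSIV, the second
 * half of the buffer is set to the SHA-256 digest of the first half.
 */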
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->key_type = CC_UNPROTECTED_KEY;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if ((keylen == DES3_EDE_KEY_SIZE &&
		     verify_skcipher_des3_key(sktfm, key)) ||
		    verify_skcipher_des_key(sktfm, key)) {
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
					      ctx_p->user.key, key_len,
					      ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return S_AES_to_DOUT;
	case S_DIN_to_DES:
		return S_DES_to_DOUT;
	case S_DIN_to_SM4:
		return S_SM4_to_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

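/* Append a descriptor writing back the IV state after the data pass so
 * chained requests can continue from it. Skipped for CPP keys and ECB.
 */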
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			      NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

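/* Append the IV load descriptor for the non-XEX modes; XTS, ESSIV and
 * BITLOCKER state is loaded later by cc_setup_xex_state_desc().
 */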
static void cc_setup_state_desc(struct crypto_tfm *tfm,
				struct cipher_req_ctx *req_ctx,
				unsigned int ivsize, unsigned int nbytes,
				struct cc_hw_desc desc[],
				unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

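/* For the XEX family (XTS/ESSIV/BITLOCKER): append descriptors loading
 * the second (tweak) key and the IV with the configured data unit size.
 */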
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
				    struct cipher_req_ctx *req_ctx,
				    unsigned int ivsize, unsigned int nbytes,
				    struct cc_hw_desc desc[],
				    unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return DIN_AES_DOUT;
	case S_DIN_to_DES:
		return DIN_DES_DOUT;
	case S_DIN_to_SM4:
		return DIN_SM4_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

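/* Append the key load descriptor, sourcing the key from a HW or CPP key
 * slot or from the DMA-mapped user key buffer, depending on key type.
 */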
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_POLICY_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/*des*/
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, void *areq,
			       struct cc_hw_desc desc[], unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;
	}
}

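/* Append the data-processing descriptor, feeding the engine either
 * directly (DLLI) or via the MLLI tables pushed to SRAM above.
 */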
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

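/* Completion callback: unless this is only a backlog notification,
 * unmap the request, hand back the next IV and free its bounce buffer.
 */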
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

	if (err != -EINPROGRESS) {
		/* Not a BACKLOG notification */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
		memcpy(req->iv, req_ctx->iv, ivsize);
		kzfree(req_ctx->iv);
	}

	skcipher_request_complete(req, err);
}

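/* Build and send the whole descriptor sequence for one request: IV
 * state, optional MLLI bypass, key load, XEX state, data flow and IV
 * read-back.
 */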
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_data_size(ctx_p, nbytes)) {
		dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = cc_cipher_complete;
	cc_req.user_arg = req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup state (IV) */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Setup state (IV and XEX key) */
	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY)
		kzfree(req_ctx->iv);

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	/* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
	 * for the reason why this differs from the generic
	 * implementation
	 */
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "cbc(psm4)",
		.driver_name = "cbc-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
	{
		.name = "ctr(psm4)",
		.driver_name = "ctr-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
};

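/* Instantiate a registerable skcipher alg from one of the templates above. */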
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;

	/* Remove registered algs */
	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
		crypto_unregister_skcipher(&t_alg->skcipher_alg);
		list_del(&t_alg->entry);
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	INIT_LIST_HEAD(&drvdata->alg_list);

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &drvdata->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}