2 * Copyright (C) 2012-2017 ARM Limited or its affiliates.
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License version 2 as
6 * published by the Free Software Foundation.
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
13 * You should have received a copy of the GNU General Public License
14 * along with this program; if not, see <http://www.gnu.org/licenses/>.
17 #include <linux/kernel.h>
18 #include <linux/module.h>
19 #include <linux/platform_device.h>
20 #include <linux/semaphore.h>
21 #include <crypto/algapi.h>
22 #include <crypto/internal/skcipher.h>
23 #include <crypto/aes.h>
24 #include <crypto/ctr.h>
25 #include <crypto/des.h>
26 #include <crypto/xts.h>
27 #include <crypto/scatterwalk.h>
29 #include "ssi_config.h"
30 #include "ssi_driver.h"
31 #include "cc_lli_defs.h"
32 #include "ssi_buffer_mgr.h"
33 #include "ssi_cipher.h"
34 #include "ssi_request_mgr.h"
35 #include "ssi_sysfs.h"
37 #define MAX_ABLKCIPHER_SEQ_LEN 6
39 #define template_ablkcipher template_u.ablkcipher
41 #define SSI_MIN_AES_XTS_SIZE 0x10
42 #define SSI_MAX_AES_XTS_SIZE 0x2000
/*
 * NOTE(review): extraction dropped several original source lines in these
 * struct definitions (closing braces and some members are missing below);
 * comments describe only the members that are visible here.
 */
/* Driver-wide handle: anchors the list of registered blkcipher algs. */
43 struct ssi_blkcipher_handle {
44 struct list_head blkcipher_alg_list;
/* User-supplied key material as seen by the HW (DMA-mapped buffer). */
47 struct cc_user_key_info {
49 dma_addr_t key_dma_addr;
/* HW key-slot selection, used when the key lives in CC hardware slots. */
52 struct cc_hw_key_info {
53 enum cc_hw_crypto_key key1_slot;
/* key2_slot is only meaningful for two-key modes (XTS/ESSIV/BITLOCKER). */
54 enum cc_hw_crypto_key key2_slot;
/* Per-tfm cipher context allocated by the crypto API. */
57 struct ssi_ablkcipher_ctx {
58 struct ssi_drvdata *drvdata;
64 struct blkcipher_req_ctx *sync_ctx;
65 struct cc_user_key_info user;
66 struct cc_hw_key_info hw;
/* SW sha256 tfm, allocated only for ESSIV (key2 derivation). */
67 struct crypto_shash *shash_tfm;
70 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base);
/*
 * Validate @size (key length in bytes) against the tfm's flow/cipher mode.
 * Returns 0 when the combination is supported, non-zero otherwise.
 * NOTE(review): extraction dropped the return statements/braces between the
 * visible lines; only the case structure is shown here.
 */
72 static int validate_keys_sizes(struct ssi_ablkcipher_ctx *ctx_p, u32 size)
74 switch (ctx_p->flow_mode) {
/* AES: 128/192-bit keys are valid only for single-key modes. */
77 case CC_AES_128_BIT_KEY_SIZE:
78 case CC_AES_192_BIT_KEY_SIZE:
79 if (likely((ctx_p->cipher_mode != DRV_CIPHER_XTS) &&
80 (ctx_p->cipher_mode != DRV_CIPHER_ESSIV) &&
81 (ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)))
84 case CC_AES_256_BIT_KEY_SIZE:
/* Double-length keys are valid only for the two-key modes. */
86 case (CC_AES_192_BIT_KEY_SIZE * 2):
87 case (CC_AES_256_BIT_KEY_SIZE * 2):
88 if (likely((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
89 (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
90 (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)))
/* DES flow: accept single-DES or 3DES key lengths. */
97 if (likely(size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE))
100 #if SSI_CC_HAS_MULTI2
101 case S_DIN_to_MULTI2:
102 if (likely(size == CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE))
/*
 * Validate the request payload length @size for the configured flow/cipher
 * mode (alignment and min/max constraints). Returns 0 if acceptable.
 * NOTE(review): some original lines (returns, default cases) were dropped
 * by extraction between the visible lines.
 */
112 static int validate_data_size(struct ssi_ablkcipher_ctx *ctx_p, unsigned int size)
114 switch (ctx_p->flow_mode) {
116 switch (ctx_p->cipher_mode) {
/* XTS: bounded by SSI_MIN/MAX_AES_XTS_SIZE and block aligned. */
118 if ((size >= SSI_MIN_AES_XTS_SIZE) &&
119 (size <= SSI_MAX_AES_XTS_SIZE) &&
120 IS_ALIGNED(size, AES_BLOCK_SIZE))
/* CTS needs at least one full AES block. */
123 case DRV_CIPHER_CBC_CTS:
124 if (likely(size >= AES_BLOCK_SIZE))
132 case DRV_CIPHER_ESSIV:
133 case DRV_CIPHER_BITLOCKER:
134 if (likely(IS_ALIGNED(size, AES_BLOCK_SIZE)))
/* DES flow: must be DES-block aligned. */
142 if (likely(IS_ALIGNED(size, DES_BLOCK_SIZE)))
145 #if SSI_CC_HAS_MULTI2
146 case S_DIN_to_MULTI2:
147 switch (ctx_p->cipher_mode) {
149 if (likely(IS_ALIGNED(size, CC_MULTI2_BLOCK_SIZE)))
158 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * Return the algorithm's max_keysize for either the ablkcipher or
 * blkcipher variant of the alg backing @tfm; used to size/map the
 * context key buffer. (Fallback return for other alg types is in a
 * line dropped by extraction.)
 */
165 static unsigned int get_max_keysize(struct crypto_tfm *tfm)
167 struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
169 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_ABLKCIPHER)
170 return ssi_alg->crypto_alg.cra_ablkcipher.max_keysize;
172 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_BLKCIPHER)
173 return ssi_alg->crypto_alg.cra_blkcipher.max_keysize;
/*
 * Per-tfm init: record cipher/flow mode from the registered alg, allocate
 * a DMA-able key buffer sized to the alg's max key size and map it for the
 * device, and (for ESSIV only) allocate a SW sha256 tfm for key2 derivation.
 * Error-path cleanup/returns sit on lines dropped by extraction.
 */
178 static int ssi_blkcipher_init(struct crypto_tfm *tfm)
180 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
181 struct crypto_alg *alg = tfm->__crt_alg;
182 struct ssi_crypto_alg *ssi_alg =
183 container_of(alg, struct ssi_crypto_alg, crypto_alg);
186 unsigned int max_key_buf_size = get_max_keysize(tfm);
188 SSI_LOG_DEBUG("Initializing context @%p for %s\n",
189 ctx_p, crypto_tfm_alg_name(tfm));
191 ctx_p->cipher_mode = ssi_alg->cipher_mode;
192 ctx_p->flow_mode = ssi_alg->flow_mode;
193 ctx_p->drvdata = ssi_alg->drvdata;
194 dev = &ctx_p->drvdata->plat_dev->dev;
196 /* Allocate key buffer, cache line aligned */
/* GFP_DMA: buffer is later dma_map_single()'d for the engine. */
197 ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL | GFP_DMA);
198 if (!ctx_p->user.key) {
199 SSI_LOG_ERR("Allocating key buffer in context failed\n");
202 SSI_LOG_DEBUG("Allocated key buffer in context. key=@%p\n",
/* Map once at init; setkey only syncs the mapping, never remaps. */
206 ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
209 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
210 SSI_LOG_ERR("Mapping Key %u B at va=%pK for DMA failed\n",
211 max_key_buf_size, ctx_p->user.key);
214 SSI_LOG_DEBUG("Mapped key %u B at va=%pK to dma=%pad\n",
215 max_key_buf_size, ctx_p->user.key,
216 ctx_p->user.key_dma_addr);
218 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
219 /* Alloc hash tfm for essiv */
220 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
221 if (IS_ERR(ctx_p->shash_tfm)) {
222 SSI_LOG_ERR("Error allocating hash tfm for ESSIV.\n");
/* NOTE(review): key buffer/mapping appear leaked on this path —
 * no unmap/free visible before the error return; verify against
 * the dropped lines. */
223 return PTR_ERR(ctx_p->shash_tfm);
/*
 * Per-tfm teardown: mirror of ssi_blkcipher_init() — free the ESSIV hash
 * tfm (if any), unmap the key DMA mapping, then free the key buffer.
 */
230 static void ssi_blkcipher_exit(struct crypto_tfm *tfm)
232 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
233 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
234 unsigned int max_key_buf_size = get_max_keysize(tfm);
236 SSI_LOG_DEBUG("Clearing context @%p for %s\n",
237 crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
239 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
240 /* Free hash tfm for essiv */
241 crypto_free_shash(ctx_p->shash_tfm);
242 ctx_p->shash_tfm = NULL;
245 /* Unmap key buffer */
246 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
248 SSI_LOG_DEBUG("Unmapped key buffer key_dma_addr=%pad\n",
249 ctx_p->user.key_dma_addr);
251 /* Free key buffer in context */
/* NOTE(review): key material is freed without zeroization — kzfree
 * would be preferable for key buffers; confirm upstream behavior. */
252 kfree(ctx_p->user.key);
253 SSI_LOG_DEBUG("Free key buffer in context. key=@%p\n", ctx_p->user.key);
/* Three single-DES sub-keys of a 3DES (EDE) key; the enclosing
 * "struct tdes_keys {" line was dropped by extraction. */
257 u8 key1[DES_KEY_SIZE];
258 u8 key2[DES_KEY_SIZE];
259 u8 key3[DES_KEY_SIZE];
/* 32 zero bytes; comparison buffer (usage site not visible here). */
262 static const u8 zero_buff[] = { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
263 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
264 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
265 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
267 /* The function verifies that tdes keys are not weak.*/
/*
 * Reject degenerate 3DES keys where K1==K2 or K3==K2 (which collapse
 * EDE to single DES). Return value lines were dropped by extraction;
 * presumably 0 on success, non-zero on weak key — confirm upstream.
 */
268 static int ssi_verify_3des_keys(const u8 *key, unsigned int keylen)
270 struct tdes_keys *tdes_key = (struct tdes_keys *)key;
272 /* verify key1 != key2 and key3 != key2*/
273 if (unlikely((memcmp((u8 *)tdes_key->key1, (u8 *)tdes_key->key2, sizeof(tdes_key->key1)) == 0) ||
274 (memcmp((u8 *)tdes_key->key3, (u8 *)tdes_key->key2, sizeof(tdes_key->key3)) == 0))) {
/* Map a user-visible HW key slot number to the engine's enum; the entire
 * body (switch over slot_num, END_OF_KEYS fallback per the callers below)
 * was dropped by extraction. */
281 static enum cc_hw_crypto_key hw_key_to_cc_hw_key(int slot_num)
/*
 * setkey entry point: validate key size for the flow/cipher mode, handle
 * HW-key-slot keys (AES only), run SW weak-key checks (DES parity via
 * des_ekey, XTS key1!=key2 via xts_check_key, 3DES degenerate keys),
 * then copy the key into the pre-mapped DMA buffer and sync it for the
 * device. For ESSIV, key2 is derived as sha256(key1) in software.
 * NOTE(review): several return statements and closing braces between the
 * visible lines were dropped by extraction.
 */
296 static int ssi_blkcipher_setkey(struct crypto_tfm *tfm,
300 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
301 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
302 u32 tmp[DES_EXPKEY_WORDS];
303 unsigned int max_key_buf_size = get_max_keysize(tfm);
305 SSI_LOG_DEBUG("Setting key in context @%p for %s. keylen=%u\n",
306 ctx_p, crypto_tfm_alg_name(tfm), keylen);
307 dump_byte_array("key", (u8 *)key, keylen);
309 SSI_LOG_DEBUG("after FIPS check");
311 /* STAT_PHASE_0: Init and sanity checks */
313 #if SSI_CC_HAS_MULTI2
314 /*last byte of key buffer is round number and should not be a part of key size*/
315 if (ctx_p->flow_mode == S_DIN_to_MULTI2)
317 #endif /*SSI_CC_HAS_MULTI2*/
319 if (unlikely(validate_keys_sizes(ctx_p, keylen) != 0)) {
320 SSI_LOG_ERR("Unsupported key size %d.\n", keylen);
321 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
/* HW key path: @key carries slot numbers, not key material. */
325 if (ssi_is_hw_key(tfm)) {
326 /* setting HW key slots */
327 struct arm_hw_key_info *hki = (struct arm_hw_key_info *)key;
329 if (unlikely(ctx_p->flow_mode != S_DIN_to_AES)) {
330 SSI_LOG_ERR("HW key not supported for non-AES flows\n");
334 ctx_p->hw.key1_slot = hw_key_to_cc_hw_key(hki->hw_key1);
335 if (unlikely(ctx_p->hw.key1_slot == END_OF_KEYS)) {
336 SSI_LOG_ERR("Unsupported hw key1 number (%d)\n", hki->hw_key1);
/* Two-key modes need a second, distinct slot. */
340 if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
341 (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
342 (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)) {
343 if (unlikely(hki->hw_key1 == hki->hw_key2)) {
344 SSI_LOG_ERR("Illegal hw key numbers (%d,%d)\n", hki->hw_key1, hki->hw_key2);
347 ctx_p->hw.key2_slot = hw_key_to_cc_hw_key(hki->hw_key2);
348 if (unlikely(ctx_p->hw.key2_slot == END_OF_KEYS)) {
349 SSI_LOG_ERR("Unsupported hw key2 number (%d)\n", hki->hw_key2);
354 ctx_p->keylen = keylen;
355 SSI_LOG_DEBUG("ssi_is_hw_key ret 0");
/* SW key path: weak-key screening before accepting the key. */
361 if (ctx_p->flow_mode == S_DIN_to_DES) {
362 if (unlikely(!des_ekey(tmp, key)) &&
363 (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
364 tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
365 SSI_LOG_DEBUG("weak DES key");
369 if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) &&
370 xts_check_key(tfm, key, keylen) != 0) {
371 SSI_LOG_DEBUG("weak XTS key");
374 if ((ctx_p->flow_mode == S_DIN_to_DES) &&
375 (keylen == DES3_EDE_KEY_SIZE) &&
376 ssi_verify_3des_keys(key, keylen) != 0) {
377 SSI_LOG_DEBUG("weak 3DES key");
381 /* STAT_PHASE_1: Copy key to ctx */
/* Hand the buffer back to the CPU before writing into it. */
382 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
383 max_key_buf_size, DMA_TO_DEVICE);
385 if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
386 #if SSI_CC_HAS_MULTI2
387 memcpy(ctx_p->user.key, key, CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE);
/* Trailing byte of the user key encodes the round count. */
388 ctx_p->key_round_number = key[CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE];
389 if (ctx_p->key_round_number < CC_MULTI2_MIN_NUM_ROUNDS ||
390 ctx_p->key_round_number > CC_MULTI2_MAX_NUM_ROUNDS) {
391 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
392 SSI_LOG_DEBUG("SSI_CC_HAS_MULTI2 einval");
394 #endif /*SSI_CC_HAS_MULTI2*/
396 memcpy(ctx_p->user.key, key, keylen);
/* Zero-pad beyond 24 bytes (192-bit key case — guard condition was
 * on a dropped line; presumably keylen == 24). */
398 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
400 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
401 /* sha256 for key2 - use sw implementation */
402 int key_len = keylen >> 1;
404 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
406 desc->tfm = ctx_p->shash_tfm;
/* key2 = sha256(key1), stored in the upper half of the buffer. */
408 err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p->user.key + key_len);
410 SSI_LOG_ERR("Failed to hash ESSIV key.\n");
/* Flush the updated key to the device side of the mapping. */
415 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
416 max_key_buf_size, DMA_TO_DEVICE);
417 ctx_p->keylen = keylen;
419 SSI_LOG_DEBUG("return safely");
/*
 * Build the HW descriptor sequence that loads cipher state (IV) and key(s)
 * into the engine, appending to desc[] and advancing *seq_size. XTS-family
 * modes additionally load the XEX key and data-unit size; the DU size is
 * overridden to 512/4096 for the BULK_DU alg variants.
 * NOTE(review): the (*seq_size)++ increments, several case labels, and
 * closing braces sit on lines dropped by extraction.
 */
424 ssi_blkcipher_create_setup_desc(
425 struct crypto_tfm *tfm,
426 struct blkcipher_req_ctx *req_ctx,
429 struct cc_hw_desc desc[],
430 unsigned int *seq_size)
432 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
433 int cipher_mode = ctx_p->cipher_mode;
434 int flow_mode = ctx_p->flow_mode;
435 int direction = req_ctx->gen_ctx.op_type;
436 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
437 unsigned int key_len = ctx_p->keylen;
438 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
/* Default data-unit size is the whole request... */
439 unsigned int du_size = nbytes;
441 struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
/* ...unless the alg was registered as a fixed-DU bulk variant. */
443 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_512)
445 if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_4096)
448 switch (cipher_mode) {
450 case DRV_CIPHER_CBC_CTS:
453 /* Load cipher state */
454 hw_desc_init(&desc[*seq_size]);
455 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
457 set_cipher_config0(&desc[*seq_size], direction);
458 set_flow_mode(&desc[*seq_size], flow_mode);
459 set_cipher_mode(&desc[*seq_size], cipher_mode);
/* CTR/OFB keep the counter/feedback in STATE1; others use STATE0. */
460 if ((cipher_mode == DRV_CIPHER_CTR) ||
461 (cipher_mode == DRV_CIPHER_OFB)) {
462 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
464 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
/* Load key0 (single-key modes). */
470 hw_desc_init(&desc[*seq_size]);
471 set_cipher_mode(&desc[*seq_size], cipher_mode);
472 set_cipher_config0(&desc[*seq_size], direction);
473 if (flow_mode == S_DIN_to_AES) {
474 if (ssi_is_hw_key(tfm)) {
475 set_hw_crypto_key(&desc[*seq_size],
476 ctx_p->hw.key1_slot);
478 set_din_type(&desc[*seq_size], DMA_DLLI,
/* 192-bit keys are padded in the buffer (see setkey). */
479 key_dma_addr, ((key_len == 24) ?
483 set_key_size_aes(&desc[*seq_size], key_len);
486 set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
488 set_key_size_des(&desc[*seq_size], key_len);
490 set_flow_mode(&desc[*seq_size], flow_mode);
491 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
/* Two-key modes: key buffer holds key1 || key2, each key_len/2. */
495 case DRV_CIPHER_ESSIV:
496 case DRV_CIPHER_BITLOCKER:
498 hw_desc_init(&desc[*seq_size]);
499 set_cipher_mode(&desc[*seq_size], cipher_mode);
500 set_cipher_config0(&desc[*seq_size], direction);
501 if (ssi_is_hw_key(tfm)) {
502 set_hw_crypto_key(&desc[*seq_size],
503 ctx_p->hw.key1_slot);
505 set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
506 (key_len / 2), NS_BIT);
508 set_key_size_aes(&desc[*seq_size], (key_len / 2));
509 set_flow_mode(&desc[*seq_size], flow_mode);
510 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
/* Load the XEX (tweak) key into the second AES engine. */
514 hw_desc_init(&desc[*seq_size]);
515 set_cipher_mode(&desc[*seq_size], cipher_mode);
516 set_cipher_config0(&desc[*seq_size], direction);
517 if (ssi_is_hw_key(tfm)) {
518 set_hw_crypto_key(&desc[*seq_size],
519 ctx_p->hw.key2_slot);
521 set_din_type(&desc[*seq_size], DMA_DLLI,
522 (key_dma_addr + (key_len / 2)),
523 (key_len / 2), NS_BIT);
525 set_xex_data_unit_size(&desc[*seq_size], du_size);
526 set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
527 set_key_size_aes(&desc[*seq_size], (key_len / 2));
528 set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
/* Load the tweak/IV into STATE1. */
532 hw_desc_init(&desc[*seq_size]);
533 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
534 set_cipher_mode(&desc[*seq_size], cipher_mode);
535 set_cipher_config0(&desc[*seq_size], direction);
536 set_key_size_aes(&desc[*seq_size], (key_len / 2));
537 set_flow_mode(&desc[*seq_size], flow_mode);
538 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
539 CC_AES_BLOCK_SIZE, NS_BIT);
543 SSI_LOG_ERR("Unsupported cipher mode (%d)\n", cipher_mode);
548 #if SSI_CC_HAS_MULTI2
/*
 * MULTI2 setup: three descriptors — load the system key (KEY0), then the
 * data key with its round count (STATE0), then the IV (STATE1). The
 * (*seq_size)++ lines between descriptors were dropped by extraction.
 */
549 static inline void ssi_blkcipher_create_multi2_setup_desc(
550 struct crypto_tfm *tfm,
551 struct blkcipher_req_ctx *req_ctx,
553 struct cc_hw_desc desc[],
554 unsigned int *seq_size)
556 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
558 int direction = req_ctx->gen_ctx.op_type;
559 /* Load system key */
560 hw_desc_init(&desc[*seq_size]);
561 set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
562 set_cipher_config0(&desc[*seq_size], direction);
563 set_din_type(&desc[*seq_size], DMA_DLLI, ctx_p->user.key_dma_addr,
564 CC_MULTI2_SYSTEM_KEY_SIZE, NS_BIT);
565 set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
566 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
/* Data key follows the system key in the same DMA buffer. */
570 hw_desc_init(&desc[*seq_size]);
571 set_din_type(&desc[*seq_size], DMA_DLLI,
572 (ctx_p->user.key_dma_addr + CC_MULTI2_SYSTEM_KEY_SIZE),
573 CC_MULTI2_DATA_KEY_SIZE, NS_BIT);
574 set_multi2_num_rounds(&desc[*seq_size], ctx_p->key_round_number);
575 set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
576 set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
577 set_cipher_config0(&desc[*seq_size], direction);
578 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
/* Finally the IV. */
582 hw_desc_init(&desc[*seq_size]);
583 set_din_type(&desc[*seq_size], DMA_DLLI, req_ctx->gen_ctx.iv_dma_addr,
585 set_cipher_config0(&desc[*seq_size], direction);
586 set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
587 set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
588 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
591 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * Build the data-movement descriptors. DLLI (single-entry) buffers get one
 * DIN->DOUT descriptor; MLLI (scatter) buffers first BYPASS-copy the MLLI
 * table into SRAM, then issue an MLLI-typed transfer whose DOUT table is
 * either shared with DIN (in-place, out_nents == 0) or offset past the
 * input entries. set_queue_last_ind() marks the final descriptor so a
 * completion IRQ fires (the surrounding "if (areq)" guards are on dropped
 * lines — confirm upstream).
 */
594 ssi_blkcipher_create_data_desc(
595 struct crypto_tfm *tfm,
596 struct blkcipher_req_ctx *req_ctx,
597 struct scatterlist *dst, struct scatterlist *src,
600 struct cc_hw_desc desc[],
601 unsigned int *seq_size)
603 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
604 unsigned int flow_mode = ctx_p->flow_mode;
/* Translate setup flow mode into the matching DIN->DOUT data mode. */
606 switch (ctx_p->flow_mode) {
608 flow_mode = DIN_AES_DOUT;
611 flow_mode = DIN_DES_DOUT;
613 #if SSI_CC_HAS_MULTI2
614 case S_DIN_to_MULTI2:
615 flow_mode = DIN_MULTI2_DOUT;
617 #endif /*SSI_CC_HAS_MULTI2*/
619 SSI_LOG_ERR("invalid flow mode, flow_mode = %d\n", flow_mode);
/* Fast path: both src and dst are single contiguous DMA regions. */
623 if (likely(req_ctx->dma_buf_type == SSI_DMA_BUF_DLLI)) {
624 SSI_LOG_DEBUG(" data params addr %pad length 0x%X\n",
625 sg_dma_address(src), nbytes);
626 SSI_LOG_DEBUG(" data params addr %pad length 0x%X\n",
627 sg_dma_address(dst), nbytes);
628 hw_desc_init(&desc[*seq_size]);
629 set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
631 set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
632 nbytes, NS_BIT, (!areq ? 0 : 1));
634 set_queue_last_ind(&desc[*seq_size]);
636 set_flow_mode(&desc[*seq_size], flow_mode);
/* MLLI path: stage the link-list table into engine SRAM first. */
640 SSI_LOG_DEBUG(" bypass params addr %pad "
641 "length 0x%X addr 0x%08X\n",
642 req_ctx->mlli_params.mlli_dma_addr,
643 req_ctx->mlli_params.mlli_len,
644 (unsigned int)ctx_p->drvdata->mlli_sram_addr);
645 hw_desc_init(&desc[*seq_size]);
646 set_din_type(&desc[*seq_size], DMA_DLLI,
647 req_ctx->mlli_params.mlli_dma_addr,
648 req_ctx->mlli_params.mlli_len, NS_BIT);
649 set_dout_sram(&desc[*seq_size],
650 ctx_p->drvdata->mlli_sram_addr,
651 req_ctx->mlli_params.mlli_len);
652 set_flow_mode(&desc[*seq_size], BYPASS);
655 hw_desc_init(&desc[*seq_size]);
656 set_din_type(&desc[*seq_size], DMA_MLLI,
657 ctx_p->drvdata->mlli_sram_addr,
658 req_ctx->in_mlli_nents, NS_BIT);
/* In-place operation: reuse the input MLLI table for output. */
659 if (req_ctx->out_nents == 0) {
660 SSI_LOG_DEBUG(" din/dout params addr 0x%08X "
662 (unsigned int)ctx_p->drvdata->mlli_sram_addr,
663 (unsigned int)ctx_p->drvdata->mlli_sram_addr);
664 set_dout_mlli(&desc[*seq_size],
665 ctx_p->drvdata->mlli_sram_addr,
666 req_ctx->in_mlli_nents, NS_BIT,
/* Distinct dst: its table sits right after the input entries. */
669 SSI_LOG_DEBUG(" din/dout params "
670 "addr 0x%08X addr 0x%08X\n",
671 (unsigned int)ctx_p->drvdata->mlli_sram_addr,
672 (unsigned int)ctx_p->drvdata->mlli_sram_addr +
673 (u32)LLI_ENTRY_BYTE_SIZE *
675 set_dout_mlli(&desc[*seq_size],
676 (ctx_p->drvdata->mlli_sram_addr +
677 (LLI_ENTRY_BYTE_SIZE *
678 req_ctx->in_mlli_nents)),
679 req_ctx->out_mlli_nents, NS_BIT,
683 set_queue_last_ind(&desc[*seq_size]);
685 set_flow_mode(&desc[*seq_size], flow_mode);
/*
 * Common completion: unmap the request buffers, maintain the driver's
 * inflight counter, restore req->info to the last ciphertext block as the
 * crypto API requires (from the saved backup_info on decrypt, from dst on
 * encrypt), then complete the ablkcipher request.
 */
690 static int ssi_blkcipher_complete(struct device *dev,
691 struct ssi_ablkcipher_ctx *ctx_p,
692 struct blkcipher_req_ctx *req_ctx,
693 struct scatterlist *dst,
694 struct scatterlist *src,
697 void __iomem *cc_base)
699 int completion_error = 0;
700 u32 inflight_counter;
701 struct ablkcipher_request *req = (struct ablkcipher_request *)areq;
703 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
705 /*Set the inflight couter value to local variable*/
706 inflight_counter = ctx_p->drvdata->inflight_counter;
707 /*Decrease the inflight counter*/
/* NOTE(review): non-atomic read-modify-write of shared driver state —
 * confirm the caller context serializes completions. */
708 if (ctx_p->flow_mode == BYPASS && ctx_p->drvdata->inflight_counter > 0)
709 ctx_p->drvdata->inflight_counter--;
713 * The crypto API expects us to set the req->info to the last
714 * ciphertext block. For encrypt, simply copy from the result.
715 * For decrypt, we must copy from a saved buffer since this
716 * could be an in-place decryption operation and the src is
717 * lost by this point.
719 if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
720 memcpy(req->info, req_ctx->backup_info, ivsize);
721 kfree(req_ctx->backup_info);
723 scatterwalk_map_and_copy(req->info, req->dst,
724 (req->nbytes - ivsize),
728 ablkcipher_request_complete(areq, completion_error);
731 return completion_error;
/*
 * Main request path shared by encrypt/decrypt: validate the data size,
 * temporarily switch CTS to plain CBC when nbytes is block-aligned, map
 * the request buffers, build setup + data descriptor sequences, and push
 * them via send_request(). Async requests complete in the callback;
 * synchronous completion unmaps and finishes inline.
 * NOTE(review): the seq_len updates, else-branches and some returns sit
 * on lines dropped by extraction.
 */
734 static int ssi_blkcipher_process(
735 struct crypto_tfm *tfm,
736 struct blkcipher_req_ctx *req_ctx,
737 struct scatterlist *dst, struct scatterlist *src,
739 void *info, //req info
742 enum drv_crypto_direction direction)
744 struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
745 struct device *dev = &ctx_p->drvdata->plat_dev->dev;
746 struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
747 struct ssi_crypto_req ssi_req = {};
748 int rc, seq_len = 0, cts_restore_flag = 0;
750 SSI_LOG_DEBUG("%s areq=%p info=%p nbytes=%d\n",
751 ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ? "Encrypt" : "Decrypt"),
754 /* STAT_PHASE_0: Init and sanity checks */
756 /* TODO: check data length according to mode */
757 if (unlikely(validate_data_size(ctx_p, nbytes))) {
758 SSI_LOG_ERR("Unsupported data size %d.\n", nbytes);
759 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
764 /* No data to process is valid */
768 /*For CTS in case of data size aligned to 16 use CBC mode*/
/* NOTE(review): mutating ctx_p (shared per-tfm state) for one request
 * looks racy with concurrent requests on the same tfm — verify. */
769 if (((nbytes % AES_BLOCK_SIZE) == 0) && (ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS)) {
770 ctx_p->cipher_mode = DRV_CIPHER_CBC;
771 cts_restore_flag = 1;
774 /* Setup DX request structure */
775 ssi_req.user_cb = (void *)ssi_ablkcipher_complete;
776 ssi_req.user_arg = (void *)areq;
778 #ifdef ENABLE_CYCLE_COUNT
779 ssi_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
780 STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;
784 /* Setup request context */
785 req_ctx->gen_ctx.op_type = direction;
787 /* STAT_PHASE_1: Map buffers */
789 rc = ssi_buffer_mgr_map_blkcipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, info, src, dst);
790 if (unlikely(rc != 0)) {
791 SSI_LOG_ERR("map_request() failed\n");
795 /* STAT_PHASE_2: Create sequence */
797 /* Setup processing */
798 #if SSI_CC_HAS_MULTI2
799 if (ctx_p->flow_mode == S_DIN_to_MULTI2)
800 ssi_blkcipher_create_multi2_setup_desc(tfm, req_ctx, ivsize,
803 #endif /*SSI_CC_HAS_MULTI2*/
804 ssi_blkcipher_create_setup_desc(tfm, req_ctx, ivsize, nbytes,
806 /* Data processing */
807 ssi_blkcipher_create_data_desc(tfm, req_ctx, dst, src, nbytes, areq,
810 /* do we need to generate IV? */
811 if (req_ctx->is_giv) {
812 ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
813 ssi_req.ivgen_dma_addr_len = 1;
814 /* set the IV size (8/16 B long)*/
815 ssi_req.ivgen_size = ivsize;
818 /* STAT_PHASE_3: Lock HW and push sequence */
/* Last arg: 1 = async (callback completes), 0 = synchronous. */
820 rc = send_request(ctx_p->drvdata, &ssi_req, desc, seq_len, (!areq) ? 0 : 1);
822 if (unlikely(rc != -EINPROGRESS)) {
823 /* Failed to send the request or request completed synchronously */
824 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
/* Synchronous path: unmap and run completion inline. */
829 ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
831 rc = ssi_blkcipher_complete(dev, ctx_p, req_ctx, dst,
833 ctx_p->drvdata->cc_base);
/* Restore CTS mode if it was downgraded to CBC above. */
838 if (cts_restore_flag != 0)
839 ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;
/* Free the decrypt backup buffer on any non-async exit. */
841 if (rc != -EINPROGRESS)
842 kfree(req_ctx->backup_info);
/*
 * Async completion callback registered with send_request(); recovers the
 * ablkcipher request from the opaque ssi_req and delegates to the common
 * ssi_blkcipher_complete().
 */
847 static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base)
849 struct ablkcipher_request *areq = (struct ablkcipher_request *)ssi_req;
850 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(areq);
851 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
852 struct ssi_ablkcipher_ctx *ctx_p = crypto_ablkcipher_ctx(tfm);
853 unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
855 ssi_blkcipher_complete(dev, ctx_p, req_ctx, areq->dst, areq->src,
856 ivsize, areq, cc_base);
859 /* Async wrap functions */
/* Async-API init wrapper: set the per-request context size, then run the
 * shared blkcipher init. */
861 static int ssi_ablkcipher_init(struct crypto_tfm *tfm)
863 struct ablkcipher_tfm *ablktfm = &tfm->crt_ablkcipher;
865 ablktfm->reqsize = sizeof(struct blkcipher_req_ctx);
867 return ssi_blkcipher_init(tfm);
/* Thin wrapper: forward to the shared setkey on the base crypto_tfm. */
870 static int ssi_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
874 return ssi_blkcipher_setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
/* Async encrypt entry point: no IV backup needed (last ciphertext block
 * is read from dst at completion); dispatch to the shared process path. */
877 static int ssi_ablkcipher_encrypt(struct ablkcipher_request *req)
879 struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
880 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
881 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
882 unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
884 req_ctx->is_giv = false;
886 return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_ENCRYPT);
/*
 * Async decrypt entry point: snapshot the last IV-sized chunk of src into
 * backup_info before processing, because in-place decryption destroys the
 * ciphertext that completion must copy into req->info.
 */
889 static int ssi_ablkcipher_decrypt(struct ablkcipher_request *req)
891 struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
892 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
893 struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
894 unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
897 * Allocate and save the last IV sized bytes of the source, which will
898 * be lost in case of in-place decryption and might be needed for CTS.
900 req_ctx->backup_info = kmalloc(ivsize, GFP_KERNEL)
901 if (!req_ctx->backup_info)
904 scatterwalk_map_and_copy(req_ctx->backup_info, req->src,
905 (req->nbytes - ivsize), ivsize, 0);
906 req_ctx->is_giv = false;
908 return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_DECRYPT);
911 /* DX Block cipher alg */
912 static struct ssi_alg_template blkcipher_algs[] = {
914 #if SSI_CC_HAS_AES_XTS
917 .driver_name = "xts-aes-dx",
918 .blocksize = AES_BLOCK_SIZE,
919 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
920 .template_ablkcipher = {
921 .setkey = ssi_ablkcipher_setkey,
922 .encrypt = ssi_ablkcipher_encrypt,
923 .decrypt = ssi_ablkcipher_decrypt,
924 .min_keysize = AES_MIN_KEY_SIZE * 2,
925 .max_keysize = AES_MAX_KEY_SIZE * 2,
926 .ivsize = AES_BLOCK_SIZE,
929 .cipher_mode = DRV_CIPHER_XTS,
930 .flow_mode = S_DIN_to_AES,
934 .driver_name = "xts-aes-du512-dx",
935 .blocksize = AES_BLOCK_SIZE,
936 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
937 .template_ablkcipher = {
938 .setkey = ssi_ablkcipher_setkey,
939 .encrypt = ssi_ablkcipher_encrypt,
940 .decrypt = ssi_ablkcipher_decrypt,
941 .min_keysize = AES_MIN_KEY_SIZE * 2,
942 .max_keysize = AES_MAX_KEY_SIZE * 2,
943 .ivsize = AES_BLOCK_SIZE,
945 .cipher_mode = DRV_CIPHER_XTS,
946 .flow_mode = S_DIN_to_AES,
950 .driver_name = "xts-aes-du4096-dx",
951 .blocksize = AES_BLOCK_SIZE,
952 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
953 .template_ablkcipher = {
954 .setkey = ssi_ablkcipher_setkey,
955 .encrypt = ssi_ablkcipher_encrypt,
956 .decrypt = ssi_ablkcipher_decrypt,
957 .min_keysize = AES_MIN_KEY_SIZE * 2,
958 .max_keysize = AES_MAX_KEY_SIZE * 2,
959 .ivsize = AES_BLOCK_SIZE,
961 .cipher_mode = DRV_CIPHER_XTS,
962 .flow_mode = S_DIN_to_AES,
964 #endif /*SSI_CC_HAS_AES_XTS*/
965 #if SSI_CC_HAS_AES_ESSIV
967 .name = "essiv(aes)",
968 .driver_name = "essiv-aes-dx",
969 .blocksize = AES_BLOCK_SIZE,
970 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
971 .template_ablkcipher = {
972 .setkey = ssi_ablkcipher_setkey,
973 .encrypt = ssi_ablkcipher_encrypt,
974 .decrypt = ssi_ablkcipher_decrypt,
975 .min_keysize = AES_MIN_KEY_SIZE * 2,
976 .max_keysize = AES_MAX_KEY_SIZE * 2,
977 .ivsize = AES_BLOCK_SIZE,
979 .cipher_mode = DRV_CIPHER_ESSIV,
980 .flow_mode = S_DIN_to_AES,
983 .name = "essiv(aes)",
984 .driver_name = "essiv-aes-du512-dx",
985 .blocksize = AES_BLOCK_SIZE,
986 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
987 .template_ablkcipher = {
988 .setkey = ssi_ablkcipher_setkey,
989 .encrypt = ssi_ablkcipher_encrypt,
990 .decrypt = ssi_ablkcipher_decrypt,
991 .min_keysize = AES_MIN_KEY_SIZE * 2,
992 .max_keysize = AES_MAX_KEY_SIZE * 2,
993 .ivsize = AES_BLOCK_SIZE,
995 .cipher_mode = DRV_CIPHER_ESSIV,
996 .flow_mode = S_DIN_to_AES,
999 .name = "essiv(aes)",
1000 .driver_name = "essiv-aes-du4096-dx",
1001 .blocksize = AES_BLOCK_SIZE,
1002 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1003 .template_ablkcipher = {
1004 .setkey = ssi_ablkcipher_setkey,
1005 .encrypt = ssi_ablkcipher_encrypt,
1006 .decrypt = ssi_ablkcipher_decrypt,
1007 .min_keysize = AES_MIN_KEY_SIZE * 2,
1008 .max_keysize = AES_MAX_KEY_SIZE * 2,
1009 .ivsize = AES_BLOCK_SIZE,
1011 .cipher_mode = DRV_CIPHER_ESSIV,
1012 .flow_mode = S_DIN_to_AES,
1014 #endif /*SSI_CC_HAS_AES_ESSIV*/
1015 #if SSI_CC_HAS_AES_BITLOCKER
1017 .name = "bitlocker(aes)",
1018 .driver_name = "bitlocker-aes-dx",
1019 .blocksize = AES_BLOCK_SIZE,
1020 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1021 .template_ablkcipher = {
1022 .setkey = ssi_ablkcipher_setkey,
1023 .encrypt = ssi_ablkcipher_encrypt,
1024 .decrypt = ssi_ablkcipher_decrypt,
1025 .min_keysize = AES_MIN_KEY_SIZE * 2,
1026 .max_keysize = AES_MAX_KEY_SIZE * 2,
1027 .ivsize = AES_BLOCK_SIZE,
1029 .cipher_mode = DRV_CIPHER_BITLOCKER,
1030 .flow_mode = S_DIN_to_AES,
1033 .name = "bitlocker(aes)",
1034 .driver_name = "bitlocker-aes-du512-dx",
1035 .blocksize = AES_BLOCK_SIZE,
1036 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1037 .template_ablkcipher = {
1038 .setkey = ssi_ablkcipher_setkey,
1039 .encrypt = ssi_ablkcipher_encrypt,
1040 .decrypt = ssi_ablkcipher_decrypt,
1041 .min_keysize = AES_MIN_KEY_SIZE * 2,
1042 .max_keysize = AES_MAX_KEY_SIZE * 2,
1043 .ivsize = AES_BLOCK_SIZE,
1045 .cipher_mode = DRV_CIPHER_BITLOCKER,
1046 .flow_mode = S_DIN_to_AES,
1049 .name = "bitlocker(aes)",
1050 .driver_name = "bitlocker-aes-du4096-dx",
1051 .blocksize = AES_BLOCK_SIZE,
1052 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1053 .template_ablkcipher = {
1054 .setkey = ssi_ablkcipher_setkey,
1055 .encrypt = ssi_ablkcipher_encrypt,
1056 .decrypt = ssi_ablkcipher_decrypt,
1057 .min_keysize = AES_MIN_KEY_SIZE * 2,
1058 .max_keysize = AES_MAX_KEY_SIZE * 2,
1059 .ivsize = AES_BLOCK_SIZE,
1061 .cipher_mode = DRV_CIPHER_BITLOCKER,
1062 .flow_mode = S_DIN_to_AES,
1064 #endif /*SSI_CC_HAS_AES_BITLOCKER*/
1067 .driver_name = "ecb-aes-dx",
1068 .blocksize = AES_BLOCK_SIZE,
1069 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1070 .template_ablkcipher = {
1071 .setkey = ssi_ablkcipher_setkey,
1072 .encrypt = ssi_ablkcipher_encrypt,
1073 .decrypt = ssi_ablkcipher_decrypt,
1074 .min_keysize = AES_MIN_KEY_SIZE,
1075 .max_keysize = AES_MAX_KEY_SIZE,
1078 .cipher_mode = DRV_CIPHER_ECB,
1079 .flow_mode = S_DIN_to_AES,
1083 .driver_name = "cbc-aes-dx",
1084 .blocksize = AES_BLOCK_SIZE,
1085 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1086 .template_ablkcipher = {
1087 .setkey = ssi_ablkcipher_setkey,
1088 .encrypt = ssi_ablkcipher_encrypt,
1089 .decrypt = ssi_ablkcipher_decrypt,
1090 .min_keysize = AES_MIN_KEY_SIZE,
1091 .max_keysize = AES_MAX_KEY_SIZE,
1092 .ivsize = AES_BLOCK_SIZE,
1094 .cipher_mode = DRV_CIPHER_CBC,
1095 .flow_mode = S_DIN_to_AES,
1099 .driver_name = "ofb-aes-dx",
1100 .blocksize = AES_BLOCK_SIZE,
1101 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1102 .template_ablkcipher = {
1103 .setkey = ssi_ablkcipher_setkey,
1104 .encrypt = ssi_ablkcipher_encrypt,
1105 .decrypt = ssi_ablkcipher_decrypt,
1106 .min_keysize = AES_MIN_KEY_SIZE,
1107 .max_keysize = AES_MAX_KEY_SIZE,
1108 .ivsize = AES_BLOCK_SIZE,
1110 .cipher_mode = DRV_CIPHER_OFB,
1111 .flow_mode = S_DIN_to_AES,
1113 #if SSI_CC_HAS_AES_CTS
1115 .name = "cts1(cbc(aes))",
1116 .driver_name = "cts1-cbc-aes-dx",
1117 .blocksize = AES_BLOCK_SIZE,
1118 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1119 .template_ablkcipher = {
1120 .setkey = ssi_ablkcipher_setkey,
1121 .encrypt = ssi_ablkcipher_encrypt,
1122 .decrypt = ssi_ablkcipher_decrypt,
1123 .min_keysize = AES_MIN_KEY_SIZE,
1124 .max_keysize = AES_MAX_KEY_SIZE,
1125 .ivsize = AES_BLOCK_SIZE,
1127 .cipher_mode = DRV_CIPHER_CBC_CTS,
1128 .flow_mode = S_DIN_to_AES,
1133 .driver_name = "ctr-aes-dx",
1135 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1136 .template_ablkcipher = {
1137 .setkey = ssi_ablkcipher_setkey,
1138 .encrypt = ssi_ablkcipher_encrypt,
1139 .decrypt = ssi_ablkcipher_decrypt,
1140 .min_keysize = AES_MIN_KEY_SIZE,
1141 .max_keysize = AES_MAX_KEY_SIZE,
1142 .ivsize = AES_BLOCK_SIZE,
1144 .cipher_mode = DRV_CIPHER_CTR,
1145 .flow_mode = S_DIN_to_AES,
1148 .name = "cbc(des3_ede)",
1149 .driver_name = "cbc-3des-dx",
1150 .blocksize = DES3_EDE_BLOCK_SIZE,
1151 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1152 .template_ablkcipher = {
1153 .setkey = ssi_ablkcipher_setkey,
1154 .encrypt = ssi_ablkcipher_encrypt,
1155 .decrypt = ssi_ablkcipher_decrypt,
1156 .min_keysize = DES3_EDE_KEY_SIZE,
1157 .max_keysize = DES3_EDE_KEY_SIZE,
1158 .ivsize = DES3_EDE_BLOCK_SIZE,
1160 .cipher_mode = DRV_CIPHER_CBC,
1161 .flow_mode = S_DIN_to_DES,
1164 .name = "ecb(des3_ede)",
1165 .driver_name = "ecb-3des-dx",
1166 .blocksize = DES3_EDE_BLOCK_SIZE,
1167 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1168 .template_ablkcipher = {
1169 .setkey = ssi_ablkcipher_setkey,
1170 .encrypt = ssi_ablkcipher_encrypt,
1171 .decrypt = ssi_ablkcipher_decrypt,
1172 .min_keysize = DES3_EDE_KEY_SIZE,
1173 .max_keysize = DES3_EDE_KEY_SIZE,
1176 .cipher_mode = DRV_CIPHER_ECB,
1177 .flow_mode = S_DIN_to_DES,
1181 .driver_name = "cbc-des-dx",
1182 .blocksize = DES_BLOCK_SIZE,
1183 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1184 .template_ablkcipher = {
1185 .setkey = ssi_ablkcipher_setkey,
1186 .encrypt = ssi_ablkcipher_encrypt,
1187 .decrypt = ssi_ablkcipher_decrypt,
1188 .min_keysize = DES_KEY_SIZE,
1189 .max_keysize = DES_KEY_SIZE,
1190 .ivsize = DES_BLOCK_SIZE,
1192 .cipher_mode = DRV_CIPHER_CBC,
1193 .flow_mode = S_DIN_to_DES,
1197 .driver_name = "ecb-des-dx",
1198 .blocksize = DES_BLOCK_SIZE,
1199 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1200 .template_ablkcipher = {
1201 .setkey = ssi_ablkcipher_setkey,
1202 .encrypt = ssi_ablkcipher_encrypt,
1203 .decrypt = ssi_ablkcipher_decrypt,
1204 .min_keysize = DES_KEY_SIZE,
1205 .max_keysize = DES_KEY_SIZE,
1208 .cipher_mode = DRV_CIPHER_ECB,
1209 .flow_mode = S_DIN_to_DES,
1211 #if SSI_CC_HAS_MULTI2
1213 .name = "cbc(multi2)",
1214 .driver_name = "cbc-multi2-dx",
1215 .blocksize = CC_MULTI2_BLOCK_SIZE,
1216 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1217 .template_ablkcipher = {
1218 .setkey = ssi_ablkcipher_setkey,
1219 .encrypt = ssi_ablkcipher_encrypt,
1220 .decrypt = ssi_ablkcipher_decrypt,
1221 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1222 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1223 .ivsize = CC_MULTI2_IV_SIZE,
1225 .cipher_mode = DRV_MULTI2_CBC,
1226 .flow_mode = S_DIN_to_MULTI2,
1229 .name = "ofb(multi2)",
1230 .driver_name = "ofb-multi2-dx",
1232 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1233 .template_ablkcipher = {
1234 .setkey = ssi_ablkcipher_setkey,
1235 .encrypt = ssi_ablkcipher_encrypt,
1236 .decrypt = ssi_ablkcipher_encrypt,
1237 .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1238 .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1239 .ivsize = CC_MULTI2_IV_SIZE,
1241 .cipher_mode = DRV_MULTI2_OFB,
1242 .flow_mode = S_DIN_to_MULTI2,
1244 #endif /*SSI_CC_HAS_MULTI2*/
/*
 * ssi_ablkcipher_create_alg() - build one registrable crypto_alg instance
 * from an entry of the blkcipher_algs[] template table.
 *
 * @template: driver alg template supplying name, driver_name, blocksize,
 *            type flags, the ablkcipher callback set, cipher_mode and
 *            flow_mode.
 *
 * Returns the newly allocated struct ssi_crypto_alg on success, or
 * ERR_PTR(-ENOMEM) if the allocation fails.  The caller owns the returned
 * object and frees it after crypto_unregister_alg() (see
 * ssi_ablkcipher_free()).
 *
 * NOTE(review): this view of the file is missing a few lines (the
 * `if (!t_alg)` guard wrapping the error path below, the trailing
 * `template->type;` continuation of the cra_flags assignment, and the
 * final `return t_alg;`) — confirm against the full source.
 */
1248 struct ssi_crypto_alg *ssi_ablkcipher_create_alg(struct ssi_alg_template *template)
1250 struct ssi_crypto_alg *t_alg;
1251 struct crypto_alg *alg;
/* Zeroed allocation: every cra_* field not set below stays 0/NULL. */
1253 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
1255 SSI_LOG_ERR("failed to allocate t_alg\n");
1256 return ERR_PTR(-ENOMEM);
1259 alg = &t_alg->crypto_alg;
/*
 * snprintf() bounds both names to CRYPTO_MAX_ALG_NAME and always
 * NUL-terminates, unlike a raw strcpy of the template strings.
 */
1261 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
1262 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1263 template->driver_name);
1264 alg->cra_module = THIS_MODULE;
1265 alg->cra_priority = SSI_CRA_PRIO;
1266 alg->cra_blocksize = template->blocksize;
1267 alg->cra_alignmask = 0;
/* Per-tfm context: crypto API allocates this for each transform. */
1268 alg->cra_ctxsize = sizeof(struct ssi_ablkcipher_ctx);
1270 alg->cra_init = ssi_ablkcipher_init;
1271 alg->cra_exit = ssi_blkcipher_exit;
1272 alg->cra_type = &crypto_ablkcipher_type;
/* Struct copy of the whole setkey/encrypt/decrypt/keysize template. */
1273 alg->cra_ablkcipher = template->template_ablkcipher;
/* Async HW offload only: never selected as a generic SW fallback. */
1274 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
/* Driver-private dispatch info used when building HW descriptors. */
1277 t_alg->cipher_mode = template->cipher_mode;
1278 t_alg->flow_mode = template->flow_mode;
/*
 * ssi_ablkcipher_free() - unregister all blkcipher algs and release the
 * driver's blkcipher handle.
 *
 * @drvdata: per-device driver context owning the handle.
 *
 * Walks the registered-alg list with the _safe iterator because each node
 * is unlinked while iterating.  Safe to call when no handle was ever
 * allocated (the NULL check below makes it idempotent after
 * drvdata->blkcipher_handle is reset).
 *
 * NOTE(review): this view is missing lines around the loop body (e.g. a
 * probable `kfree(t_alg);` after list_del(), the `struct device *dev;`
 * declaration used at the assignment below, and the final `return 0;`) —
 * confirm against the full source.
 */
1283 int ssi_ablkcipher_free(struct ssi_drvdata *drvdata)
1285 struct ssi_crypto_alg *t_alg, *n;
1286 struct ssi_blkcipher_handle *blkcipher_handle =
1287 drvdata->blkcipher_handle;
1290 dev = &drvdata->plat_dev->dev;
1292 if (blkcipher_handle) {
1293 /* Remove registered algs */
1294 list_for_each_entry_safe(t_alg, n,
1295 &blkcipher_handle->blkcipher_alg_list,
/* Unregister from the crypto API before tearing down the node. */
1297 crypto_unregister_alg(&t_alg->crypto_alg);
1298 list_del(&t_alg->entry);
1301 kfree(blkcipher_handle);
/* Clear the pointer so a repeated call is a harmless no-op. */
1302 drvdata->blkcipher_handle = NULL;
1307 int ssi_ablkcipher_alloc(struct ssi_drvdata *drvdata)
1309 struct ssi_blkcipher_handle *ablkcipher_handle;
1310 struct ssi_crypto_alg *t_alg;
1314 ablkcipher_handle = kmalloc(sizeof(*ablkcipher_handle), GFP_KERNEL);
1315 if (!ablkcipher_handle)
1318 drvdata->blkcipher_handle = ablkcipher_handle;
1320 INIT_LIST_HEAD(&ablkcipher_handle->blkcipher_alg_list);
1323 SSI_LOG_DEBUG("Number of algorithms = %zu\n", ARRAY_SIZE(blkcipher_algs));
1324 for (alg = 0; alg < ARRAY_SIZE(blkcipher_algs); alg++) {
1325 SSI_LOG_DEBUG("creating %s\n", blkcipher_algs[alg].driver_name);
1326 t_alg = ssi_ablkcipher_create_alg(&blkcipher_algs[alg]);
1327 if (IS_ERR(t_alg)) {
1328 rc = PTR_ERR(t_alg);
1329 SSI_LOG_ERR("%s alg allocation failed\n",
1330 blkcipher_algs[alg].driver_name);
1333 t_alg->drvdata = drvdata;
1335 SSI_LOG_DEBUG("registering %s\n", blkcipher_algs[alg].driver_name);
1336 rc = crypto_register_alg(&t_alg->crypto_alg);
1337 SSI_LOG_DEBUG("%s alg registration rc = %x\n",
1338 t_alg->crypto_alg.cra_driver_name, rc);
1339 if (unlikely(rc != 0)) {
1340 SSI_LOG_ERR("%s alg registration failed\n",
1341 t_alg->crypto_alg.cra_driver_name);
1345 list_add_tail(&t_alg->entry,
1346 &ablkcipher_handle->blkcipher_alg_list);
1347 SSI_LOG_DEBUG("Registered %s\n",
1348 t_alg->crypto_alg.cra_driver_name);
1354 ssi_ablkcipher_free(drvdata);