// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2021 Aspeed Technology Inc.
 */

#include "aspeed-hace.h"

#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO_DEBUG
#define CIPHER_DBG(h, fmt, ...) \
	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#else
#define CIPHER_DBG(h, fmt, ...) \
	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#endif

static int aspeed_crypto_do_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	if (rctx->enc_cmd & HACE_CMD_ENCRYPT)
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_decrypt(&rctx->fallback_req);

	return err;
}
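
/*
 * The AST2500 engine appears unable to handle requests that are not
 * block-size aligned, so zero-length and unaligned requests are routed
 * to the software fallback instead.
 */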
static bool aspeed_crypto_need_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);

	if (areq->cryptlen == 0)
		return true;

	if ((rctx->enc_cmd & HACE_CMD_DES_SELECT) &&
	    !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE))
		return true;

	if (!(rctx->enc_cmd & HACE_CMD_DES_SELECT) &&
	    !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE))
		return true;

	return false;
}
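
/* Hand a request to the crypto engine queue, or to the SW fallback. */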
static int aspeed_hace_crypto_handle_queue(struct aspeed_hace_dev *hace_dev,
					   struct skcipher_request *req)
{
	if (hace_dev->version == AST2500_VERSION &&
	    aspeed_crypto_need_fallback(req)) {
		CIPHER_DBG(hace_dev, "SW fallback\n");
		return aspeed_crypto_do_fallback(req);
	}

	return crypto_transfer_skcipher_request_to_engine(
			hace_dev->crypt_engine_crypto, req);
}
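
/* crypto_engine worker callback: mark the engine busy and start the HW. */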
static int aspeed_crypto_do_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct aspeed_engine_crypto *crypto_engine;
	int rc;

	crypto_engine = &hace_dev->crypto_engine;
	crypto_engine->req = req;
	crypto_engine->flags |= CRYPTO_FLAGS_BUSY;

	rc = ctx->start(hace_dev);

	if (rc != -EINPROGRESS)
		return -EIO;

	return 0;
}
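
/*
 * Common completion: copy the engine-updated IV back into the request
 * (skcipher users rely on the output IV for chaining), clear the busy
 * flag and finalize the request on the crypto engine.
 */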
static int aspeed_sk_complete(struct aspeed_hace_dev *hace_dev, int err)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(req->iv, crypto_engine->cipher_ctx +
			       DES_KEY_SIZE, DES_KEY_SIZE);
		else
			memcpy(req->iv, crypto_engine->cipher_ctx,
			       AES_BLOCK_SIZE);
	}

	crypto_engine->flags &= ~CRYPTO_FLAGS_BUSY;

	crypto_finalize_skcipher_request(hace_dev->crypt_engine_crypto, req,
					 err);

	return err;
}
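
/* Completion path for scatter-gather mode: unmap DMA and finish. */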
static int aspeed_sk_transfer_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct device *dev = hace_dev->dev;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (req->src == req->dst) {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, req->dst, rctx->dst_nents, DMA_FROM_DEVICE);
	}

	return aspeed_sk_complete(hace_dev, 0);
}
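
/* Completion path for CPU-copy mode: copy the output buffer back to dst. */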
static int aspeed_sk_transfer(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *out_sg;
	int nbytes = 0;
	int rc = 0;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	out_sg = req->dst;

	/* Copy output buffer to dst scatter-gather lists */
	nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents,
				     crypto_engine->cipher_addr, req->cryptlen);
	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		rc = -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_out_sg", rctx->dst_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	return aspeed_sk_complete(hace_dev, rc);
}
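
/*
 * CPU-copy mode (AST2500): stage the request through the engine's
 * contiguous DMA buffer, which serves as both source and destination.
 */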
static int aspeed_sk_start(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *in_sg;
	int nbytes;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	in_sg = req->src;

	nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,
				   crypto_engine->cipher_addr, req->cryptlen);

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_in_sg", rctx->src_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		return -EINVAL;
	}

	crypto_engine->resume = aspeed_sk_transfer;

	/* Trigger engines */
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;
}
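
/*
 * Scatter-gather mode (AST2600): build hardware SG descriptor tables for
 * src and dst; BIT(31) in a descriptor's length word marks the last entry.
 */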
static int aspeed_sk_start_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_sg_list *src_list, *dst_list;
	dma_addr_t src_dma_addr, dst_dma_addr;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *s;
	int src_sg_len;
	int dst_sg_len;
	int total, i;
	int rc;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	rctx->enc_cmd |= HACE_CMD_DES_SG_CTRL | HACE_CMD_SRC_SG_CTRL |
			 HACE_CMD_AES_KEY_HW_EXP | HACE_CMD_MBUS_REQ_SYNC_EN;

	/* BIDIRECTIONAL */
	if (req->dst == req->src) {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_BIDIRECTIONAL);
		dst_sg_len = src_sg_len;
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}

	} else {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_TO_DEVICE);
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}

		dst_sg_len = dma_map_sg(hace_dev->dev, req->dst,
					rctx->dst_nents, DMA_FROM_DEVICE);
		if (!dst_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() dst error\n");
			rc = -EINVAL;
			goto free_req_src;
		}
	}

	src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr;
	src_dma_addr = crypto_engine->cipher_dma_addr;
	total = req->cryptlen;

	for_each_sg(req->src, s, src_sg_len, i) {
		u32 phy_addr = sg_dma_address(s);
		u32 len = sg_dma_len(s);

		if (total > len)
			total -= len;
		else {
			/* last sg list */
			len = total;
			len |= BIT(31);
			total = 0;
		}

		src_list[i].phy_addr = cpu_to_le32(phy_addr);
		src_list[i].len = cpu_to_le32(len);
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	if (req->dst == req->src) {
		dst_list = src_list;
		dst_dma_addr = src_dma_addr;

	} else {
		dst_list = (struct aspeed_sg_list *)crypto_engine->dst_sg_addr;
		dst_dma_addr = crypto_engine->dst_sg_dma_addr;
		total = req->cryptlen;

		for_each_sg(req->dst, s, dst_sg_len, i) {
			u32 phy_addr = sg_dma_address(s);
			u32 len = sg_dma_len(s);

			if (total > len)
				total -= len;
			else {
				/* last sg list */
				len = total;
				len |= BIT(31);
				total = 0;
			}

			dst_list[i].phy_addr = cpu_to_le32(phy_addr);
			dst_list[i].len = cpu_to_le32(len);
		}

		dst_list[dst_sg_len].phy_addr = 0;
		dst_list[dst_sg_len].len = 0;
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	crypto_engine->resume = aspeed_sk_transfer_sg;

	/* Memory barrier to ensure all data setup before engine starts */
	mb();

	/* Trigger engines */
	ast_hace_write(hace_dev, src_dma_addr, ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, dst_dma_addr, ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;

free_req:
	if (req->dst == req->src) {
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_BIDIRECTIONAL);

	} else {
		dma_unmap_sg(hace_dev->dev, req->dst, rctx->dst_nents,
			     DMA_FROM_DEVICE);
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_TO_DEVICE);
	}

	return rc;

free_req_src:
	dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);

	return rc;
}
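
/*
 * Common trigger: program the context buffer (IV at offset 0, key at
 * offset 16), then start the version-specific transfer path.
 */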
static int aspeed_hace_skcipher_trigger(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct crypto_skcipher *cipher;
	struct aspeed_cipher_ctx *ctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	cipher = crypto_skcipher_reqtfm(req);
	ctx = crypto_skcipher_ctx(cipher);

	/* enable interrupt */
	rctx->enc_cmd |= HACE_CMD_ISR_EN;

	rctx->dst_nents = sg_nents(req->dst);
	rctx->src_nents = sg_nents(req->src);

	ast_hace_write(hace_dev, crypto_engine->cipher_ctx_dma,
		       ASPEED_HACE_CONTEXT);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(crypto_engine->cipher_ctx + DES_BLOCK_SIZE,
			       req->iv, DES_BLOCK_SIZE);
		else
			memcpy(crypto_engine->cipher_ctx, req->iv,
			       AES_BLOCK_SIZE);
	}

	if (hace_dev->version == AST2600_VERSION) {
		memcpy(crypto_engine->cipher_ctx + 16, ctx->key, ctx->key_len);

		return aspeed_sk_start_sg(hace_dev);
	}

	memcpy(crypto_engine->cipher_ctx + 16, ctx->key, AES_MAX_KEYLENGTH);

	return aspeed_sk_start(hace_dev);
}

static int aspeed_des_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	CIPHER_DBG(hace_dev, "\n");

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE))
			return -EINVAL;
	}

	rctx->enc_cmd = cmd | HACE_CMD_DES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
			HACE_CMD_DES | HACE_CMD_CONTEXT_LOAD_ENABLE |
			HACE_CMD_CONTEXT_SAVE_ENABLE;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}
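
/* Validate the DES/3DES key, then mirror it into the fallback tfm. */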
static int aspeed_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	int rc;

	CIPHER_DBG(hace_dev, "keylen: %d bytes\n", keylen);

	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
		dev_warn(hace_dev->dev, "invalid keylen: %d bytes\n", keylen);
		return -EINVAL;
	}

	if (keylen == DES_KEY_SIZE) {
		rc = crypto_des_verify_key(tfm, key);
		if (rc)
			return rc;

	} else if (keylen == DES3_EDE_KEY_SIZE) {
		rc = crypto_des3_ede_verify_key(tfm, key);
		if (rc)
			return rc;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int aspeed_tdes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ofb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ofb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cfb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cfb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_des_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ofb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ofb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cfb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cfb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_aes_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
			return -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s\n",
		   (cmd & HACE_CMD_ENCRYPT) ? "encrypt" : "decrypt");

	cmd |= HACE_CMD_AES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
	       HACE_CMD_CONTEXT_LOAD_ENABLE | HACE_CMD_CONTEXT_SAVE_ENABLE;

	switch (ctx->key_len) {
	case AES_KEYSIZE_128:
		cmd |= HACE_CMD_AES128;
		break;
	case AES_KEYSIZE_192:
		cmd |= HACE_CMD_AES192;
		break;
	case AES_KEYSIZE_256:
		cmd |= HACE_CMD_AES256;
		break;
	default:
		return -EINVAL;
	}

	rctx->enc_cmd = cmd;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}
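
/*
 * On the AST2500 the engine consumes a pre-expanded key schedule, so the
 * key is expanded in software here; the AST2600 SG path instead sets
 * HACE_CMD_AES_KEY_HW_EXP and lets the hardware expand the raw key.
 */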
static int aspeed_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct crypto_aes_ctx gen_aes_key;

	CIPHER_DBG(hace_dev, "keylen: %d bits\n", (keylen * 8));

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	if (ctx->hace_dev->version == AST2500_VERSION) {
		aes_expandkey(&gen_aes_key, key, keylen);
		memcpy(ctx->key, gen_aes_key.key_enc, AES_MAX_KEYLENGTH);
	} else {
		memcpy(ctx->key, key, keylen);
	}

	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int aspeed_aes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_ofb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB);
}

static int aspeed_aes_ofb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB);
}

static int aspeed_aes_cfb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB);
}

static int aspeed_aes_cfb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB);
}

static int aspeed_aes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB);
}

static int aspeed_aes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB);
}

static int aspeed_crypto_cra_init(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct aspeed_hace_alg *crypto_alg;

	crypto_alg = container_of(alg, struct aspeed_hace_alg, alg.skcipher);
	ctx->hace_dev = crypto_alg->hace_dev;
	ctx->start = aspeed_hace_skcipher_trigger;

	CIPHER_DBG(ctx->hace_dev, "%s\n", name);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(ctx->hace_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct aspeed_cipher_reqctx) +
				    crypto_skcipher_reqsize(ctx->fallback_tfm));

	ctx->enginectx.op.do_one_request = aspeed_crypto_do_request;
	ctx->enginectx.op.prepare_request = NULL;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static void aspeed_crypto_cra_exit(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;

	CIPHER_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(&tfm->base));
	crypto_free_skcipher(ctx->fallback_tfm);
}

static struct aspeed_hace_alg aspeed_crypto_algs[] = {
	{
		.alg.skcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_ecb_encrypt,
			.decrypt = aspeed_aes_ecb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "aspeed-ecb-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = AES_BLOCK_SIZE,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_cbc_encrypt,
			.decrypt = aspeed_aes_cbc_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "aspeed-cbc-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = AES_BLOCK_SIZE,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_cfb_encrypt,
			.decrypt = aspeed_aes_cfb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cfb(aes)",
				.cra_driver_name = "aspeed-cfb-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = AES_BLOCK_SIZE,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_ofb_encrypt,
			.decrypt = aspeed_aes_ofb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ofb(aes)",
				.cra_driver_name = "aspeed-ofb-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_ecb_encrypt,
			.decrypt = aspeed_des_ecb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "aspeed-ecb-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_cbc_encrypt,
			.decrypt = aspeed_des_cbc_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "aspeed-cbc-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_cfb_encrypt,
			.decrypt = aspeed_des_cfb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cfb(des)",
				.cra_driver_name = "aspeed-cfb-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_ofb_encrypt,
			.decrypt = aspeed_des_ofb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ofb(des)",
				.cra_driver_name = "aspeed-ofb-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_ecb_encrypt,
			.decrypt = aspeed_tdes_ecb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "aspeed-ecb-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_cbc_encrypt,
			.decrypt = aspeed_tdes_cbc_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "aspeed-cbc-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_cfb_encrypt,
			.decrypt = aspeed_tdes_cfb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cfb(des3_ede)",
				.cra_driver_name = "aspeed-cfb-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_ofb_encrypt,
			.decrypt = aspeed_tdes_ofb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ofb(des3_ede)",
				.cra_driver_name = "aspeed-ofb-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
};

static struct aspeed_hace_alg aspeed_crypto_algs_g6[] = {
	{
		.alg.skcipher = {
			.ivsize = AES_BLOCK_SIZE,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_ctr_encrypt,
			.decrypt = aspeed_aes_ctr_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "aspeed-ctr-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_ctr_encrypt,
			.decrypt = aspeed_des_ctr_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ctr(des)",
				.cra_driver_name = "aspeed-ctr-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.alg.skcipher = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_ctr_encrypt,
			.decrypt = aspeed_tdes_ctr_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ctr(des3_ede)",
				.cra_driver_name = "aspeed-ctr-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		}
	},
};

void aspeed_unregister_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++)
		crypto_unregister_skcipher(&aspeed_crypto_algs[i].alg.skcipher);

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++)
		crypto_unregister_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
}

void aspeed_register_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int rc, i;

	CIPHER_DBG(hace_dev, "\n");

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++) {
		aspeed_crypto_algs[i].hace_dev = hace_dev;
		rc = crypto_register_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs[i].alg.skcipher.base.cra_name);
		}
	}

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++) {
		aspeed_crypto_algs_g6[i].hace_dev = hace_dev;
		rc = crypto_register_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs_g6[i].alg.skcipher.base.cra_name);
		}
	}
}