// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */

#include "rk3288_crypto.h"
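
/*
 * RK_CRYPTO_DEC is ORed into ctx->mode by the decrypt entry points below
 * to select decryption; ctx->mode is written to the AES/TDES control
 * register in rk_ablk_hw_init() when the request is dispatched.
 */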

#define RK_CRYPTO_DEC			BIT(0)
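
/* Hand a finished request back to the crypto API with its status. */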

static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
	if (base->complete)
		base->complete(base, err);
}
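
/*
 * Common entry point for all cipher requests: the engine only handles
 * whole blocks, so lengths that are not a multiple of the required
 * alignment are rejected; valid requests are queued on the device.
 */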

static int rk_handle_req(struct rk_crypto_info *dev,
			 struct ablkcipher_request *req)
{
	if (!IS_ALIGNED(req->nbytes, dev->align_size))
		return -EINVAL;
	else
		return dev->enqueue(dev, &req->base);
}
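
/*
 * The setkey helpers validate the key and write it straight into the
 * engine's key registers over MMIO; only the key length is kept in the
 * transform context.
 */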

static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
	return 0;
}

static int rk_des_setkey(struct crypto_ablkcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];

	if (!des_ekey(tmp, key) &&
	    (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}

static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 flags;
	int err;

	flags = crypto_ablkcipher_get_flags(cipher);
	err = __des3_verify_key(&flags, key);
	if (unlikely(err)) {
		crypto_ablkcipher_set_flags(cipher, flags);
		return err;
	}

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}
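
/*
 * The encrypt/decrypt entry points below only record the requested mode
 * bits in the transform context; the control registers are programmed
 * later, in rk_ablk_hw_init(), when the request is dispatched.
 */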

static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_ECB_MODE;
	return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_CBC_MODE;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = 0;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
		    RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_ablkcipher_ivsize(cipher);

	if (block == DES_BLOCK_SIZE) {
		ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			ctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			ctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}
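
/*
 * Program the source/destination DMA addresses and the transfer length
 * (in 32-bit words) and start processing of one chunk.
 */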

static void crypto_dma_start(struct rk_crypto_info *dev)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}
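
/*
 * Stage one chunk of the request: save the IV material needed when
 * decrypting in chain mode, then map the source/destination
 * scatterlists and start the DMA transfer.
 */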

static int rk_set_data_start(struct rk_crypto_info *dev)
{
	int err;
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u32 ivsize = crypto_ablkcipher_ivsize(tfm);
	u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
		dev->sg_src->offset + dev->sg_src->length - ivsize;

	/* Store the IV that needs to be updated in chain mode.
	 * And update the IV buffer to contain the next IV for decryption mode.
	 */
	if (ctx->mode & RK_CRYPTO_DEC) {
		memcpy(ctx->iv, src_last_blk, ivsize);
		sg_pcopy_to_buffer(dev->first, dev->src_nents, req->info,
				   ivsize, dev->total - ivsize);
	}

	err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
	if (!err)
		crypto_dma_start(dev);
	return err;
}
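
/*
 * Called when a queued request is dispatched: record the request
 * geometry and start the first chunk with the device lock held.
 */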

static int rk_ablk_start(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	unsigned long flags;
	int err = 0;

	dev->left_bytes = req->nbytes;
	dev->total = req->nbytes;
	dev->sg_src = req->src;
	dev->first = req->src;
	dev->src_nents = sg_nents(req->src);
	dev->sg_dst = req->dst;
	dev->dst_nents = sg_nents(req->dst);
	dev->aligned = 1;

	spin_lock_irqsave(&dev->lock, flags);
	rk_ablk_hw_init(dev);
	err = rk_set_data_start(dev);
	spin_unlock_irqrestore(&dev->lock, flags);
	return err;
}
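
/*
 * Once an encryption request has fully completed, copy the final
 * ciphertext block back into req->info so the caller gets the chaining
 * IV for a follow-up request.
 */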

static void rk_iv_copyback(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u32 ivsize = crypto_ablkcipher_ivsize(tfm);

	/* Update the IV buffer to contain the next IV for encryption mode. */
	if (!(ctx->mode & RK_CRYPTO_DEC)) {
		if (dev->aligned) {
			memcpy(req->info, sg_virt(dev->sg_dst) +
				dev->sg_dst->length - ivsize, ivsize);
		} else {
			memcpy(req->info, dev->addr_vir +
				dev->count - ivsize, ivsize);
		}
	}
}
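
/*
 * Between chunks of a multi-chunk request, reload the hardware IV
 * registers: decryption uses the ciphertext block saved in ctx->iv,
 * encryption uses the last ciphertext block just written to sg_dst.
 */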

static void rk_update_iv(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u32 ivsize = crypto_ablkcipher_ivsize(tfm);
	u8 *new_iv = NULL;

	if (ctx->mode & RK_CRYPTO_DEC) {
		new_iv = ctx->iv;
	} else {
		new_iv = page_address(sg_page(dev->sg_dst)) +
			 dev->sg_dst->offset + dev->sg_dst->length - ivsize;
	}

	if (ivsize == DES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
	else if (ivsize == AES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
}

/* return:
 *	0	no error, continue processing
 *	< 0	an error occurred
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
	int err = 0;
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);

	dev->unload_data(dev);
	if (!dev->aligned) {
		if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
					  dev->addr_vir, dev->count,
					  dev->total - dev->left_bytes -
					  dev->count)) {
			err = -EINVAL;
			goto out_rx;
		}
	}
	if (dev->left_bytes) {
		rk_update_iv(dev);
		if (dev->aligned) {
			if (sg_is_last(dev->sg_src)) {
				dev_err(dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			dev->sg_src = sg_next(dev->sg_src);
			dev->sg_dst = sg_next(dev->sg_dst);
		}
		err = rk_set_data_start(dev);
	} else {
		rk_iv_copyback(dev);
		/* the whole calculation finished without error */
		dev->complete(dev->async_req, 0);
		tasklet_schedule(&dev->queue_task);
	}
out_rx:
	return err;
}
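
/*
 * Per-transform setup: bind the transform to the device, install the
 * driver callbacks, allocate one bounce page for unaligned data and
 * enable the engine clock.
 */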

static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct rk_crypto_tmp *algt;

	algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);

	ctx->dev = algt->dev;
	ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
	ctx->dev->start = rk_ablk_start;
	ctx->dev->update = rk_ablk_rx;
	ctx->dev->complete = rk_crypto_complete;
	ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

	return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	free_page((unsigned long)ctx->dev->addr_vir);
	ctx->dev->disable_clk(ctx->dev);
}
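
/*
 * Algorithm templates exported for registration with the kernel crypto
 * API by the main rk3288_crypto driver.
 */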

struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "ecb-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_ecb_encrypt,
			.decrypt	= rk_aes_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "cbc-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_cbc_encrypt,
			.decrypt	= rk_aes_cbc_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "ecb(des)",
		.cra_driver_name	= "ecb-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= rk_des_setkey,
			.encrypt	= rk_des_ecb_encrypt,
			.decrypt	= rk_des_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "cbc(des)",
		.cra_driver_name	= "cbc-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_des_setkey,
			.encrypt	= rk_des_cbc_encrypt,
			.decrypt	= rk_des_cbc_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "ecb(des3_ede)",
		.cra_driver_name	= "ecb-des3-ede-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des3_ede_ecb_encrypt,
			.decrypt	= rk_des3_ede_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "cbc(des3_ede)",
		.cra_driver_name	= "cbc-des3-ede-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des3_ede_cbc_encrypt,
			.decrypt	= rk_des3_ede_cbc_decrypt,
		}
	}
};