// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */
11 #include "rk3288_crypto.h"
13 #define RK_CRYPTO_DEC BIT(0)
15 static void rk_crypto_complete(struct crypto_async_request *base, int err)
18 base->complete(base, err);
21 static int rk_handle_req(struct rk_crypto_info *dev,
22 struct ablkcipher_request *req)
24 if (!IS_ALIGNED(req->nbytes, dev->align_size))
27 return dev->enqueue(dev, &req->base);
30 static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
31 const u8 *key, unsigned int keylen)
33 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
34 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
36 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
37 keylen != AES_KEYSIZE_256) {
38 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
42 memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
46 static int rk_des_setkey(struct crypto_ablkcipher *cipher,
47 const u8 *key, unsigned int keylen)
49 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
52 err = verify_ablkcipher_des_key(cipher, key);
57 memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
61 static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
62 const u8 *key, unsigned int keylen)
64 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
67 err = verify_ablkcipher_des3_key(cipher, key);
72 memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
76 static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
78 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
79 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
80 struct rk_crypto_info *dev = ctx->dev;
82 ctx->mode = RK_CRYPTO_AES_ECB_MODE;
83 return rk_handle_req(dev, req);
86 static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
88 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
89 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
90 struct rk_crypto_info *dev = ctx->dev;
92 ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
93 return rk_handle_req(dev, req);
96 static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
98 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
99 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
100 struct rk_crypto_info *dev = ctx->dev;
102 ctx->mode = RK_CRYPTO_AES_CBC_MODE;
103 return rk_handle_req(dev, req);
106 static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
108 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
109 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
110 struct rk_crypto_info *dev = ctx->dev;
112 ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
113 return rk_handle_req(dev, req);
116 static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
118 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
119 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
120 struct rk_crypto_info *dev = ctx->dev;
123 return rk_handle_req(dev, req);
126 static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
128 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
129 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
130 struct rk_crypto_info *dev = ctx->dev;
132 ctx->mode = RK_CRYPTO_DEC;
133 return rk_handle_req(dev, req);
136 static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
138 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
139 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
140 struct rk_crypto_info *dev = ctx->dev;
142 ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
143 return rk_handle_req(dev, req);
146 static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
148 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
149 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
150 struct rk_crypto_info *dev = ctx->dev;
152 ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
153 return rk_handle_req(dev, req);
156 static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
158 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
159 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
160 struct rk_crypto_info *dev = ctx->dev;
162 ctx->mode = RK_CRYPTO_TDES_SELECT;
163 return rk_handle_req(dev, req);
166 static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
168 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
169 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
170 struct rk_crypto_info *dev = ctx->dev;
172 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
173 return rk_handle_req(dev, req);
176 static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
178 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
179 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
180 struct rk_crypto_info *dev = ctx->dev;
182 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
183 return rk_handle_req(dev, req);
186 static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
188 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
189 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
190 struct rk_crypto_info *dev = ctx->dev;
192 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
194 return rk_handle_req(dev, req);
197 static void rk_ablk_hw_init(struct rk_crypto_info *dev)
199 struct ablkcipher_request *req =
200 ablkcipher_request_cast(dev->async_req);
201 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
202 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
203 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
204 u32 ivsize, block, conf_reg = 0;
206 block = crypto_tfm_alg_blocksize(tfm);
207 ivsize = crypto_ablkcipher_ivsize(cipher);
209 if (block == DES_BLOCK_SIZE) {
210 ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
211 RK_CRYPTO_TDES_BYTESWAP_KEY |
212 RK_CRYPTO_TDES_BYTESWAP_IV;
213 CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
214 memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize);
215 conf_reg = RK_CRYPTO_DESSEL;
217 ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
218 RK_CRYPTO_AES_KEY_CHANGE |
219 RK_CRYPTO_AES_BYTESWAP_KEY |
220 RK_CRYPTO_AES_BYTESWAP_IV;
221 if (ctx->keylen == AES_KEYSIZE_192)
222 ctx->mode |= RK_CRYPTO_AES_192BIT_key;
223 else if (ctx->keylen == AES_KEYSIZE_256)
224 ctx->mode |= RK_CRYPTO_AES_256BIT_key;
225 CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
226 memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize);
228 conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
229 RK_CRYPTO_BYTESWAP_BRFIFO;
230 CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
231 CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
232 RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
235 static void crypto_dma_start(struct rk_crypto_info *dev)
237 CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
238 CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
239 CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
240 CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
241 _SBF(RK_CRYPTO_BLOCK_START, 16));
244 static int rk_set_data_start(struct rk_crypto_info *dev)
247 struct ablkcipher_request *req =
248 ablkcipher_request_cast(dev->async_req);
249 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
250 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
251 u32 ivsize = crypto_ablkcipher_ivsize(tfm);
252 u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
253 dev->sg_src->offset + dev->sg_src->length - ivsize;
255 /* Store the iv that need to be updated in chain mode.
256 * And update the IV buffer to contain the next IV for decryption mode.
258 if (ctx->mode & RK_CRYPTO_DEC) {
259 memcpy(ctx->iv, src_last_blk, ivsize);
260 sg_pcopy_to_buffer(dev->first, dev->src_nents, req->info,
261 ivsize, dev->total - ivsize);
264 err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
266 crypto_dma_start(dev);
270 static int rk_ablk_start(struct rk_crypto_info *dev)
272 struct ablkcipher_request *req =
273 ablkcipher_request_cast(dev->async_req);
277 dev->left_bytes = req->nbytes;
278 dev->total = req->nbytes;
279 dev->sg_src = req->src;
280 dev->first = req->src;
281 dev->src_nents = sg_nents(req->src);
282 dev->sg_dst = req->dst;
283 dev->dst_nents = sg_nents(req->dst);
286 spin_lock_irqsave(&dev->lock, flags);
287 rk_ablk_hw_init(dev);
288 err = rk_set_data_start(dev);
289 spin_unlock_irqrestore(&dev->lock, flags);
293 static void rk_iv_copyback(struct rk_crypto_info *dev)
295 struct ablkcipher_request *req =
296 ablkcipher_request_cast(dev->async_req);
297 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
298 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
299 u32 ivsize = crypto_ablkcipher_ivsize(tfm);
301 /* Update the IV buffer to contain the next IV for encryption mode. */
302 if (!(ctx->mode & RK_CRYPTO_DEC)) {
304 memcpy(req->info, sg_virt(dev->sg_dst) +
305 dev->sg_dst->length - ivsize, ivsize);
307 memcpy(req->info, dev->addr_vir +
308 dev->count - ivsize, ivsize);
313 static void rk_update_iv(struct rk_crypto_info *dev)
315 struct ablkcipher_request *req =
316 ablkcipher_request_cast(dev->async_req);
317 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
318 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
319 u32 ivsize = crypto_ablkcipher_ivsize(tfm);
322 if (ctx->mode & RK_CRYPTO_DEC) {
325 new_iv = page_address(sg_page(dev->sg_dst)) +
326 dev->sg_dst->offset + dev->sg_dst->length - ivsize;
329 if (ivsize == DES_BLOCK_SIZE)
330 memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
331 else if (ivsize == AES_BLOCK_SIZE)
332 memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
336 * true some err was occurred
337 * fault no err, continue
339 static int rk_ablk_rx(struct rk_crypto_info *dev)
342 struct ablkcipher_request *req =
343 ablkcipher_request_cast(dev->async_req);
345 dev->unload_data(dev);
347 if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
348 dev->addr_vir, dev->count,
349 dev->total - dev->left_bytes -
355 if (dev->left_bytes) {
358 if (sg_is_last(dev->sg_src)) {
359 dev_err(dev->dev, "[%s:%d] Lack of data\n",
364 dev->sg_src = sg_next(dev->sg_src);
365 dev->sg_dst = sg_next(dev->sg_dst);
367 err = rk_set_data_start(dev);
370 /* here show the calculation is over without any err */
371 dev->complete(dev->async_req, 0);
372 tasklet_schedule(&dev->queue_task);
378 static int rk_ablk_cra_init(struct crypto_tfm *tfm)
380 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
381 struct crypto_alg *alg = tfm->__crt_alg;
382 struct rk_crypto_tmp *algt;
384 algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);
386 ctx->dev = algt->dev;
387 ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
388 ctx->dev->start = rk_ablk_start;
389 ctx->dev->update = rk_ablk_rx;
390 ctx->dev->complete = rk_crypto_complete;
391 ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);
393 return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
396 static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
398 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
400 free_page((unsigned long)ctx->dev->addr_vir);
401 ctx->dev->disable_clk(ctx->dev);
404 struct rk_crypto_tmp rk_ecb_aes_alg = {
405 .type = ALG_TYPE_CIPHER,
407 .cra_name = "ecb(aes)",
408 .cra_driver_name = "ecb-aes-rk",
410 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
412 .cra_blocksize = AES_BLOCK_SIZE,
413 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
414 .cra_alignmask = 0x0f,
415 .cra_type = &crypto_ablkcipher_type,
416 .cra_module = THIS_MODULE,
417 .cra_init = rk_ablk_cra_init,
418 .cra_exit = rk_ablk_cra_exit,
419 .cra_u.ablkcipher = {
420 .min_keysize = AES_MIN_KEY_SIZE,
421 .max_keysize = AES_MAX_KEY_SIZE,
422 .setkey = rk_aes_setkey,
423 .encrypt = rk_aes_ecb_encrypt,
424 .decrypt = rk_aes_ecb_decrypt,
429 struct rk_crypto_tmp rk_cbc_aes_alg = {
430 .type = ALG_TYPE_CIPHER,
432 .cra_name = "cbc(aes)",
433 .cra_driver_name = "cbc-aes-rk",
435 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
437 .cra_blocksize = AES_BLOCK_SIZE,
438 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
439 .cra_alignmask = 0x0f,
440 .cra_type = &crypto_ablkcipher_type,
441 .cra_module = THIS_MODULE,
442 .cra_init = rk_ablk_cra_init,
443 .cra_exit = rk_ablk_cra_exit,
444 .cra_u.ablkcipher = {
445 .min_keysize = AES_MIN_KEY_SIZE,
446 .max_keysize = AES_MAX_KEY_SIZE,
447 .ivsize = AES_BLOCK_SIZE,
448 .setkey = rk_aes_setkey,
449 .encrypt = rk_aes_cbc_encrypt,
450 .decrypt = rk_aes_cbc_decrypt,
455 struct rk_crypto_tmp rk_ecb_des_alg = {
456 .type = ALG_TYPE_CIPHER,
458 .cra_name = "ecb(des)",
459 .cra_driver_name = "ecb-des-rk",
461 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
463 .cra_blocksize = DES_BLOCK_SIZE,
464 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
465 .cra_alignmask = 0x07,
466 .cra_type = &crypto_ablkcipher_type,
467 .cra_module = THIS_MODULE,
468 .cra_init = rk_ablk_cra_init,
469 .cra_exit = rk_ablk_cra_exit,
470 .cra_u.ablkcipher = {
471 .min_keysize = DES_KEY_SIZE,
472 .max_keysize = DES_KEY_SIZE,
473 .setkey = rk_des_setkey,
474 .encrypt = rk_des_ecb_encrypt,
475 .decrypt = rk_des_ecb_decrypt,
480 struct rk_crypto_tmp rk_cbc_des_alg = {
481 .type = ALG_TYPE_CIPHER,
483 .cra_name = "cbc(des)",
484 .cra_driver_name = "cbc-des-rk",
486 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
488 .cra_blocksize = DES_BLOCK_SIZE,
489 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
490 .cra_alignmask = 0x07,
491 .cra_type = &crypto_ablkcipher_type,
492 .cra_module = THIS_MODULE,
493 .cra_init = rk_ablk_cra_init,
494 .cra_exit = rk_ablk_cra_exit,
495 .cra_u.ablkcipher = {
496 .min_keysize = DES_KEY_SIZE,
497 .max_keysize = DES_KEY_SIZE,
498 .ivsize = DES_BLOCK_SIZE,
499 .setkey = rk_des_setkey,
500 .encrypt = rk_des_cbc_encrypt,
501 .decrypt = rk_des_cbc_decrypt,
506 struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
507 .type = ALG_TYPE_CIPHER,
509 .cra_name = "ecb(des3_ede)",
510 .cra_driver_name = "ecb-des3-ede-rk",
512 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
514 .cra_blocksize = DES_BLOCK_SIZE,
515 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
516 .cra_alignmask = 0x07,
517 .cra_type = &crypto_ablkcipher_type,
518 .cra_module = THIS_MODULE,
519 .cra_init = rk_ablk_cra_init,
520 .cra_exit = rk_ablk_cra_exit,
521 .cra_u.ablkcipher = {
522 .min_keysize = DES3_EDE_KEY_SIZE,
523 .max_keysize = DES3_EDE_KEY_SIZE,
524 .ivsize = DES_BLOCK_SIZE,
525 .setkey = rk_tdes_setkey,
526 .encrypt = rk_des3_ede_ecb_encrypt,
527 .decrypt = rk_des3_ede_ecb_decrypt,
532 struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
533 .type = ALG_TYPE_CIPHER,
535 .cra_name = "cbc(des3_ede)",
536 .cra_driver_name = "cbc-des3-ede-rk",
538 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
540 .cra_blocksize = DES_BLOCK_SIZE,
541 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
542 .cra_alignmask = 0x07,
543 .cra_type = &crypto_ablkcipher_type,
544 .cra_module = THIS_MODULE,
545 .cra_init = rk_ablk_cra_init,
546 .cra_exit = rk_ablk_cra_exit,
547 .cra_u.ablkcipher = {
548 .min_keysize = DES3_EDE_KEY_SIZE,
549 .max_keysize = DES3_EDE_KEY_SIZE,
550 .ivsize = DES_BLOCK_SIZE,
551 .setkey = rk_tdes_setkey,
552 .encrypt = rk_des3_ede_cbc_encrypt,
553 .decrypt = rk_des3_ede_cbc_decrypt,