// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 ctr[], u8 final[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[], int);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[], int);
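
/*
 * The round keys are kept in the bit sliced layout produced by
 * aesbs_convert_key(); the buffer below is sized for the AES-256 worst
 * case, with each round key spanning eight 128-bit vectors.
 */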
struct aesbs_ctx {
        int                     rounds;
        u8                      rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
};

struct aesbs_cbc_ctx {
        struct aesbs_ctx        key;
        struct crypto_skcipher  *enc_tfm;
};

struct aesbs_xts_ctx {
        struct aesbs_ctx        key;
        struct crypto_cipher    *cts_tfm;
        struct crypto_cipher    *tweak_tfm;
};

struct aesbs_ctr_ctx {
        struct aesbs_ctx        key;            /* must be first member */
        struct crypto_aes_ctx   fallback;
};
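
/*
 * Expand the key with the generic AES library code, then convert the
 * encryption round keys into the bit sliced layout expected by the NEON
 * routines. AES-128/192/256 use 10/12/14 rounds, i.e. 6 + key_len / 4.
 */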
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                        unsigned int key_len)
{
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

        ctx->rounds = 6 + key_len / 4;

        kernel_neon_begin();
        aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
        kernel_neon_end();

        return 0;
}
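
/*
 * Walk the request and hand full blocks to the NEON routine. Except for
 * the final chunk, the block count is rounded down to a multiple of
 * walk.stride (eight blocks), which is the granularity the assembler
 * code prefers.
 */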
static int __ecb_crypt(struct skcipher_request *req,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
                   ctx->rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_decrypt);
}

static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

        ctx->key.rounds = 6 + key_len / 4;

        kernel_neon_begin();
        aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
        kernel_neon_end();
        memzero_explicit(&rk, sizeof(rk));

        return crypto_skcipher_setkey(ctx->enc_tfm, in_key, key_len);
}
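
/*
 * CBC encryption is inherently sequential, so the eight-way bit sliced
 * NEON code only implements CBC decryption. Encryption is delegated to
 * the cbc(aes) implementation allocated in cbc_init() (ctx->enc_tfm).
 */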
static int cbc_encrypt(struct skcipher_request *req)
{
        struct skcipher_request *subreq = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

        skcipher_request_set_tfm(subreq, ctx->enc_tfm);
        skcipher_request_set_callback(subreq,
                                      skcipher_request_flags(req),
                                      NULL, NULL);
        skcipher_request_set_crypt(subreq, req->src, req->dst,
                                   req->cryptlen, req->iv);

        return crypto_skcipher_encrypt(subreq);
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                  ctx->key.rk, ctx->key.rounds, blocks,
                                  walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}

static int cbc_init(struct crypto_skcipher *tfm)
{
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        unsigned int reqsize;

        ctx->enc_tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(ctx->enc_tfm))
                return PTR_ERR(ctx->enc_tfm);

        reqsize = sizeof(struct skcipher_request);
        reqsize += crypto_skcipher_reqsize(ctx->enc_tfm);
        crypto_skcipher_set_reqsize(tfm, reqsize);

        return 0;
}

static void cbc_exit(struct crypto_skcipher *tfm)
{
        struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(ctx->enc_tfm);
}

static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
                                 unsigned int key_len)
{
        struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = aes_expandkey(&ctx->fallback, in_key, key_len);
        if (err)
                return err;

        ctx->key.rounds = 6 + key_len / 4;

        kernel_neon_begin();
        aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
        kernel_neon_end();

        return 0;
}
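
/*
 * CTR mode: full blocks are handled by the NEON code. When the request
 * ends in a partial block, the assembler routine deposits the keystream
 * for the final counter value in 'final', and the remaining bytes are
 * XORed with it here. On chunks that are not the last one, 'final' is
 * cleared so that only whole blocks are consumed.
 */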
static int ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        u8 buf[AES_BLOCK_SIZE];
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
                u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;

                if (walk.nbytes < walk.total) {
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);
                        final = NULL;
                }

                kernel_neon_begin();
                aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                  ctx->rk, ctx->rounds, blocks, walk.iv, final);
                kernel_neon_end();

                if (final) {
                        u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
                        u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

                        crypto_xor_cpy(dst, src, final,
                                       walk.total % AES_BLOCK_SIZE);

                        err = skcipher_walk_done(&walk, 0);
                        break;
                }
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}

static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
        struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        unsigned long flags;

        /*
         * Temporarily disable interrupts to avoid races where
         * cachelines are evicted when the CPU is interrupted
         * to do something else.
         */
        local_irq_save(flags);
        aes_encrypt(&ctx->fallback, dst, src);
        local_irq_restore(flags);
}
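
/*
 * The NEON unit cannot be used in every context (crypto_simd_usable()
 * reports whether it is safe here). When it is not, walk the request with
 * the generic CTR helper and the scalar AES library cipher instead.
 */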
static int ctr_encrypt_sync(struct skcipher_request *req)
{
        if (!crypto_simd_usable())
                return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

        return ctr_encrypt(req);
}
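
/*
 * XTS keys are twice the AES key size: the first half keys the data
 * cipher (and the bit sliced NEON code), the second half keys the plain
 * AES cipher used to encrypt the initial tweak.
 */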
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = xts_verify_key(tfm, in_key, key_len);
        if (err)
                return err;

        key_len /= 2;
        err = crypto_cipher_setkey(ctx->cts_tfm, in_key, key_len);
        if (err)
                return err;
        err = crypto_cipher_setkey(ctx->tweak_tfm, in_key + key_len, key_len);
        if (err)
                return err;

        return aesbs_setkey(tfm, in_key, key_len);
}

static int xts_init(struct crypto_skcipher *tfm)
{
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->cts_tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(ctx->cts_tfm))
                return PTR_ERR(ctx->cts_tfm);

        ctx->tweak_tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(ctx->tweak_tfm))
                crypto_free_cipher(ctx->cts_tfm);

        return PTR_ERR_OR_ZERO(ctx->tweak_tfm);
}

static void xts_exit(struct crypto_skcipher *tfm)
{
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_cipher(ctx->tweak_tfm);
        crypto_free_cipher(ctx->cts_tfm);
}
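
/*
 * When the length is not a multiple of the block size, XTS uses
 * ciphertext stealing: the request is truncated to a whole number of
 * blocks for the NEON pass (with the last two tweaks applied in swapped
 * order on decryption), and the final partial block is completed with
 * ciphertext stolen from the last full block and processed with the
 * plain AES cipher at the end.
 */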
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[], int))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int tail = req->cryptlen % AES_BLOCK_SIZE;
        struct skcipher_request subreq;
        u8 buf[2 * AES_BLOCK_SIZE];
        struct skcipher_walk walk;
        int err;

        if (req->cryptlen < AES_BLOCK_SIZE)
                return -EINVAL;

        if (unlikely(tail)) {
                skcipher_request_set_tfm(&subreq, tfm);
                skcipher_request_set_callback(&subreq,
                                              skcipher_request_flags(req),
                                              NULL, NULL);
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           req->cryptlen - tail, req->iv);
                req = &subreq;
        }

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
                int reorder_last_tweak = !encrypt && tail > 0;

                if (walk.nbytes < walk.total) {
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);
                        reorder_last_tweak = 0;
                }

                kernel_neon_begin();
                fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
                   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        if (err || likely(!tail))
                return err;

        /* handle ciphertext stealing */
        scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
                                 AES_BLOCK_SIZE, 0);
        memcpy(buf + AES_BLOCK_SIZE, buf, tail);
        scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0);

        crypto_xor(buf, req->iv, AES_BLOCK_SIZE);

        if (encrypt)
                crypto_cipher_encrypt_one(ctx->cts_tfm, buf, buf);
        else
                crypto_cipher_decrypt_one(ctx->cts_tfm, buf, buf);

        crypto_xor(buf, req->iv, AES_BLOCK_SIZE);

        scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
                                 AES_BLOCK_SIZE + tail, 1);

        return 0;
}

static int xts_encrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, false, aesbs_xts_decrypt);
}
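
/*
 * The "__" prefixed, CRYPTO_ALG_INTERNAL algorithms may only be invoked
 * where the NEON unit is usable; aes_init() wraps each of them in a simd
 * helper that defers to cryptd otherwise. The ctr-aes-neonbs-sync variant
 * carries its own scalar fallback and can be used directly.
 */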
static struct skcipher_alg aes_algs[] = { {
        .base.cra_name          = "__ecb(aes)",
        .base.cra_driver_name   = "__ecb-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .setkey                 = aesbs_setkey,
        .encrypt                = ecb_encrypt,
        .decrypt                = ecb_decrypt,
}, {
        .base.cra_name          = "__cbc(aes)",
        .base.cra_driver_name   = "__cbc-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_cbc_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_cbc_setkey,
        .encrypt                = cbc_encrypt,
        .decrypt                = cbc_decrypt,
        .init                   = cbc_init,
        .exit                   = cbc_exit,
}, {
        .base.cra_name          = "__ctr(aes)",
        .base.cra_driver_name   = "__ctr-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_setkey,
        .encrypt                = ctr_encrypt,
        .decrypt                = ctr_encrypt,
}, {
        .base.cra_name          = "ctr(aes)",
        .base.cra_driver_name   = "ctr-aes-neonbs-sync",
        .base.cra_priority      = 250 - 1,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctr_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_ctr_setkey_sync,
        .encrypt                = ctr_encrypt_sync,
        .decrypt                = ctr_encrypt_sync,
}, {
        .base.cra_name          = "__xts(aes)",
        .base.cra_driver_name   = "__xts-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_xts_ctx),
        .base.cra_module        = THIS_MODULE,
        .base.cra_flags         = CRYPTO_ALG_INTERNAL,

        .min_keysize            = 2 * AES_MIN_KEY_SIZE,
        .max_keysize            = 2 * AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_xts_setkey,
        .encrypt                = xts_encrypt,
        .decrypt                = xts_decrypt,
        .init                   = xts_init,
        .exit                   = xts_exit,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
                if (aes_simd_algs[i])
                        simd_skcipher_free(aes_simd_algs[i]);

        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
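
/*
 * Only register if the CPU has NEON. Each CRYPTO_ALG_INTERNAL algorithm
 * gets a simd wrapper (dropping the "__" prefix); on failure, aes_exit()
 * unwinds both the wrappers created so far and the skcipher registrations.
 */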
static int __init aes_init(void)
{
        struct simd_skcipher_alg *simd;
        const char *basename;
        const char *algname;
        const char *drvname;
        int err;
        int i;

        if (!(elf_hwcap & HWCAP_NEON))
                return -ENODEV;

        err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        if (err)
                return err;

        for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
                if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
                        continue;

                algname = aes_algs[i].base.cra_name + 2;
                drvname = aes_algs[i].base.cra_driver_name + 2;
                basename = aes_algs[i].base.cra_driver_name;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        goto unregister_simds;

                aes_simd_algs[i] = simd;
        }
        return 0;

unregister_simds:
        aes_exit();
        return err;
}

late_initcall(aes_init);
module_exit(aes_exit);