// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared glue code for 128bit block ciphers
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <linux/module.h>
#include <crypto/b128ops.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <asm/crypto/glue_helper.h>
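
/*
 * Usage note (illustrative sketch, not from the original file): callers of
 * the gctx-based helpers below pass a common_glue_ctx that lists the
 * available implementations from the largest batch size down to a
 * single-block fallback (the last entry must have num_blocks == 1).  The
 * function names below are hypothetical placeholders for a cipher's 8-way
 * SIMD and one-block routines:
 *
 *	static const struct common_glue_ctx example_enc = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,
 *
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .ecb = example_ecb_enc_8way }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .ecb = example_encrypt_1blk }
 *		} }
 *	};
 */
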
int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int func_bytes;
		unsigned int i;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);
		for (i = 0; i < gctx->num_funcs; i++) {
			func_bytes = bsize * gctx->funcs[i].num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				gctx->funcs[i].fn_u.ecb(ctx, dst, src);
				src += func_bytes;
				dst += func_bytes;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				break;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}
EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);
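
/*
 * Illustrative sketch (hypothetical names, not part of the original file):
 * a driver's skcipher ->encrypt()/->decrypt() callbacks normally just
 * forward the request to these helpers, e.g.:
 *
 *	static int example_ecb_encrypt(struct skcipher_request *req)
 *	{
 *		return glue_ecb_req_128bit(&example_enc, req);
 *	}
 *
 *	static int example_cbc_encrypt(struct skcipher_request *req)
 *	{
 *		return glue_cbc_encrypt_req_128bit(example_encrypt_1blk, req);
 *	}
 *
 * where example_encrypt_1blk has the common_glue_func_t prototype.
 */
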
int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
				struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u128 *src = (u128 *)walk.src.virt.addr;
		u128 *dst = (u128 *)walk.dst.virt.addr;
		u128 *iv = (u128 *)walk.iv;

		do {
			u128_xor(dst, src, iv);
			fn(ctx, (u8 *)dst, (u8 *)dst);
			iv = dst;
			src++;
			dst++;
			nbytes -= bsize;
		} while (nbytes >= bsize);

		*(u128 *)walk.iv = *iv;
		err = skcipher_walk_done(&walk, nbytes);
	}
	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_encrypt_req_128bit);
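
/*
 * CBC decryption computes P_i = Decrypt(C_i) xor C_{i-1}, with C_0 = IV.
 * Unlike encryption there is no chaining dependency on the decrypt side, so
 * each walk segment is processed back to front: fn_u.cbc() handles
 * num_blocks blocks per call, the glue code XORs the first block of each
 * batch with the ciphertext block preceding it (or with walk.iv for the
 * segment's first block), and the segment's last ciphertext block is saved
 * in last_iv to become the IV for the next segment.
 */
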
int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
				struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u128 *src = walk.src.virt.addr;
		u128 *dst = walk.dst.virt.addr;
		unsigned int func_bytes, num_blocks;
		unsigned int i;
		u128 last_iv;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);
		/* Start of the last block. */
		src += nbytes / bsize - 1;
		dst += nbytes / bsize - 1;

		last_iv = *src;

		for (i = 0; i < gctx->num_funcs; i++) {
			num_blocks = gctx->funcs[i].num_blocks;
			func_bytes = bsize * num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				src -= num_blocks - 1;
				dst -= num_blocks - 1;

				gctx->funcs[i].fn_u.cbc(ctx, (u8 *)dst,
							(const u8 *)src);

				nbytes -= func_bytes;
				if (nbytes < bsize)
					goto done;

				u128_xor(dst, dst, --src);
				dst--;
			} while (nbytes >= func_bytes);
		}
done:
		u128_xor(dst, dst, (u128 *)walk.iv);
		*(u128 *)walk.iv = last_iv;
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);

MODULE_LICENSE("GPL");