arch/x86/crypto/camellia_aesni_avx2_glue.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 */

#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32

/* 32-way AVX2/AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_32way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void camellia_ecb_dec_32way(const void *ctx, u8 *dst, const u8 *src);

asmlinkage void camellia_cbc_dec_32way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void camellia_ctr_32way(const void *ctx, u8 *dst, const u8 *src,
                                   le128 *iv);

asmlinkage void camellia_xts_enc_32way(const void *ctx, u8 *dst, const u8 *src,
                                       le128 *iv);
asmlinkage void camellia_xts_dec_32way(const void *ctx, u8 *dst, const u8 *src,
                                       le128 *iv);

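/*
 * Dispatch tables for the generic glue helper.  Each common_glue_ctx lists
 * its routines in decreasing num_blocks order; for every chunk of a request
 * the helper calls the widest routine that still fits the remaining data:
 * 32-way AVX2, then the 16-way AVX functions, then the 2-way/1-block
 * assembler fallbacks.  fpu_blocks_limit means the SIMD unit is claimed only
 * while at least that many (16) blocks are pending, so short requests never
 * touch the vector registers.
 */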
static const struct common_glue_ctx camellia_enc = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ecb = camellia_ecb_enc_32way }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = camellia_ecb_enc_16way }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = camellia_enc_blk_2way }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = camellia_enc_blk }
        } }
};

static const struct common_glue_ctx camellia_ctr = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ctr = camellia_ctr_32way }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ctr = camellia_ctr_16way }
        }, {
                .num_blocks = 2,
                .fn_u = { .ctr = camellia_crypt_ctr_2way }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = camellia_crypt_ctr }
        } }
};

static const struct common_glue_ctx camellia_enc_xts = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .xts = camellia_xts_enc_32way }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = camellia_xts_enc_16way }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = camellia_xts_enc }
        } }
};

static const struct common_glue_ctx camellia_dec = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ecb = camellia_ecb_dec_32way }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = camellia_ecb_dec_16way }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = camellia_dec_blk_2way }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = camellia_dec_blk }
        } }
};

static const struct common_glue_ctx camellia_dec_cbc = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .cbc = camellia_cbc_dec_32way }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .cbc = camellia_cbc_dec_16way }
        }, {
                .num_blocks = 2,
                .fn_u = { .cbc = camellia_decrypt_cbc_2way }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = camellia_dec_blk }
        } }
};

static const struct common_glue_ctx camellia_dec_xts = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .xts = camellia_xts_dec_32way }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = camellia_xts_dec_16way }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = camellia_xts_dec }
        } }
};

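/*
 * skcipher entry points.  camellia_setkey() feeds the key into the common
 * camellia key schedule (__camellia_setkey, shared with the other x86
 * camellia modules); the ECB/CBC/CTR handlers below just hand the request
 * to the generic 128-bit glue helpers, which walk the scatterlists and
 * dispatch through the tables above.
 */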
static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
{
        return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen);
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
        return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
        return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
        return glue_ctr_req_128bit(&camellia_ctr, req);
}

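/*
 * XTS: the tweak is always derived by encrypting the IV with the separate
 * tweak key, so both directions pass camellia_enc_blk as the tweak function;
 * the final argument only tells the glue helper whether the data path is
 * decryption, which it needs when handling a trailing partial block
 * (ciphertext stealing).
 */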
static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_enc_xts, req, camellia_enc_blk,
                                   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_dec_xts, req, camellia_enc_blk,
                                   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
}

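/*
 * The "__" names and CRYPTO_ALG_INTERNAL keep these algorithms from being
 * used directly; they are only reachable through the simd wrappers
 * registered in camellia_aesni_init(), which expose the plain
 * "ecb(camellia)", "cbc(camellia)", ... names and defer to an asynchronous
 * (cryptd) invocation whenever the SIMD registers cannot be used in the
 * caller's context.
 */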
static struct skcipher_alg camellia_algs[] = {
        {
                .base.cra_name          = "__ecb(camellia)",
                .base.cra_driver_name   = "__ecb-camellia-aesni-avx2",
                .base.cra_priority      = 500,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = ecb_encrypt,
                .decrypt                = ecb_decrypt,
        }, {
                .base.cra_name          = "__cbc(camellia)",
                .base.cra_driver_name   = "__cbc-camellia-aesni-avx2",
                .base.cra_priority      = 500,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = cbc_encrypt,
                .decrypt                = cbc_decrypt,
        }, {
                .base.cra_name          = "__ctr(camellia)",
                .base.cra_driver_name   = "__ctr-camellia-aesni-avx2",
                .base.cra_priority      = 500,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct camellia_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .chunksize              = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_setkey,
                .encrypt                = ctr_crypt,
                .decrypt                = ctr_crypt,
        }, {
                .base.cra_name          = "__xts(camellia)",
                .base.cra_driver_name   = "__xts-camellia-aesni-avx2",
                .base.cra_priority      = 500,
                .base.cra_flags         = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_xts_ctx),
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = 2 * CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = 2 * CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = xts_camellia_setkey,
                .encrypt                = xts_encrypt,
                .decrypt                = xts_decrypt,
        },
};

static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];

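/*
 * Register only when AVX, AVX2, AES-NI and OSXSAVE are available and the
 * kernel saves the SSE/YMM register state (cpu_has_xfeatures); otherwise
 * module load fails with -ENODEV and the other camellia implementations
 * remain in use.
 */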
static int __init camellia_aesni_init(void)
{
        const char *feature_name;

        if (!boot_cpu_has(X86_FEATURE_AVX) ||
            !boot_cpu_has(X86_FEATURE_AVX2) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX2 or AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return simd_register_skciphers_compat(camellia_algs,
                                              ARRAY_SIZE(camellia_algs),
                                              camellia_simd_algs);
}

static void __exit camellia_aesni_fini(void)
{
        simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
                                  camellia_simd_algs);
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");