// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementations
 * using Supplemental SSE3, AVX, AVX2 and SHA-NI instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 */

#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha1.h>
#include <crypto/sha1_base.h>
#include <asm/simd.h>

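/*
 * Common update path: fall back to the generic C implementation when the
 * FPU is not usable in this context, or when the new data cannot complete
 * even one 64-byte block (the SIMD code only processes whole blocks, so
 * entering a kernel_fpu_begin()/end() section would be pure overhead).
 */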
static int sha1_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len, sha1_block_fn *sha1_xform)
{
        struct sha1_state *sctx = shash_desc_ctx(desc);

        if (!crypto_simd_usable() ||
            (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
                return crypto_sha1_update(desc, data, len);

        /*
         * Make sure struct sha1_state begins directly with the SHA1
         * 160-bit internal state, as this is what the asm functions expect.
         */
        BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

        kernel_fpu_begin();
        sha1_base_do_update(desc, data, len, sha1_xform);
        kernel_fpu_end();

        return 0;
}

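/*
 * Common finup path: hash any remaining data and the final padding in one
 * FPU section; sha1_base_finish() then copies out the digest.
 */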
static int sha1_finup(struct shash_desc *desc, const u8 *data,
                      unsigned int len, u8 *out, sha1_block_fn *sha1_xform)
{
        if (!crypto_simd_usable())
                return crypto_sha1_finup(desc, data, len, out);

        kernel_fpu_begin();
        if (len)
                sha1_base_do_update(desc, data, len, sha1_xform);
        sha1_base_do_finalize(desc, sha1_xform);
        kernel_fpu_end();

        return sha1_base_finish(desc, out);
}

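/*
 * The transform routines below are implemented in assembler; each takes
 * the raw 160-bit state (hence the BUILD_BUG_ON() above) and a whole
 * number of 64-byte blocks.
 */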
asmlinkage void sha1_transform_ssse3(struct sha1_state *state,
                                     const u8 *data, int blocks);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha1_update(desc, data, len, sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
                              unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out, sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
        return sha1_ssse3_finup(desc, NULL, 0, out);
}

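/*
 * All variants register under the same algorithm name "sha1"; the crypto
 * core picks the implementation with the highest cra_priority, so sha1-ni
 * (250) beats sha1-avx2 (170), sha1-avx (160) and sha1-ssse3 (150).
 */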
static struct shash_alg sha1_ssse3_alg = {
        .digestsize     =       SHA1_DIGEST_SIZE,
        .init           =       sha1_base_init,
        .update         =       sha1_ssse3_update,
        .final          =       sha1_ssse3_final,
        .finup          =       sha1_ssse3_finup,
        .descsize       =       sizeof(struct sha1_state),
        .base           =       {
                .cra_name       =       "sha1",
                .cra_driver_name =      "sha1-ssse3",
                .cra_priority   =       150,
                .cra_blocksize  =       SHA1_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
};

static int register_sha1_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                return crypto_register_shash(&sha1_ssse3_alg);
        return 0;
}

static void unregister_sha1_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shash(&sha1_ssse3_alg);
}

asmlinkage void sha1_transform_avx(struct sha1_state *state,
                                   const u8 *data, int blocks);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha1_update(desc, data, len, sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
                              unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out, sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
        return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
        .digestsize     =       SHA1_DIGEST_SIZE,
        .init           =       sha1_base_init,
        .update         =       sha1_avx_update,
        .final          =       sha1_avx_final,
        .finup          =       sha1_avx_finup,
        .descsize       =       sizeof(struct sha1_state),
        .base           =       {
                .cra_name       =       "sha1",
                .cra_driver_name =      "sha1-avx",
                .cra_priority   =       160,
                .cra_blocksize  =       SHA1_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
};

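/*
 * AVX is only usable if the kernel saves/restores the extended SSE and YMM
 * register state; cpu_has_xfeatures() checks for exactly that, which is
 * stricter than the bare X86_FEATURE_AVX CPUID bit.
 */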
static bool avx_usable(void)
{
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
                if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }

        return true;
}

static int register_sha1_avx(void)
{
        if (avx_usable())
                return crypto_register_shash(&sha1_avx_alg);
        return 0;
}

static void unregister_sha1_avx(void)
{
        if (avx_usable())
                crypto_unregister_shash(&sha1_avx_alg);
}

#define SHA1_AVX2_BLOCK_OPTSIZE 4       /* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(struct sha1_state *state,
                                    const u8 *data, int blocks);

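/*
 * Beyond usable AVX state, the AVX2 transform also requires the BMI1 and
 * BMI2 instruction set extensions.
 */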
static bool avx2_usable(void)
{
        if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
            boot_cpu_has(X86_FEATURE_BMI1) &&
            boot_cpu_has(X86_FEATURE_BMI2))
                return true;

        return false;
}

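/*
 * The AVX2 code path has higher fixed overhead and only wins for larger
 * inputs, so dispatch to the plain AVX transform for anything shorter
 * than SHA1_AVX2_BLOCK_OPTSIZE (4) blocks.
 */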
static void sha1_apply_transform_avx2(struct sha1_state *state,
                                      const u8 *data, int blocks)
{
        /* Select the optimal transform based on data block size */
        if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE)
                sha1_transform_avx2(state, data, blocks);
        else
                sha1_transform_avx(state, data, blocks);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha1_update(desc, data, len, sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
                              unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out, sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
        return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
        .digestsize     =       SHA1_DIGEST_SIZE,
        .init           =       sha1_base_init,
        .update         =       sha1_avx2_update,
        .final          =       sha1_avx2_final,
        .finup          =       sha1_avx2_finup,
        .descsize       =       sizeof(struct sha1_state),
        .base           =       {
                .cra_name       =       "sha1",
                .cra_driver_name =      "sha1-avx2",
                .cra_priority   =       170,
                .cra_blocksize  =       SHA1_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
};

static int register_sha1_avx2(void)
{
        if (avx2_usable())
                return crypto_register_shash(&sha1_avx2_alg);
        return 0;
}

static void unregister_sha1_avx2(void)
{
        if (avx2_usable())
                crypto_unregister_shash(&sha1_avx2_alg);
}

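/*
 * The SHA-NI glue is only built when the assembler understands the SHA
 * extensions (CONFIG_AS_SHA1_NI); at runtime it additionally depends on
 * the X86_FEATURE_SHA_NI CPUID bit.
 */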
#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(struct sha1_state *digest, const u8 *data,
                                  int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha1_update(desc, data, len, sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
                              unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out, sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
        return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
        .digestsize     =       SHA1_DIGEST_SIZE,
        .init           =       sha1_base_init,
        .update         =       sha1_ni_update,
        .final          =       sha1_ni_final,
        .finup          =       sha1_ni_finup,
        .descsize       =       sizeof(struct sha1_state),
        .base           =       {
                .cra_name       =       "sha1",
                .cra_driver_name =      "sha1-ni",
                .cra_priority   =       250,
                .cra_blocksize  =       SHA1_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
};

static int register_sha1_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                return crypto_register_shash(&sha1_ni_alg);
        return 0;
}

static void unregister_sha1_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                crypto_unregister_shash(&sha1_ni_alg);
}

#else
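/* Stub out the SHA-NI hooks so the init/exit paths stay #ifdef-free. */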
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif

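/*
 * Register every variant the CPU supports; on failure, unwind all
 * previously registered variants before bailing out with -ENODEV.
 */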
static int __init sha1_ssse3_mod_init(void)
{
        if (register_sha1_ssse3())
                goto fail;

        if (register_sha1_avx()) {
                unregister_sha1_ssse3();
                goto fail;
        }

        if (register_sha1_avx2()) {
                unregister_sha1_avx();
                unregister_sha1_ssse3();
                goto fail;
        }

        if (register_sha1_ni()) {
                unregister_sha1_avx2();
                unregister_sha1_avx();
                unregister_sha1_ssse3();
                goto fail;
        }

        return 0;
fail:
        return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
        unregister_sha1_ni();
        unregister_sha1_avx2();
        unregister_sha1_avx();
        unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif