// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006  Michal Ludvig <michal@logix.cz>
 */

#include <crypto/internal/hash.h>
#include <crypto/padlock.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>

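/*
 * On pre-Nano CPUs the PHE is only used here to produce a finalized digest
 * (hardware multi-part hashing is a Nano feature, see below), so the driver
 * keeps a software shash fallback: init/update/export/import are delegated
 * to it and the hardware only finishes the hash in the finup path.
 */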
struct padlock_sha_desc {
        struct shash_desc fallback;
};

struct padlock_sha_ctx {
        struct crypto_shash *fallback;
};

static int padlock_sha_init(struct shash_desc *desc)
{
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
        struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);

        dctx->fallback.tfm = ctx->fallback;
        return crypto_shash_init(&dctx->fallback);
}

static int padlock_sha_update(struct shash_desc *desc,
                              const u8 *data, unsigned int length)
{
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);

        return crypto_shash_update(&dctx->fallback, data, length);
}

static int padlock_sha_export(struct shash_desc *desc, void *out)
{
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);

        return crypto_shash_export(&dctx->fallback, out);
}

static int padlock_sha_import(struct shash_desc *desc, const void *in)
{
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
        struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);

        dctx->fallback.tfm = ctx->fallback;
        return crypto_shash_import(&dctx->fallback, in);
}

static inline void padlock_output_block(uint32_t *src,
                        uint32_t *dst, size_t count)
{
        while (count--)
                *dst++ = swab32(*src++);
}

static int padlock_sha1_finup(struct shash_desc *desc, const u8 *in,
                              unsigned int count, u8 *out)
{
        /*
         * We can't store directly to *out as it may be unaligned.
         * Don't reduce the buffer size below 128 bytes: the PadLock
         * microcode needs it that big.
         */
        char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
                ((aligned(STACK_ALIGN)));
        char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
        struct sha1_state state;
        unsigned int space;
        unsigned int leftover;
        int err;

        err = crypto_shash_export(&dctx->fallback, &state);
        if (err)
                goto out;

        if (state.count + count > ULONG_MAX)
                return crypto_shash_finup(&dctx->fallback, in, count, out);

        leftover = ((state.count - 1) & (SHA1_BLOCK_SIZE - 1)) + 1;
        space = SHA1_BLOCK_SIZE - leftover;
        if (space) {
                if (count > space) {
                        err = crypto_shash_update(&dctx->fallback, in, space) ?:
                              crypto_shash_export(&dctx->fallback, &state);
                        if (err)
                                goto out;
                        count -= space;
                        in += space;
                } else {
                        memcpy(state.buffer + leftover, in, count);
                        in = state.buffer;
                        count += leftover;
                        state.count &= ~(SHA1_BLOCK_SIZE - 1);
                }
        }

        memcpy(result, &state.state, SHA1_DIGEST_SIZE);

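        /*
         * REP XSHA1: EDI points at the intermediate digest, ESI at the
         * remaining input.  EAX is loaded with the number of bytes already
         * hashed and ECX with the total message length, so the hardware
         * can finish the hash, padding included.
         */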
        asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
                      :
                      : "c"((unsigned long)state.count + count),
                        "a"((unsigned long)state.count),
                        "S"(in), "D"(result));

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);

out:
        return err;
}

static int padlock_sha1_final(struct shash_desc *desc, u8 *out)
{
        u8 buf[4];

        return padlock_sha1_finup(desc, buf, 0, out);
}

static int padlock_sha256_finup(struct shash_desc *desc, const u8 *in,
                                unsigned int count, u8 *out)
{
        /*
         * We can't store directly to *out as it may be unaligned.
         * Don't reduce the buffer size below 128 bytes: the PadLock
         * microcode needs it that big.
         */
        char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
                ((aligned(STACK_ALIGN)));
        char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
        struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
        struct sha256_state state;
        unsigned int space;
        unsigned int leftover;
        int err;

        err = crypto_shash_export(&dctx->fallback, &state);
        if (err)
                goto out;

        if (state.count + count > ULONG_MAX)
                return crypto_shash_finup(&dctx->fallback, in, count, out);

        leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;
        space = SHA256_BLOCK_SIZE - leftover;
        if (space) {
                if (count > space) {
                        err = crypto_shash_update(&dctx->fallback, in, space) ?:
                              crypto_shash_export(&dctx->fallback, &state);
                        if (err)
                                goto out;
                        count -= space;
                        in += space;
                } else {
                        memcpy(state.buf + leftover, in, count);
                        in = state.buf;
                        count += leftover;
                        state.count &= ~(SHA256_BLOCK_SIZE - 1);
                }
        }

        memcpy(result, &state.state, SHA256_DIGEST_SIZE);

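        /* Same finalization scheme as the SHA-1 path above, via REP XSHA256. */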
        asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
                      :
                      : "c"((unsigned long)state.count + count),
                        "a"((unsigned long)state.count),
                        "S"(in), "D"(result));

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);

out:
        return err;
}

static int padlock_sha256_final(struct shash_desc *desc, u8 *out)
{
        u8 buf[4];

        return padlock_sha256_finup(desc, buf, 0, out);
}

static int padlock_cra_init(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);
        const char *fallback_driver_name = crypto_tfm_alg_name(tfm);
        struct padlock_sha_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *fallback_tfm;
        int err = -ENOMEM;

        /* Allocate a fallback and abort if it failed. */
        fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0,
                                          CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(fallback_tfm)) {
                printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
                       fallback_driver_name);
                err = PTR_ERR(fallback_tfm);
                goto out;
        }

        ctx->fallback = fallback_tfm;
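        /* Reserve room in our descriptor for the fallback's own state. */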
        hash->descsize += crypto_shash_descsize(fallback_tfm);
        return 0;

out:
        return err;
}

static void padlock_cra_exit(struct crypto_tfm *tfm)
{
        struct padlock_sha_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(ctx->fallback);
}

static struct shash_alg sha1_alg = {
        .digestsize     =       SHA1_DIGEST_SIZE,
        .init           =       padlock_sha_init,
        .update         =       padlock_sha_update,
        .finup          =       padlock_sha1_finup,
        .final          =       padlock_sha1_final,
        .export         =       padlock_sha_export,
        .import         =       padlock_sha_import,
        .descsize       =       sizeof(struct padlock_sha_desc),
        .statesize      =       sizeof(struct sha1_state),
        .base           =       {
                .cra_name               =       "sha1",
                .cra_driver_name        =       "sha1-padlock",
                .cra_priority           =       PADLOCK_CRA_PRIORITY,
                .cra_flags              =       CRYPTO_ALG_NEED_FALLBACK,
                .cra_blocksize          =       SHA1_BLOCK_SIZE,
                .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
                .cra_module             =       THIS_MODULE,
                .cra_init               =       padlock_cra_init,
                .cra_exit               =       padlock_cra_exit,
        }
};

static struct shash_alg sha256_alg = {
        .digestsize     =       SHA256_DIGEST_SIZE,
        .init           =       padlock_sha_init,
        .update         =       padlock_sha_update,
        .finup          =       padlock_sha256_finup,
        .final          =       padlock_sha256_final,
        .export         =       padlock_sha_export,
        .import         =       padlock_sha_import,
        .descsize       =       sizeof(struct padlock_sha_desc),
        .statesize      =       sizeof(struct sha256_state),
        .base           =       {
                .cra_name               =       "sha256",
                .cra_driver_name        =       "sha256-padlock",
                .cra_priority           =       PADLOCK_CRA_PRIORITY,
                .cra_flags              =       CRYPTO_ALG_NEED_FALLBACK,
                .cra_blocksize          =       SHA256_BLOCK_SIZE,
                .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
                .cra_module             =       THIS_MODULE,
                .cra_init               =       padlock_cra_init,
                .cra_exit               =       padlock_cra_exit,
        }
};

/*
 * Two more shash_alg instances for the hardware multi-part hash
 * supported by the VIA Nano processor.
 */
static int padlock_sha1_init_nano(struct shash_desc *desc)
{
        struct sha1_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha1_state){
                .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
        };

        return 0;
}

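/*
 * On the Nano, REP XSHA1/XSHA256 can be run in a non-finalizing mode:
 * EAX = -1 requests a partial (multi-block) operation, ECX then counts
 * 64-byte blocks rather than bytes, and the intermediate digest lives in
 * the aligned buffer pointed to by EDI.
 */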
static int padlock_sha1_update_nano(struct shash_desc *desc,
                        const u8 *data, unsigned int len)
{
        struct sha1_state *sctx = shash_desc_ctx(desc);
        unsigned int partial, done;
        const u8 *src;
        /* The PHE requires the output buffer to be 128 bytes long and 16-byte aligned. */
        u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
                ((aligned(STACK_ALIGN)));
        u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);

        partial = sctx->count & 0x3f;
        sctx->count += len;
        done = 0;
        src = data;
        memcpy(dst, (u8 *)(sctx->state), SHA1_DIGEST_SIZE);

        if ((partial + len) >= SHA1_BLOCK_SIZE) {
                /* Top up the buffered partial block with input bytes and hash it. */
                if (partial) {
                        done = -partial;
                        memcpy(sctx->buffer + partial, data,
                                done + SHA1_BLOCK_SIZE);
                        src = sctx->buffer;
                        asm volatile (".byte 0xf3,0x0f,0xa6,0xc8"
                                      : "+S"(src), "+D"(dst)
                                      : "a"((long)-1), "c"((unsigned long)1));
                        done += SHA1_BLOCK_SIZE;
                        src = data + done;
                }

                /* Process the remaining full blocks of input data. */
                if (len - done >= SHA1_BLOCK_SIZE) {
                        asm volatile (".byte 0xf3,0x0f,0xa6,0xc8"
                                      : "+S"(src), "+D"(dst)
                                      : "a"((long)-1),
                                        "c"((unsigned long)((len - done) / SHA1_BLOCK_SIZE)));
                        done += ((len - done) - (len - done) % SHA1_BLOCK_SIZE);
                        src = data + done;
                }
                partial = 0;
        }
        memcpy((u8 *)(sctx->state), dst, SHA1_DIGEST_SIZE);
        memcpy(sctx->buffer + partial, src, len - done);

        return 0;
}

static int padlock_sha1_final_nano(struct shash_desc *desc, u8 *out)
{
        struct sha1_state *state = (struct sha1_state *)shash_desc_ctx(desc);
        unsigned int partial, padlen;
        __be64 bits;
        static const u8 padding[64] = { 0x80, };

        bits = cpu_to_be64(state->count << 3);

        /* Pad out to 56 mod 64 */
        partial = state->count & 0x3f;
        padlen = (partial < 56) ? (56 - partial) : ((64 + 56) - partial);
        padlock_sha1_update_nano(desc, padding, padlen);

        /* Append length field bytes */
        padlock_sha1_update_nano(desc, (const u8 *)&bits, sizeof(bits));

        /* Swap to output */
        padlock_output_block((uint32_t *)(state->state), (uint32_t *)out, 5);

        return 0;
}

static int padlock_sha256_init_nano(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha256_state){
                .state = { SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
                           SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7 },
        };

        return 0;
}

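/* Same block-wise scheme as the SHA-1 Nano path above, using REP XSHA256. */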
static int padlock_sha256_update_nano(struct shash_desc *desc, const u8 *data,
                          unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial, done;
        const u8 *src;
        /* The PHE requires the output buffer to be 128 bytes long and 16-byte aligned. */
        u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
                ((aligned(STACK_ALIGN)));
        u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);

        partial = sctx->count & 0x3f;
        sctx->count += len;
        done = 0;
        src = data;
        memcpy(dst, (u8 *)(sctx->state), SHA256_DIGEST_SIZE);

        if ((partial + len) >= SHA256_BLOCK_SIZE) {
                /* Top up the buffered partial block with input bytes and hash it. */
                if (partial) {
                        done = -partial;
                        memcpy(sctx->buf + partial, data,
                                done + SHA256_BLOCK_SIZE);
                        src = sctx->buf;
                        asm volatile (".byte 0xf3,0x0f,0xa6,0xd0"
                                      : "+S"(src), "+D"(dst)
                                      : "a"((long)-1), "c"((unsigned long)1));
                        done += SHA256_BLOCK_SIZE;
                        src = data + done;
                }

                /* Process the remaining full blocks of input data. */
                if (len - done >= SHA256_BLOCK_SIZE) {
                        asm volatile (".byte 0xf3,0x0f,0xa6,0xd0"
                                      : "+S"(src), "+D"(dst)
                                      : "a"((long)-1),
                                        "c"((unsigned long)((len - done) / SHA256_BLOCK_SIZE)));
                        done += ((len - done) - (len - done) % SHA256_BLOCK_SIZE);
                        src = data + done;
                }
                partial = 0;
        }
        memcpy((u8 *)(sctx->state), dst, SHA256_DIGEST_SIZE);
        memcpy(sctx->buf + partial, src, len - done);

        return 0;
}

static int padlock_sha256_final_nano(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *state =
                (struct sha256_state *)shash_desc_ctx(desc);
        unsigned int partial, padlen;
        __be64 bits;
        static const u8 padding[64] = { 0x80, };

        bits = cpu_to_be64(state->count << 3);

        /* Pad out to 56 mod 64 */
        partial = state->count & 0x3f;
        padlen = (partial < 56) ? (56 - partial) : ((64 + 56) - partial);
        padlock_sha256_update_nano(desc, padding, padlen);

        /* Append length field bytes */
        padlock_sha256_update_nano(desc, (const u8 *)&bits, sizeof(bits));

        /* Swap to output */
        padlock_output_block((uint32_t *)(state->state), (uint32_t *)out, 8);

        return 0;
}

static int padlock_sha_export_nano(struct shash_desc *desc,
                                void *out)
{
        int statesize = crypto_shash_statesize(desc->tfm);
        void *sctx = shash_desc_ctx(desc);

        memcpy(out, sctx, statesize);
        return 0;
}

static int padlock_sha_import_nano(struct shash_desc *desc,
                                const void *in)
{
        int statesize = crypto_shash_statesize(desc->tfm);
        void *sctx = shash_desc_ctx(desc);

        memcpy(sctx, in, statesize);
        return 0;
}

static struct shash_alg sha1_alg_nano = {
        .digestsize     =       SHA1_DIGEST_SIZE,
        .init           =       padlock_sha1_init_nano,
        .update         =       padlock_sha1_update_nano,
        .final          =       padlock_sha1_final_nano,
        .export         =       padlock_sha_export_nano,
        .import         =       padlock_sha_import_nano,
        .descsize       =       sizeof(struct sha1_state),
        .statesize      =       sizeof(struct sha1_state),
        .base           =       {
                .cra_name               =       "sha1",
                .cra_driver_name        =       "sha1-padlock-nano",
                .cra_priority           =       PADLOCK_CRA_PRIORITY,
                .cra_blocksize          =       SHA1_BLOCK_SIZE,
                .cra_module             =       THIS_MODULE,
        }
};

static struct shash_alg sha256_alg_nano = {
        .digestsize     =       SHA256_DIGEST_SIZE,
        .init           =       padlock_sha256_init_nano,
        .update         =       padlock_sha256_update_nano,
        .final          =       padlock_sha256_final_nano,
        .export         =       padlock_sha_export_nano,
        .import         =       padlock_sha_import_nano,
        .descsize       =       sizeof(struct sha256_state),
        .statesize      =       sizeof(struct sha256_state),
        .base           =       {
                .cra_name               =       "sha256",
                .cra_driver_name        =       "sha256-padlock-nano",
                .cra_priority           =       PADLOCK_CRA_PRIORITY,
                .cra_blocksize          =       SHA256_BLOCK_SIZE,
                .cra_module             =       THIS_MODULE,
        }
};

static const struct x86_cpu_id padlock_sha_ids[] = {
        X86_FEATURE_MATCH(X86_FEATURE_PHE),
        {}
};
MODULE_DEVICE_TABLE(x86cpu, padlock_sha_ids);

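/*
 * The PHE CPUID feature bit alone is not enough; padlock_init() below also
 * requires the "PHE enabled" bit (X86_FEATURE_PHE_EN) to be set.
 */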
static int __init padlock_init(void)
{
        int rc = -ENODEV;
        struct cpuinfo_x86 *c = &cpu_data(0);
        struct shash_alg *sha1;
        struct shash_alg *sha256;

        if (!x86_match_cpu(padlock_sha_ids) || !boot_cpu_has(X86_FEATURE_PHE_EN))
                return -ENODEV;

        /*
         * Register the Nano-specific algorithms on a VIA Nano processor
         * (model >= 0x0f); otherwise use the original PadLock versions.
         */
        if (c->x86_model < 0x0f) {
                sha1 = &sha1_alg;
                sha256 = &sha256_alg;
        } else {
                sha1 = &sha1_alg_nano;
                sha256 = &sha256_alg_nano;
        }

        rc = crypto_register_shash(sha1);
        if (rc)
                goto out;

        rc = crypto_register_shash(sha256);
        if (rc)
                goto out_unreg1;

        printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

        return 0;

out_unreg1:
        crypto_unregister_shash(sha1);

out:
        printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
        return rc;
}

static void __exit padlock_fini(void)
{
        struct cpuinfo_x86 *c = &cpu_data(0);

        if (c->x86_model >= 0x0f) {
                crypto_unregister_shash(&sha1_alg_nano);
                crypto_unregister_shash(&sha256_alg_nano);
        } else {
                crypto_unregister_shash(&sha1_alg);
                crypto_unregister_shash(&sha256_alg);
        }
}

module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS_CRYPTO("sha1-all");
MODULE_ALIAS_CRYPTO("sha256-all");
MODULE_ALIAS_CRYPTO("sha1-padlock");
MODULE_ALIAS_CRYPTO("sha256-padlock");