Linux-libre 5.3.12-gnu
[librecmc/linux-libre.git] / arch / x86 / crypto / sha256_ssse3_glue.c
1 /*
2  * Cryptographic API.
3  *
4  * Glue code for the SHA256 Secure Hash Algorithm assembler
5  * implementation using supplemental SSE3 / AVX / AVX2 instructions.
6  *
7  * This file is based on sha256_generic.c
8  *
9  * Copyright (C) 2013 Intel Corporation.
10  *
11  * Author:
12  *     Tim Chen <tim.c.chen@linux.intel.com>
13  *
14  * This program is free software; you can redistribute it and/or modify it
15  * under the terms of the GNU General Public License as published by the Free
16  * Software Foundation; either version 2 of the License, or (at your option)
17  * any later version.
18  *
19  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
20  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
22  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
23  * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
24  * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
25  * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26  * SOFTWARE.
27  */
28
29
30 #define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt
31
32 #include <crypto/internal/hash.h>
33 #include <crypto/internal/simd.h>
34 #include <linux/init.h>
35 #include <linux/module.h>
36 #include <linux/mm.h>
37 #include <linux/cryptohash.h>
38 #include <linux/types.h>
39 #include <crypto/sha.h>
40 #include <crypto/sha256_base.h>
41 #include <linux/string.h>
42 #include <asm/simd.h>
43
44 asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
45                                        u64 rounds);
46 typedef void (sha256_transform_fn)(u32 *digest, const char *data, u64 rounds);
47
48 static int sha256_update(struct shash_desc *desc, const u8 *data,
49                          unsigned int len, sha256_transform_fn *sha256_xform)
50 {
51         struct sha256_state *sctx = shash_desc_ctx(desc);
52
53         if (!crypto_simd_usable() ||
54             (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
55                 return crypto_sha256_update(desc, data, len);
56
57         /* make sure casting to sha256_block_fn() is safe */
58         BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
59
60         kernel_fpu_begin();
61         sha256_base_do_update(desc, data, len,
62                               (sha256_block_fn *)sha256_xform);
63         kernel_fpu_end();
64
65         return 0;
66 }
67
68 static int sha256_finup(struct shash_desc *desc, const u8 *data,
69               unsigned int len, u8 *out, sha256_transform_fn *sha256_xform)
70 {
71         if (!crypto_simd_usable())
72                 return crypto_sha256_finup(desc, data, len, out);
73
74         kernel_fpu_begin();
75         if (len)
76                 sha256_base_do_update(desc, data, len,
77                                       (sha256_block_fn *)sha256_xform);
78         sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_xform);
79         kernel_fpu_end();
80
81         return sha256_base_finish(desc, out);
82 }
83
84 static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
85                          unsigned int len)
86 {
87         return sha256_update(desc, data, len, sha256_transform_ssse3);
88 }
89
90 static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
91               unsigned int len, u8 *out)
92 {
93         return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
94 }
95
96 /* Add padding and return the message digest. */
97 static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
98 {
99         return sha256_ssse3_finup(desc, NULL, 0, out);
100 }
101
/*
 * SSSE3 SHA-256/SHA-224 algorithm descriptors.  Priority 150 places
 * them above the generic C driver while still yielding to the AVX,
 * AVX2 and SHA-NI variants registered below (160/170/250).
 */
static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
131
132 static int register_sha256_ssse3(void)
133 {
134         if (boot_cpu_has(X86_FEATURE_SSSE3))
135                 return crypto_register_shashes(sha256_ssse3_algs,
136                                 ARRAY_SIZE(sha256_ssse3_algs));
137         return 0;
138 }
139
140 static void unregister_sha256_ssse3(void)
141 {
142         if (boot_cpu_has(X86_FEATURE_SSSE3))
143                 crypto_unregister_shashes(sha256_ssse3_algs,
144                                 ARRAY_SIZE(sha256_ssse3_algs));
145 }
146
147 #ifdef CONFIG_AS_AVX
148 asmlinkage void sha256_transform_avx(u32 *digest, const char *data,
149                                      u64 rounds);
150
151 static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
152                          unsigned int len)
153 {
154         return sha256_update(desc, data, len, sha256_transform_avx);
155 }
156
157 static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
158                       unsigned int len, u8 *out)
159 {
160         return sha256_finup(desc, data, len, out, sha256_transform_avx);
161 }
162
163 static int sha256_avx_final(struct shash_desc *desc, u8 *out)
164 {
165         return sha256_avx_finup(desc, NULL, 0, out);
166 }
167
/*
 * AVX SHA-256/SHA-224 algorithm descriptors.  Priority 160: preferred
 * over SSSE3 (150), outranked by AVX2 (170) and SHA-NI (250).
 */
static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
197
198 static bool avx_usable(void)
199 {
200         if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
201                 if (boot_cpu_has(X86_FEATURE_AVX))
202                         pr_info("AVX detected but unusable.\n");
203                 return false;
204         }
205
206         return true;
207 }
208
209 static int register_sha256_avx(void)
210 {
211         if (avx_usable())
212                 return crypto_register_shashes(sha256_avx_algs,
213                                 ARRAY_SIZE(sha256_avx_algs));
214         return 0;
215 }
216
217 static void unregister_sha256_avx(void)
218 {
219         if (avx_usable())
220                 crypto_unregister_shashes(sha256_avx_algs,
221                                 ARRAY_SIZE(sha256_avx_algs));
222 }
223
224 #else
/* CONFIG_AS_AVX not set: AVX registration helpers become no-ops. */
static inline int register_sha256_avx(void) { return 0; }
static inline void unregister_sha256_avx(void) { }
227 #endif
228
229 #if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
230 asmlinkage void sha256_transform_rorx(u32 *digest, const char *data,
231                                       u64 rounds);
232
233 static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
234                          unsigned int len)
235 {
236         return sha256_update(desc, data, len, sha256_transform_rorx);
237 }
238
239 static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
240                       unsigned int len, u8 *out)
241 {
242         return sha256_finup(desc, data, len, out, sha256_transform_rorx);
243 }
244
245 static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
246 {
247         return sha256_avx2_finup(desc, NULL, 0, out);
248 }
249
/*
 * AVX2 (RORX) SHA-256/SHA-224 algorithm descriptors.  Priority 170:
 * preferred over SSSE3/AVX, outranked only by SHA-NI (250).
 */
static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
279
280 static bool avx2_usable(void)
281 {
282         if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
283                     boot_cpu_has(X86_FEATURE_BMI2))
284                 return true;
285
286         return false;
287 }
288
289 static int register_sha256_avx2(void)
290 {
291         if (avx2_usable())
292                 return crypto_register_shashes(sha256_avx2_algs,
293                                 ARRAY_SIZE(sha256_avx2_algs));
294         return 0;
295 }
296
297 static void unregister_sha256_avx2(void)
298 {
299         if (avx2_usable())
300                 crypto_unregister_shashes(sha256_avx2_algs,
301                                 ARRAY_SIZE(sha256_avx2_algs));
302 }
303
304 #else
/* CONFIG_AS_AVX2 (and/or AVX) not set: AVX2 helpers become no-ops. */
static inline int register_sha256_avx2(void) { return 0; }
static inline void unregister_sha256_avx2(void) { }
307 #endif
308
309 #ifdef CONFIG_AS_SHA256_NI
310 asmlinkage void sha256_ni_transform(u32 *digest, const char *data,
311                                    u64 rounds); /*unsigned int rounds);*/
312
313 static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
314                          unsigned int len)
315 {
316         return sha256_update(desc, data, len, sha256_ni_transform);
317 }
318
319 static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
320                       unsigned int len, u8 *out)
321 {
322         return sha256_finup(desc, data, len, out, sha256_ni_transform);
323 }
324
325 static int sha256_ni_final(struct shash_desc *desc, u8 *out)
326 {
327         return sha256_ni_finup(desc, NULL, 0, out);
328 }
329
/*
 * SHA-NI SHA-256/SHA-224 algorithm descriptors.  Priority 250: the
 * dedicated SHA instructions outrank every other variant in this file.
 */
static struct shash_alg sha256_ni_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
359
360 static int register_sha256_ni(void)
361 {
362         if (boot_cpu_has(X86_FEATURE_SHA_NI))
363                 return crypto_register_shashes(sha256_ni_algs,
364                                 ARRAY_SIZE(sha256_ni_algs));
365         return 0;
366 }
367
368 static void unregister_sha256_ni(void)
369 {
370         if (boot_cpu_has(X86_FEATURE_SHA_NI))
371                 crypto_unregister_shashes(sha256_ni_algs,
372                                 ARRAY_SIZE(sha256_ni_algs));
373 }
374
375 #else
/* CONFIG_AS_SHA256_NI not set: SHA-NI helpers become no-ops. */
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
378 #endif
379
380 static int __init sha256_ssse3_mod_init(void)
381 {
382         if (register_sha256_ssse3())
383                 goto fail;
384
385         if (register_sha256_avx()) {
386                 unregister_sha256_ssse3();
387                 goto fail;
388         }
389
390         if (register_sha256_avx2()) {
391                 unregister_sha256_avx();
392                 unregister_sha256_ssse3();
393                 goto fail;
394         }
395
396         if (register_sha256_ni()) {
397                 unregister_sha256_avx2();
398                 unregister_sha256_avx();
399                 unregister_sha256_ssse3();
400                 goto fail;
401         }
402
403         return 0;
404 fail:
405         return -ENODEV;
406 }
407
/* Module exit: unregister every variant in reverse registration order. */
static void __exit sha256_ssse3_mod_fini(void)
{
	unregister_sha256_ni();
	unregister_sha256_avx2();
	unregister_sha256_avx();
	unregister_sha256_ssse3();
}
415
module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

/*
 * Aliases so the module loads for both the generic algorithm names and
 * the driver-specific names.  The -ni aliases are only meaningful when
 * the assembler supports the SHA-NI instructions.
 */
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ssse3");
MODULE_ALIAS_CRYPTO("sha256-avx");
MODULE_ALIAS_CRYPTO("sha256-avx2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ssse3");
MODULE_ALIAS_CRYPTO("sha224-avx");
MODULE_ALIAS_CRYPTO("sha224-avx2");
#ifdef CONFIG_AS_SHA256_NI
MODULE_ALIAS_CRYPTO("sha256-ni");
MODULE_ALIAS_CRYPTO("sha224-ni");
#endif