Linux-libre 5.3.12-gnu
arch/arm64/crypto/sha2-ce-glue.c (librecmc/linux-libre.git)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

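/*
 * 'sst' must remain the first member so the generic sha256_base helpers
 * can treat the descriptor context as a plain struct sha256_state.  The
 * 'finalize' flag is read by the assembly transform: when non-zero, the
 * asm code appends the final padding block itself.
 */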
struct sha256_ce_state {
        struct sha256_state     sst;
        u32                     finalize;
};

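/*
 * Block transform implemented in sha2-ce-core.S using the ARMv8 SHA-256
 * instructions; it honours the 'finalize' flag described above.
 */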
asmlinkage void sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
                                  int blocks);

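/*
 * Structure offsets exported as symbols so the assembly code can locate
 * 'count' and 'finalize' without hardcoding the layout of the state.
 */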
const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
                                              sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
                                                 finalize);

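/*
 * Generic (non-NEON) fallback transform, used whenever kernel-mode NEON
 * may not be used, i.e. when crypto_simd_usable() returns false.
 */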
asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);

static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
                            unsigned int len)
{
        struct sha256_ce_state *sctx = shash_desc_ctx(desc);

        if (!crypto_simd_usable())
                return sha256_base_do_update(desc, data, len,
                                (sha256_block_fn *)sha256_block_data_order);

        sctx->finalize = 0;
        kernel_neon_begin();
        sha256_base_do_update(desc, data, len,
                              (sha256_block_fn *)sha2_ce_transform);
        kernel_neon_end();

        return 0;
}

static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
                           unsigned int len, u8 *out)
{
        struct sha256_ce_state *sctx = shash_desc_ctx(desc);
        bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;

        if (!crypto_simd_usable()) {
                if (len)
                        sha256_base_do_update(desc, data, len,
                                (sha256_block_fn *)sha256_block_data_order);
                sha256_base_do_finalize(desc,
                                (sha256_block_fn *)sha256_block_data_order);
                return sha256_base_finish(desc, out);
        }

        /*
         * Allow the asm code to perform the finalization if there is no
         * partial data and the input is a round multiple of the block size.
         */
        sctx->finalize = finalize;

        kernel_neon_begin();
        sha256_base_do_update(desc, data, len,
                              (sha256_block_fn *)sha2_ce_transform);
        if (!finalize)
                sha256_base_do_finalize(desc,
                                        (sha256_block_fn *)sha2_ce_transform);
        kernel_neon_end();
        return sha256_base_finish(desc, out);
}

static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_ce_state *sctx = shash_desc_ctx(desc);

        if (!crypto_simd_usable()) {
                sha256_base_do_finalize(desc,
                                (sha256_block_fn *)sha256_block_data_order);
                return sha256_base_finish(desc, out);
        }

        sctx->finalize = 0;
        kernel_neon_begin();
        sha256_base_do_finalize(desc, (sha256_block_fn *)sha2_ce_transform);
        kernel_neon_end();
        return sha256_base_finish(desc, out);
}

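/*
 * Priority 200 ranks these drivers above the generic C implementations
 * (priority 100), so they are selected by default whenever "sha224" or
 * "sha256" is requested on CPUs that implement the extensions.
 */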
static struct shash_alg algs[] = { {
        .init                   = sha224_base_init,
        .update                 = sha256_ce_update,
        .final                  = sha256_ce_final,
        .finup                  = sha256_ce_finup,
        .descsize               = sizeof(struct sha256_ce_state),
        .digestsize             = SHA224_DIGEST_SIZE,
        .base                   = {
                .cra_name               = "sha224",
                .cra_driver_name        = "sha224-ce",
                .cra_priority           = 200,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
}, {
        .init                   = sha256_base_init,
        .update                 = sha256_ce_update,
        .final                  = sha256_ce_final,
        .finup                  = sha256_ce_finup,
        .descsize               = sizeof(struct sha256_ce_state),
        .digestsize             = SHA256_DIGEST_SIZE,
        .base                   = {
                .cra_name               = "sha256",
                .cra_driver_name        = "sha256-ce",
                .cra_priority           = 200,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
} };

static int __init sha2_ce_mod_init(void)
{
        return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

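/*
 * Register only on CPUs that advertise the SHA2 capability; this also
 * provides the modalias used to autoload the module on such systems.
 */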
module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);
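
/*
 * Minimal usage sketch (illustrative only, not part of this module): how a
 * hypothetical caller elsewhere in the kernel might request this driver
 * explicitly through the shash API.  The function name and error handling
 * below are assumptions made for the example; the includes would normally
 * live at the top of the caller's file.
 */
#include <crypto/hash.h>
#include <linux/err.h>

static int sha256_ce_digest_example(const u8 *data, unsigned int len,
                                    u8 out[SHA256_DIGEST_SIZE])
{
        struct crypto_shash *tfm;
        int ret;

        /*
         * Ask for the CE-backed driver by name; requesting plain "sha256"
         * would pick the highest-priority implementation instead.
         */
        tfm = crypto_alloc_shash("sha256-ce", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        {
                SHASH_DESC_ON_STACK(desc, tfm);

                desc->tfm = tfm;
                ret = crypto_shash_digest(desc, data, len, out);
                shash_desc_zero(desc);
        }

        crypto_free_shash(tfm);
        return ret;
}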