/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE        16
#define GHASH_DIGEST_SIZE       16

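/*
 * The hash key H, stored as two 64-bit halves. ghash_setkey() stores it
 * pre-multiplied by 'x' in GF(2^128) so the assembler routines can use
 * a cheaper reduction step.
 */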
struct ghash_key {
        u64     a;
        u64     b;
};

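/*
 * Per-request hash state: the running 128-bit digest, a holding buffer
 * for a partial block, and a byte count used to derive how much of the
 * buffer is occupied.
 */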
struct ghash_desc_ctx {
        u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
        u8 buf[GHASH_BLOCK_SIZE];
        u32 count;
};

struct ghash_async_ctx {
        struct cryptd_ahash *cryptd_tfm;
};

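/*
 * Core GHASH block functions, implemented in assembler. The _p64 variant
 * uses the 64x64->128 bit polynomial multiply (vmull.p64) from the ARMv8
 * Crypto Extensions; the _p8 variant synthesizes the same multiplication
 * from plain NEON vmull.p8 instructions for CPUs without PMULL. The
 * function pointer below is set to one of the two at module init time.
 */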
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
                                       struct ghash_key const *k,
                                       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
                                      struct ghash_key const *k,
                                      const char *head);

static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
                                  struct ghash_key const *k,
                                  const char *head);

static int ghash_init(struct shash_desc *desc)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

        *ctx = (struct ghash_desc_ctx){};
        return 0;
}

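/*
 * Fold 'len' bytes into the digest. A partial block left over from a
 * previous call is completed first and passed to the assembler code via
 * the 'head' argument; full blocks are then processed with the NEON unit
 * enabled, and any trailing bytes are buffered for the next call.
 */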
static int ghash_update(struct shash_desc *desc, const u8 *src,
                        unsigned int len)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

        ctx->count += len;

        if ((partial + len) >= GHASH_BLOCK_SIZE) {
                struct ghash_key *key = crypto_shash_ctx(desc->tfm);
                int blocks;

                if (partial) {
                        int p = GHASH_BLOCK_SIZE - partial;

                        memcpy(ctx->buf + partial, src, p);
                        src += p;
                        len -= p;
                }

                blocks = len / GHASH_BLOCK_SIZE;
                len %= GHASH_BLOCK_SIZE;

                kernel_neon_begin();
                pmull_ghash_update(blocks, ctx->digest, src, key,
                                   partial ? ctx->buf : NULL);
                kernel_neon_end();
                src += blocks * GHASH_BLOCK_SIZE;
                partial = 0;
        }
        if (len)
                memcpy(ctx->buf + partial, src, len);
        return 0;
}

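/*
 * Process any buffered partial block after zero-padding it, then write
 * out the digest doublewords in big-endian GHASH byte order and wipe
 * the request state.
 */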
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

        if (partial) {
                struct ghash_key *key = crypto_shash_ctx(desc->tfm);

                memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
                kernel_neon_begin();
                pmull_ghash_update(1, ctx->digest, ctx->buf, key, NULL);
                kernel_neon_end();
        }
        put_unaligned_be64(ctx->digest[1], dst);
        put_unaligned_be64(ctx->digest[0], dst + 8);

        *ctx = (struct ghash_desc_ctx){};
        return 0;
}

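/*
 * Install the 16-byte hash key H. The key is stored pre-multiplied by
 * 'x' in GF(2^128): the 128-bit value is shifted left by one bit, and a
 * bit shifted out at the top is folded back in by XORing the reduction
 * constant 0xc200000000000000, which represents the GHASH field
 * polynomial x^128 + x^7 + x^2 + x + 1 in this bit ordering.
 */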
static int ghash_setkey(struct crypto_shash *tfm,
                        const u8 *inkey, unsigned int keylen)
{
        struct ghash_key *key = crypto_shash_ctx(tfm);
        u64 a, b;

        if (keylen != GHASH_BLOCK_SIZE) {
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        /* perform multiplication by 'x' in GF(2^128) */
        b = get_unaligned_be64(inkey);
        a = get_unaligned_be64(inkey + 8);

        key->a = (a << 1) | (b >> 63);
        key->b = (b << 1) | (a >> 63);

        if (b >> 63)
                key->b ^= 0xc200000000000000UL;

        return 0;
}

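/*
 * Synchronous shash version. CRYPTO_ALG_INTERNAL (and a priority of 0)
 * keeps it from being selected by users of the crypto API directly, as
 * it may only run in contexts where the NEON unit is usable: it is
 * reached via the async "ghash-ce" wrapper registered below.
 */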
static struct shash_alg ghash_alg = {
        .digestsize             = GHASH_DIGEST_SIZE,
        .init                   = ghash_init,
        .update                 = ghash_update,
        .final                  = ghash_final,
        .setkey                 = ghash_setkey,
        .descsize               = sizeof(struct ghash_desc_ctx),
        .base                   = {
                .cra_name       = "__ghash",
                .cra_driver_name = "__driver-ghash-ce",
                .cra_priority   = 0,
                .cra_flags      = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_INTERNAL,
                .cra_blocksize  = GHASH_BLOCK_SIZE,
                .cra_ctxsize    = sizeof(struct ghash_key),
                .cra_module     = THIS_MODULE,
        },
};

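/*
 * Async wrapper around the internal shash. Each operation checks whether
 * the NEON unit may be used in the current context; if not, or if cryptd
 * already has requests queued while we are atomic (so that request
 * ordering is preserved), the request is handed off to cryptd and
 * processed later in process context. Otherwise, the internal shash is
 * invoked synchronously.
 */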
static int ghash_async_init(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
        struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
        struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

        desc->tfm = child;
        desc->flags = req->base.flags;
        return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        if (!may_use_simd() ||
            (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_update(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                return shash_ahash_update(req, desc);
        }
}

static int ghash_async_final(struct ahash_request *req)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        if (!may_use_simd() ||
            (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_final(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                return crypto_shash_final(desc, req->result);
        }
}

static int ghash_async_digest(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        if (!may_use_simd() ||
            (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_digest(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

                desc->tfm = child;
                desc->flags = req->base.flags;
                return shash_ahash_digest(req, desc);
        }
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

        desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

        return crypto_shash_export(desc, out);
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct crypto_ahash *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
                               & CRYPTO_TFM_REQ_MASK);
        err = crypto_ahash_setkey(child, key, keylen);
        crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
                               & CRYPTO_TFM_RES_MASK);

        return err;
}

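/*
 * Allocate the cryptd ahash backed by the internal "__driver-ghash-ce"
 * shash, and size our request context so that a cryptd request can be
 * embedded inside it.
 */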
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_ahash *cryptd_tfm;
        struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_tfm = cryptd_alloc_ahash("__driver-ghash-ce",
                                        CRYPTO_ALG_INTERNAL,
                                        CRYPTO_ALG_INTERNAL);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ctx->cryptd_tfm = cryptd_tfm;
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct ahash_request) +
                                 crypto_ahash_reqsize(&cryptd_tfm->base));

        return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
        struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
        .init                   = ghash_async_init,
        .update                 = ghash_async_update,
        .final                  = ghash_async_final,
        .setkey                 = ghash_async_setkey,
        .digest                 = ghash_async_digest,
        .import                 = ghash_async_import,
        .export                 = ghash_async_export,
        .halg.digestsize        = GHASH_DIGEST_SIZE,
        .halg.statesize         = sizeof(struct ghash_desc_ctx),
        .halg.base              = {
                .cra_name       = "ghash",
                .cra_driver_name = "ghash-ce",
                .cra_priority   = 300,
                .cra_flags      = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
                .cra_blocksize  = GHASH_BLOCK_SIZE,
                .cra_type       = &crypto_ahash_type,
                .cra_ctxsize    = sizeof(struct ghash_async_ctx),
                .cra_module     = THIS_MODULE,
                .cra_init       = ghash_async_init_tfm,
                .cra_exit       = ghash_async_exit_tfm,
        },
};

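/*
 * NEON is required in all cases; the faster vmull.p64 code path is used
 * only if the CPU also advertises the ARMv8 PMULL Crypto Extension.
 */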
static int __init ghash_ce_mod_init(void)
{
        int err;

        if (!(elf_hwcap & HWCAP_NEON))
                return -ENODEV;

        if (elf_hwcap2 & HWCAP2_PMULL)
                pmull_ghash_update = pmull_ghash_update_p64;
        else
                pmull_ghash_update = pmull_ghash_update_p8;

        err = crypto_register_shash(&ghash_alg);
        if (err)
                return err;
        err = crypto_register_ahash(&ghash_async_alg);
        if (err)
                goto err_shash;

        return 0;

err_shash:
        crypto_unregister_shash(&ghash_alg);
        return err;
}

static void __exit ghash_ce_mod_exit(void)
{
        crypto_unregister_ahash(&ghash_async_alg);
        crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);