// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 alg;

        u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
        u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_ahash_req {
        bool last_req;
        bool finish;
        bool hmac;
        bool needs_inv;

        int nents;
        dma_addr_t result_dma;

        u32 digest;
        u8 state_sz;    /* expected state size, only set once */
        u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));

        u64 len[2];
        u64 processed[2];

        u8 cache[SHA512_BLOCK_SIZE << 1] __aligned(sizeof(u32));
        dma_addr_t cache_dma;
        unsigned int cache_sz;

        u8 cache_next[SHA512_BLOCK_SIZE << 1] __aligned(sizeof(u32));
};

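/*
 * Return the number of bytes accepted by update() that send() has not
 * consumed yet. The [1] words accumulate the overflow of the corresponding
 * [0] words, mirroring how req->len and req->processed are updated in
 * safexcel_ahash_update() and safexcel_ahash_send_req().
 */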
static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
        u64 len, processed;

        len = (0xffffffff * req->len[1]) + req->len[0];
        processed = (0xffffffff * req->processed[1]) + req->processed[0];

        return len - processed;
}

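/*
 * Build the two-instruction token for a hash operation: a DIRECTION
 * instruction hashing input_length bytes of the packet, followed by an
 * INSERT instruction appending result_length bytes of digest to the output.
 */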
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                                u32 input_length, u32 result_length)
{
        struct safexcel_token *token =
                (struct safexcel_token *)cdesc->control_data.token;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = input_length;
        token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

        token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[1].packet_length = result_length;
        token[1].stat = EIP197_TOKEN_STAT_LAST_HASH |
                        EIP197_TOKEN_STAT_LAST_PACKET;
        token[1].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
}

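/*
 * Fill in the control words of the first command descriptor: algorithm,
 * digest type and state size, and whether the engine must restart the hash
 * or continue from the state saved in the context record. When continuing,
 * the saved state (and, on a final operation, the block counter) is copied
 * into the context record; for HMAC, the precomputed ipad/opad digests are
 * loaded instead.
 */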
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                     struct safexcel_ahash_req *req,
                                     struct safexcel_command_desc *cdesc,
                                     unsigned int digestsize)
{
        struct safexcel_crypto_priv *priv = ctx->priv;
        int i;

        cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_OUT;
        cdesc->control_data.control0 |= ctx->alg;
        cdesc->control_data.control0 |= req->digest;

        if (!req->finish)
                cdesc->control_data.control0 |= CONTEXT_CONTROL_NO_FINISH_HASH;

        if (req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) {
                if (req->processed[0] || req->processed[1]) {
                        if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(5);
                        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(6);
                        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224 ||
                                 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(9);
                        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384 ||
                                 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(17);

                        cdesc->control_data.control1 |= CONTEXT_CONTROL_DIGEST_CNT;
                } else {
                        cdesc->control_data.control0 |= CONTEXT_CONTROL_RESTART_HASH;
                }

                /*
                 * Copy the input digest if needed, and set up the context
                 * fields. Do this now, as we need them to set up the first
                 * command descriptor.
                 */
                if (req->processed[0] || req->processed[1]) {
                        for (i = 0; i < digestsize / sizeof(u32); i++)
                                ctx->base.ctxr->data[i] = cpu_to_le32(req->state[i]);

                        if (req->finish) {
                                u64 count = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE;
                                count += ((0xffffffff / EIP197_COUNTER_BLOCK_SIZE) *
                                          req->processed[1]);

                                /* This is a hardware limitation, as the
                                 * counter must fit into a u32. This represents
                                 * a fairly big amount of input data, so we
                                 * shouldn't see this.
                                 */
                                if (unlikely(count & 0xffff0000)) {
                                        dev_warn(priv->dev,
                                                 "Input data is too big\n");
                                        return;
                                }

                                ctx->base.ctxr->data[i] = cpu_to_le32(count);
                        }
                }
        } else if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC) {
                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(2 * req->state_sz / sizeof(u32));

                memcpy(ctx->base.ctxr->data, ctx->ipad, req->state_sz);
                memcpy(ctx->base.ctxr->data + req->state_sz / sizeof(u32),
                       ctx->opad, req->state_sz);
        }
}

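/*
 * Completion path for a regular hash request: pop and check the result
 * descriptor, unmap the source, result and cache DMA buffers, copy the
 * digest out on a final operation, and move any still-queued bytes from
 * cache_next into the cache for the next send().
 */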
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
        u64 cache_len;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: result: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (sreq->nents) {
                dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
                sreq->nents = 0;
        }

        if (sreq->result_dma) {
                dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
                                 DMA_FROM_DEVICE);
                sreq->result_dma = 0;
        }

        if (sreq->cache_dma) {
                dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                                 DMA_TO_DEVICE);
                sreq->cache_dma = 0;
                sreq->cache_sz = 0;
        }

        if (sreq->finish)
                memcpy(areq->result, sreq->state,
                       crypto_ahash_digestsize(ahash));

        cache_len = safexcel_queued_len(sreq);
        if (cache_len)
                memcpy(sreq->cache, sreq->cache_next, cache_len);

        *should_complete = true;

        return 1;
}

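/*
 * Build the descriptor chain for one hash request: an optional command
 * descriptor for the previously cached data, one command descriptor per
 * source scatterlist entry, and a single result descriptor receiving the
 * (intermediate) state. When more requests are expected, the trailing
 * partial block is copied to cache_next instead of being sent.
 */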
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                                   int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        int i, extra = 0, n_cdesc = 0, ret = 0;
        u64 queued, len, cache_len, cache_max;

        cache_max = crypto_ahash_blocksize(ahash);
        if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
                cache_max <<= 1;

        queued = len = safexcel_queued_len(req);
        if (queued <= cache_max)
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full blocks, cache it for the next send() call.
                 */
                extra = queued & (crypto_ahash_blocksize(ahash) - 1);

                if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC &&
                    extra < crypto_ahash_blocksize(ahash))
                        extra += crypto_ahash_blocksize(ahash);

                /* If this is not the last request and the queued data
                 * is a multiple of a block, cache the last one for now.
                 */
                if (!extra)
                        extra = crypto_ahash_blocksize(ahash);

                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache_next, extra,
                                   areq->nbytes - extra);

                queued -= extra;
                len -= extra;
        }

        /* Add a command descriptor for the cached data, if any */
        if (cache_len) {
                req->cache_dma = dma_map_single(priv->dev, req->cache,
                                                cache_len, DMA_TO_DEVICE);
                if (dma_mapping_error(priv->dev, req->cache_dma))
                        return -EINVAL;

                req->cache_sz = cache_len;
                first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                                 (cache_len == len),
                                                 req->cache_dma, cache_len, len,
                                                 ctx->base.ctxr_dma);
                if (IS_ERR(first_cdesc)) {
                        ret = PTR_ERR(first_cdesc);
                        goto unmap_cache;
                }
                n_cdesc++;

                queued -= cache_len;
                if (!queued)
                        goto send_command;
        }

        /* Now handle the current ahash request buffer(s) */
        req->nents = dma_map_sg(priv->dev, areq->src, sg_nents(areq->src),
                                DMA_TO_DEVICE);
        if (!req->nents) {
                ret = -ENOMEM;
                goto cdesc_rollback;
        }

        for_each_sg(areq->src, sg, req->nents, i) {
                int sglen = sg_dma_len(sg);

                /* Do not overflow the request */
                if (queued < sglen)
                        sglen = queued;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - sglen), sg_dma_address(sg),
                                           sglen, len, ctx->base.ctxr_dma);
                if (IS_ERR(cdesc)) {
                        ret = PTR_ERR(cdesc);
                        goto unmap_sg;
                }
                n_cdesc++;

                if (n_cdesc == 1)
                        first_cdesc = cdesc;

                queued -= sglen;
                if (!queued)
                        break;
        }

send_command:
        /* Setup the context options */
        safexcel_context_control(ctx, req, first_cdesc, req->state_sz);

        /* Add the token */
        safexcel_hash_token(first_cdesc, len, req->state_sz);

        req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz,
                                         DMA_FROM_DEVICE);
        if (dma_mapping_error(priv->dev, req->result_dma)) {
                ret = -EINVAL;
                goto unmap_sg;
        }

        /* Add a result descriptor */
        rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                                   req->state_sz);
        if (IS_ERR(rdesc)) {
                ret = PTR_ERR(rdesc);
                goto unmap_result;
        }

        safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

        req->processed[0] += len;
        if (req->processed[0] < len)
                req->processed[1]++;

        *commands = n_cdesc;
        *results = 1;
        return 0;

unmap_result:
        dma_unmap_single(priv->dev, req->result_dma, req->state_sz,
                         DMA_FROM_DEVICE);
unmap_sg:
        dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
        if (req->cache_dma) {
                dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                                 DMA_TO_DEVICE);
                req->cache_dma = 0;
                req->cache_sz = 0;
        }

        return ret;
}

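/*
 * Return true when the content of the engine's context record no longer
 * matches the request state (digest words or block counter), meaning the
 * record must be invalidated before it can be reused.
 */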
static inline bool safexcel_ahash_needs_inv_get(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        unsigned int state_w_sz = req->state_sz / sizeof(u32);
        u64 processed;
        int i;

        processed = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE;
        processed += (0xffffffff / EIP197_COUNTER_BLOCK_SIZE) * req->processed[1];

        for (i = 0; i < state_w_sz; i++)
                if (ctx->base.ctxr->data[i] != cpu_to_le32(req->state[i]))
                        return true;

        if (ctx->base.ctxr->data[state_w_sz] != cpu_to_le32(processed))
                return true;

        return false;
}

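/*
 * Completion path for a context invalidation: free the context record if
 * the tfm is being torn down, otherwise re-enqueue the original request on
 * a freshly selected ring now that the stale record has been flushed.
 */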
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        int enq_ret;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: invalidate: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;
                return 1;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                                  struct crypto_async_request *async,
                                  bool *should_complete, int *ret)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int err;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

        if (req->needs_inv) {
                req->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async,
                                                 should_complete, ret);
        }

        return err;
}

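/* Queue a single invalidation command for this tfm's context record. */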
static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                                   int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int ret;

        ret = safexcel_invalidate_cache(async, ctx->priv,
                                        ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
                               int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (req->needs_inv)
                ret = safexcel_ahash_send_inv(async, ring, commands, results);
        else
                ret = safexcel_ahash_send_req(async, ring, commands, results);

        return ret;
}

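/*
 * Synchronously invalidate the context record on tfm teardown: build an
 * on-stack invalidation request, queue it on the context's ring and wait
 * for the completion callback to fire.
 */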
static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
        struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
        struct safexcel_inv_result result = {};
        int ring = ctx->base.ring;

        memset(req, 0, EIP197_AHASH_REQ_SIZE);

        /* create invalidation request */
        init_completion(&result.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_inv_complete, &result);

        ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
        ctx = crypto_tfm_ctx(req->base.tfm);
        ctx->base.exit_inv = true;
        rctx->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result.completion);

        if (result.error) {
                dev_warn(priv->dev, "hash: completion error (%d)\n",
                         result.error);
                return result.error;
        }

        return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one block worth of data in the
 * pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq, u32 cache_max)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        u64 queued, cache_len;

        /* queued: everything accepted by the driver which will be handled by
         * the next send() calls.
         * tot sz handled by update() - tot sz handled by send()
         */
        queued = safexcel_queued_len(req);
        /* cache_len: everything accepted by the driver but not sent yet,
         * tot sz handled by update() - last req sz - tot sz handled by send()
         */
        cache_len = queued - areq->nbytes;

        /*
         * If there aren't enough bytes to proceed (less than a
         * block size), cache the data until we have enough.
         */
        if (cache_len + areq->nbytes <= cache_max) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return areq->nbytes;
        }

        /* We couldn't cache all the data */
        return -E2BIG;
}

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret, ring;

        req->needs_inv = false;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
                    (req->processed[0] || req->processed[1]) &&
                    req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
                        /* We're still setting needs_inv here, even though it is
                         * cleared right away, because the needs_inv flag can be
                         * set in other functions and we want to keep the same
                         * logic.
                         */
                        ctx->base.needs_inv = safexcel_ahash_needs_inv_get(areq);

                if (ctx->base.needs_inv) {
                        ctx->base.needs_inv = false;
                        req->needs_inv = true;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(areq->base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}

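/*
 * Accept new data for the running hash. Data is accumulated in the cache
 * and only enqueued to the engine once more than cache_max bytes (one block,
 * or two blocks for HMAC) are pending.
 */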
static int safexcel_ahash_update(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        u32 cache_max;

        /* If the request is 0 length, do nothing */
        if (!areq->nbytes)
                return 0;

        req->len[0] += areq->nbytes;
        if (req->len[0] < areq->nbytes)
                req->len[1]++;

        cache_max = crypto_ahash_blocksize(ahash);
        if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
                cache_max <<= 1;

        safexcel_ahash_cache(areq, cache_max);

        /*
         * We're not doing partial updates when performing an hmac request.
         * Everything will be handled by the final() call.
         */
        if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
                return 0;

        if (req->hmac)
                return safexcel_ahash_enqueue(areq);

        if (!req->last_req &&
            safexcel_queued_len(req) > cache_max)
                return safexcel_ahash_enqueue(areq);

        return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->last_req = true;
        req->finish = true;

        /* If we have an overall 0 length request */
        if (!req->len[0] && !req->len[1] && !areq->nbytes) {
                if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);

                return 0;
        }

        return safexcel_ahash_enqueue(areq);
}

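/*
 * finup is a plain update + final sequence; both see last_req/finish set so
 * the whole request is pushed to the engine in one go.
 */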
static int safexcel_ahash_finup(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        req->last_req = true;
        req->finish = true;

        safexcel_ahash_update(areq);
        return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_export_state *export = out;
        u32 cache_sz;

        cache_sz = crypto_ahash_blocksize(ahash);
        if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
                cache_sz <<= 1;

        export->len[0] = req->len[0];
        export->len[1] = req->len[1];
        export->processed[0] = req->processed[0];
        export->processed[1] = req->processed[1];

        export->digest = req->digest;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, cache_sz);

        return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        const struct safexcel_ahash_export_state *export = in;
        u32 cache_sz;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        cache_sz = crypto_ahash_blocksize(ahash);
        if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
                cache_sz <<= 1;

        req->len[0] = export->len[0];
        req->len[1] = export->len[1];
        req->processed[0] = export->processed[0];
        req->processed[1] = export->processed[1];

        req->digest = export->digest;

        memcpy(req->cache, export->cache, cache_sz);
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct safexcel_alg_template, alg.ahash);

        ctx->priv = tmpl->priv;
        ctx->base.send = safexcel_ahash_send;
        ctx->base.handle_result = safexcel_handle_result;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct safexcel_ahash_req));
        return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = SHA1_H0;
        req->state[1] = SHA1_H1;
        req->state[2] = SHA1_H2;
        req->state[3] = SHA1_H3;
        req->state[4] = SHA1_H4;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;

        return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_ahash_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

struct safexcel_alg_template safexcel_alg_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha1_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "safexcel-sha1",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha1_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
        struct completion completion;
        int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
        struct safexcel_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}

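/*
 * Compute the HMAC inner and outer pads for the given key (RFC 2104): a key
 * longer than one block is first digested, then the result is zero-padded
 * to the block size and XORed with the ipad/opad constants. The key digest
 * runs asynchronously on the supplied request, so this can sleep.
 */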
static int safexcel_hmac_init_pad(struct ahash_request *areq,
                                  unsigned int blocksize, const u8 *key,
                                  unsigned int keylen, u8 *ipad, u8 *opad)
{
        struct safexcel_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           safexcel_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                /* Avoid leaking */
                memzero_explicit(keydup, keylen);
                kfree(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}

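/*
 * Hash one prepared pad block and export the resulting intermediate state,
 * which is later loaded into the context record as the precomputed HMAC
 * inner or outer digest.
 */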
static int safexcel_hmac_init_iv(struct ahash_request *areq,
                                 unsigned int blocksize, u8 *pad, void *state)
{
        struct safexcel_ahash_result result;
        struct safexcel_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->hmac = true;
        req->last_req = true;

        ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}

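/*
 * Derive the precomputed inner and outer HMAC states for @key on top of the
 * base hash named by @alg, storing them in @istate and @ostate.
 */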
int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
                         void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
                                    unsigned int keylen, const char *alg,
                                    unsigned int state_sz)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_ahash_export_state istate, ostate;
        int ret, i;

        ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
        if (ret)
                return ret;

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr) {
                for (i = 0; i < state_sz / sizeof(u32); i++) {
                        if (ctx->ipad[i] != le32_to_cpu(istate.state[i]) ||
                            ctx->opad[i] != le32_to_cpu(ostate.state[i])) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        memcpy(ctx->ipad, &istate.state, state_sz);
        memcpy(ctx->opad, &ostate.state, state_sz);

        return 0;
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
                                        SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha1_digest,
                .setkey = safexcel_hmac_sha1_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha1)",
                                .cra_driver_name = "safexcel-hmac-sha1",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = SHA256_H0;
        req->state[1] = SHA256_H1;
        req->state[2] = SHA256_H2;
        req->state[3] = SHA256_H3;
        req->state[4] = SHA256_H4;
        req->state[5] = SHA256_H5;
        req->state[6] = SHA256_H6;
        req->state[7] = SHA256_H7;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;

        return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha256_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha256_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha256_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha256",
                                .cra_driver_name = "safexcel-sha256",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = SHA224_H0;
        req->state[1] = SHA224_H1;
        req->state[2] = SHA224_H2;
        req->state[3] = SHA224_H3;
        req->state[4] = SHA224_H4;
        req->state[5] = SHA224_H5;
        req->state[6] = SHA224_H6;
        req->state[7] = SHA224_H7;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;

        return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha224_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha224",
                                .cra_driver_name = "safexcel-sha224",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
                                        SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha224_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha224_digest,
                .setkey = safexcel_hmac_sha224_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha224)",
                                .cra_driver_name = "safexcel-hmac-sha224",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
                                        SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha256_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha256_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha256_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha256_digest,
                .setkey = safexcel_hmac_sha256_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha256)",
                                .cra_driver_name = "safexcel-hmac-sha256",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

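/*
 * The engine context stores the SHA-384/SHA-512 state as 32-bit words, so
 * the 64-bit initial values are split into their lower and upper halves.
 */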
1307 static int safexcel_sha512_init(struct ahash_request *areq)
1308 {
1309         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1310         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1311
1312         memset(req, 0, sizeof(*req));
1313
1314         req->state[0] = lower_32_bits(SHA512_H0);
1315         req->state[1] = upper_32_bits(SHA512_H0);
1316         req->state[2] = lower_32_bits(SHA512_H1);
1317         req->state[3] = upper_32_bits(SHA512_H1);
1318         req->state[4] = lower_32_bits(SHA512_H2);
1319         req->state[5] = upper_32_bits(SHA512_H2);
1320         req->state[6] = lower_32_bits(SHA512_H3);
1321         req->state[7] = upper_32_bits(SHA512_H3);
1322         req->state[8] = lower_32_bits(SHA512_H4);
1323         req->state[9] = upper_32_bits(SHA512_H4);
1324         req->state[10] = lower_32_bits(SHA512_H5);
1325         req->state[11] = upper_32_bits(SHA512_H5);
1326         req->state[12] = lower_32_bits(SHA512_H6);
1327         req->state[13] = upper_32_bits(SHA512_H6);
1328         req->state[14] = lower_32_bits(SHA512_H7);
1329         req->state[15] = upper_32_bits(SHA512_H7);
1330
1331         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1332         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1333         req->state_sz = SHA512_DIGEST_SIZE;
1334
1335         return 0;
1336 }
1337
1338 static int safexcel_sha512_digest(struct ahash_request *areq)
1339 {
1340         int ret = safexcel_sha512_init(areq);
1341
1342         if (ret)
1343                 return ret;
1344
1345         return safexcel_ahash_finup(areq);
1346 }
1347
1348 struct safexcel_alg_template safexcel_alg_sha512 = {
1349         .type = SAFEXCEL_ALG_TYPE_AHASH,
1350         .engines = EIP97IES | EIP197B | EIP197D,
1351         .alg.ahash = {
1352                 .init = safexcel_sha512_init,
1353                 .update = safexcel_ahash_update,
1354                 .final = safexcel_ahash_final,
1355                 .finup = safexcel_ahash_finup,
1356                 .digest = safexcel_sha512_digest,
1357                 .export = safexcel_ahash_export,
1358                 .import = safexcel_ahash_import,
1359                 .halg = {
1360                         .digestsize = SHA512_DIGEST_SIZE,
1361                         .statesize = sizeof(struct safexcel_ahash_export_state),
1362                         .base = {
1363                                 .cra_name = "sha512",
1364                                 .cra_driver_name = "safexcel-sha512",
1365                                 .cra_priority = 300,
1366                                 .cra_flags = CRYPTO_ALG_ASYNC |
1367                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1368                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1369                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1370                                 .cra_init = safexcel_ahash_cra_init,
1371                                 .cra_exit = safexcel_ahash_cra_exit,
1372                                 .cra_module = THIS_MODULE,
1373                         },
1374                 },
1375         },
1376 };
1377
1378 static int safexcel_sha384_init(struct ahash_request *areq)
1379 {
1380         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1381         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1382
1383         memset(req, 0, sizeof(*req));
1384
1385         req->state[0] = lower_32_bits(SHA384_H0);
1386         req->state[1] = upper_32_bits(SHA384_H0);
1387         req->state[2] = lower_32_bits(SHA384_H1);
1388         req->state[3] = upper_32_bits(SHA384_H1);
1389         req->state[4] = lower_32_bits(SHA384_H2);
1390         req->state[5] = upper_32_bits(SHA384_H2);
1391         req->state[6] = lower_32_bits(SHA384_H3);
1392         req->state[7] = upper_32_bits(SHA384_H3);
1393         req->state[8] = lower_32_bits(SHA384_H4);
1394         req->state[9] = upper_32_bits(SHA384_H4);
1395         req->state[10] = lower_32_bits(SHA384_H5);
1396         req->state[11] = upper_32_bits(SHA384_H5);
1397         req->state[12] = lower_32_bits(SHA384_H6);
1398         req->state[13] = upper_32_bits(SHA384_H6);
1399         req->state[14] = lower_32_bits(SHA384_H7);
1400         req->state[15] = upper_32_bits(SHA384_H7);
1401
1402         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1403         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

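/*
 * HMAC setkey: precompute the inner/outer (ipad/opad) hash states with
 * the driver's own "safexcel-sha512" transform, so each request only
 * has to hash the message itself.
 */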
static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

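/*
 * HMAC requests reuse the plain hash init and then flip the context to
 * HMAC mode so the engine applies the precomputed ipad/opad states.
 */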
static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	safexcel_sha512_init(areq);
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

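/* The precomputed SHA-384 ipad/opad states are still SHA-512 sized. */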
static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	safexcel_sha384_init(areq);
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

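/*
 * MD5 state is four 32-bit words loaded directly; no 64-bit word
 * splitting is needed, unlike the SHA-384/512 variants above.
 */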
static int safexcel_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	req->state[0] = MD5_H0;
	req->state[1] = MD5_H1;
	req->state[2] = MD5_H2;
	req->state[3] = MD5_H3;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;

	return 0;
}

static int safexcel_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

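/* MD5's plain block size is 64 bytes; <crypto/md5.h> only provides it as MD5_HMAC_BLOCK_SIZE. */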
struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	safexcel_md5_init(areq);
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	return 0;
}

static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}

static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};