// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 alg;
        u8  key_sz;
        bool cbcmac;
        bool do_fallback;
        bool fb_init_done;
        bool fb_do_setkey;

        __le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
        __le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];

        struct crypto_cipher *kaes;
        struct crypto_ahash *fback;
        struct crypto_shash *shpre;
        struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
        bool last_req;
        bool finish;
        bool hmac;
        bool needs_inv;
        bool hmac_zlen;
        bool len_is_le;
        bool not_first;
        bool xcbcmac;

        int nents;
        dma_addr_t result_dma;

        u32 digest;

        u8 state_sz;    /* expected state size, only set once */
        u8 block_sz;    /* block size, only set once */
        u8 digest_sz;   /* output digest size, only set once */
        __le32 state[SHA3_512_BLOCK_SIZE /
                     sizeof(__le32)] __aligned(sizeof(__le32));

        u64 len;
        u64 processed;

        u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
        dma_addr_t cache_dma;
        unsigned int cache_sz;

        u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

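/* Amount of data accepted by update() but not yet submitted to the engine */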
static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
        return req->len - req->processed;
}

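/*
 * Build the EIP197 instruction token for a hash operation: token 0 runs
 * the input through the hash engine, token 1 pads a trailing partial
 * block out to a full 16 bytes when doing CBC-MAC (and is a no-op
 * otherwise), and token 2 inserts the resulting digest into the output.
 */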
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                                u32 input_length, u32 result_length,
                                bool cbcmac)
{
        struct safexcel_token *token =
                (struct safexcel_token *)cdesc->control_data.token;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = input_length;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

        input_length &= 15;
        if (unlikely(cbcmac && input_length)) {
                token[0].stat = 0;
                token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
                token[1].packet_length = 16 - input_length;
                token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
                token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
        } else {
                token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
                eip197_noop_token(&token[1]);
        }

        token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                        EIP197_TOKEN_STAT_LAST_PACKET;
        token[2].packet_length = result_length;
        token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

        eip197_noop_token(&token[3]);
}

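/*
 * Set up the control words of the first command descriptor: algorithm,
 * digest type and context size, plus - for continuations - the saved
 * (inner) digest and block counter from the context record.
 */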
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                     struct safexcel_ahash_req *req,
                                     struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->priv;
        u64 count = 0;

        cdesc->control_data.control0 = ctx->alg;
        cdesc->control_data.control1 = 0;

        /*
         * Copy the input digest if needed, and setup the context
         * fields. Do this now as we need it to setup the first command
         * descriptor.
         */
        if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
                if (req->xcbcmac)
                        memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
                else
                        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

                if (!req->finish && req->xcbcmac)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                else
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                return;
        } else if (!req->processed) {
                /* First - and possibly only - block of basic hash only */
                if (req->finish)
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_RESTART_HASH  |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                else
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_RESTART_HASH   |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                return;
        }

        /* Hash continuation or HMAC, setup (inner) digest from state */
        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

        if (req->finish) {
                /* Compute digest count for hash/HMAC finish operations */
                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    req->hmac_zlen || (req->processed != req->block_sz)) {
                        count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

                        /* This is a hardware limitation, as the
                         * counter must fit into a u32. This represents
                         * a fairly big amount of input data, so we
                         * shouldn't see this.
                         */
                        if (unlikely(count & 0xffffffff00000000ULL)) {
                                dev_warn(priv->dev,
                                         "Input data is too big\n");
                                return;
                        }
                }

                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    /* Special case: zero length HMAC */
                    req->hmac_zlen ||
                    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
                    (req->processed != req->block_sz)) {
                        /* Basic hash continue operation, need digest + cnt */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
                        /* For zero-len HMAC, don't finalize, already padded! */
                        if (req->hmac_zlen)
                                cdesc->control_data.control0 |=
                                        CONTEXT_CONTROL_NO_FINISH_HASH;
                        cdesc->control_data.control1 |=
                                CONTEXT_CONTROL_DIGEST_CNT;
                        ctx->base.ctxr->data[req->state_sz >> 2] =
                                cpu_to_le32(count);
                        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

                        /* Clear zero-length HMAC flag for next operation! */
                        req->hmac_zlen = false;
                } else { /* HMAC */
                        /* Need outer digest for HMAC finalization */
                        memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
                               ctx->opad, req->state_sz);

                        /* Single pass HMAC - no digest count */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_HMAC;
                }
        } else { /* Hash continuation, do not finish yet */
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
                        CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
                        CONTEXT_CONTROL_TYPE_HASH_OUT |
                        CONTEXT_CONTROL_NO_FINISH_HASH;
        }
}

static int safexcel_ahash_enqueue(struct ahash_request *areq);

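/*
 * Post-process a completed hash request: unmap all DMA buffers, copy out
 * the digest and, when HMAC is being faked on top of a basic hash (for
 * engines that cannot do an HMAC continue), requeue the request once more
 * for the outer hash pass.
 */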
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        u64 cache_len;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: result: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (sreq->nents) {
                dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
                sreq->nents = 0;
        }

        if (sreq->result_dma) {
                dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
                                 DMA_FROM_DEVICE);
                sreq->result_dma = 0;
        }

        if (sreq->cache_dma) {
                dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                                 DMA_TO_DEVICE);
                sreq->cache_dma = 0;
                sreq->cache_sz = 0;
        }

        if (sreq->finish) {
                if (sreq->hmac &&
                    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
                        /* Faking HMAC using hash - need to do outer hash */
                        memcpy(sreq->cache, sreq->state,
                               crypto_ahash_digestsize(ahash));

                        memcpy(sreq->state, ctx->opad, sreq->digest_sz);

                        sreq->len = sreq->block_sz +
                                    crypto_ahash_digestsize(ahash);
                        sreq->processed = sreq->block_sz;
                        sreq->hmac = 0;

                        if (priv->flags & EIP197_TRC_CACHE)
                                ctx->base.needs_inv = true;
                        areq->nbytes = 0;
                        safexcel_ahash_enqueue(areq);

                        *should_complete = false; /* Not done yet */
                        return 1;
                }

                if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                             ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
                        /* Undo final XOR with 0xffffffff ... */
                        *(__le32 *)areq->result = ~sreq->state[0];
                } else {
                        memcpy(areq->result, sreq->state,
                               crypto_ahash_digestsize(ahash));
                }
        }

        cache_len = safexcel_queued_len(sreq);
        if (cache_len)
                memcpy(sreq->cache, sreq->cache_next, cache_len);

        *should_complete = true;

        return 1;
}

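/*
 * Turn one ahash request into command and result descriptors on the given
 * ring: the cached partial block (if any) goes out first, then the source
 * scatterlist, while any trailing data that does not fill a whole block
 * is held back in cache_next for a later update() call.
 */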
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                                   int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        struct safexcel_token *dmmy;
        int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
        u64 queued, len;

        queued = safexcel_queued_len(req);
        if (queued <= HASH_CACHE_SIZE)
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->finish && !req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full cache blocks, cache it for the next send call.
                 */
                extra = queued & (HASH_CACHE_SIZE - 1);

                /* If this is not the last request and the queued data
                 * is a multiple of a block, cache the last one for now.
                 */
                if (!extra)
                        extra = HASH_CACHE_SIZE;

                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache_next, extra,
                                   areq->nbytes - extra);

                queued -= extra;

                if (!queued) {
                        *commands = 0;
                        *results = 0;
                        return 0;
                }

                extra = 0;
        }

        if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
                if (unlikely(cache_len < AES_BLOCK_SIZE)) {
                        /*
                         * Cache contains less than 1 full block; complete it.
                         */
                        extra = AES_BLOCK_SIZE - cache_len;
                        if (queued > cache_len) {
                                /* More data follows: borrow bytes */
                                u64 tmp = queued - cache_len;

                                skip = min_t(u64, tmp, extra);
                                sg_pcopy_to_buffer(areq->src,
                                        sg_nents(areq->src),
                                        req->cache + cache_len,
                                        skip, 0);
                        }
                        extra -= skip;
                        memset(req->cache + cache_len + skip, 0, extra);
                        if (!ctx->cbcmac && extra) {
                                // 10- padding for XCBCMAC & CMAC
                                req->cache[cache_len + skip] = 0x80;
                                // HW will use K2 instead of K3 - compensate!
                                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                                        ((__be32 *)req->cache)[i] ^=
                                          cpu_to_be32(le32_to_cpu(
                                            ctx->ipad[i] ^ ctx->ipad[i + 4]));
                        }
                        cache_len = AES_BLOCK_SIZE;
                        queued = queued + extra;
                }

                /* XCBC continue: XOR previous result into 1st word */
                crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
        }

        len = queued;
        /* Add a command descriptor for the cached data, if any */
        if (cache_len) {
                req->cache_dma = dma_map_single(priv->dev, req->cache,
                                                cache_len, DMA_TO_DEVICE);
                if (dma_mapping_error(priv->dev, req->cache_dma))
                        return -EINVAL;

                req->cache_sz = cache_len;
                first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                                 (cache_len == len),
                                                 req->cache_dma, cache_len,
                                                 len, ctx->base.ctxr_dma,
                                                 &dmmy);
                if (IS_ERR(first_cdesc)) {
                        ret = PTR_ERR(first_cdesc);
                        goto unmap_cache;
                }
                n_cdesc++;

                queued -= cache_len;
                if (!queued)
                        goto send_command;
        }

        /* Now handle the current ahash request buffer(s) */
        req->nents = dma_map_sg(priv->dev, areq->src,
                                sg_nents_for_len(areq->src,
                                                 areq->nbytes),
                                DMA_TO_DEVICE);
        if (!req->nents) {
                ret = -ENOMEM;
                goto cdesc_rollback;
        }

        for_each_sg(areq->src, sg, req->nents, i) {
                int sglen = sg_dma_len(sg);

                if (unlikely(sglen <= skip)) {
                        skip -= sglen;
                        continue;
                }

                /* Do not overflow the request */
                if ((queued + skip) <= sglen)
                        sglen = queued;
                else
                        sglen -= skip;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - sglen),
                                           sg_dma_address(sg) + skip, sglen,
                                           len, ctx->base.ctxr_dma, &dmmy);
                if (IS_ERR(cdesc)) {
                        ret = PTR_ERR(cdesc);
                        goto unmap_sg;
                }

                if (!n_cdesc)
                        first_cdesc = cdesc;
                n_cdesc++;

                queued -= sglen;
                if (!queued)
                        break;
                skip = 0;
        }

send_command:
        /* Setup the context options */
        safexcel_context_control(ctx, req, first_cdesc);

        /* Add the token */
        safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

        req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
                                         DMA_FROM_DEVICE);
        if (dma_mapping_error(priv->dev, req->result_dma)) {
                ret = -EINVAL;
                goto unmap_sg;
        }

        /* Add a result descriptor */
        rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                                   req->digest_sz);
        if (IS_ERR(rdesc)) {
                ret = PTR_ERR(rdesc);
                goto unmap_result;
        }

        safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

        req->processed += len - extra;

        *commands = n_cdesc;
        *results = 1;
        return 0;

unmap_result:
        dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
                         DMA_FROM_DEVICE);
unmap_sg:
        if (req->nents) {
                dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
                req->nents = 0;
        }
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
        if (req->cache_dma) {
                dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                                 DMA_TO_DEVICE);
                req->cache_dma = 0;
                req->cache_sz = 0;
        }

        return ret;
}

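/*
 * Completion handler for a context invalidation: free the context record
 * when the transform is exiting, otherwise requeue the request that
 * triggered the invalidation on a freshly selected ring.
 */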
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        int enq_ret;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: invalidate: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;
                return 1;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                                  struct crypto_async_request *async,
                                  bool *should_complete, int *ret)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int err;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

        if (req->needs_inv) {
                req->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async,
                                                 should_complete, ret);
        }

        return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                                   int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int ret;

        ret = safexcel_invalidate_cache(async, ctx->priv,
                                        ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
                               int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (req->needs_inv)
                ret = safexcel_ahash_send_inv(async, ring, commands, results);
        else
                ret = safexcel_ahash_send_req(async, ring, commands, results);

        return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
        struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
        struct safexcel_inv_result result = {};
        int ring = ctx->base.ring;

        memset(req, 0, EIP197_AHASH_REQ_SIZE);

        /* create invalidation request */
        init_completion(&result.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_inv_complete, &result);

        ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
        ctx = crypto_tfm_ctx(req->base.tfm);
        ctx->base.exit_inv = true;
        rctx->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result.completion);

        if (result.error) {
                dev_warn(priv->dev, "hash: completion error (%d)\n",
                         result.error);
                return result.error;
        }

        return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least 1 block size of data in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        u64 cache_len;

        /* cache_len: everything accepted by the driver but not sent yet,
         * i.e. total size handled by update() - last request size - total
         * size handled by send()
         */
        cache_len = safexcel_queued_len(req);

        /*
         * In case there aren't enough bytes to proceed (less than a
         * block size), cache the data until we have enough.
         */
        if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return 0;
        }

        /* We couldn't cache all the data */
        return -E2BIG;
}

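/*
 * Queue a request on one of the rings. A context record is allocated on
 * first use; after that, the record is invalidated whenever the engine's
 * record cache may still hold stale data for this transform.
 */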
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret, ring;

        req->needs_inv = false;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
                     /* invalidate for *any* non-XCBC continuation */
                   ((req->not_first && !req->xcbcmac) ||
                     /* invalidate if (i)digest changed */
                     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
                     /* invalidate for HMAC finish with odigest changed */
                     (req->finish && req->hmac &&
                      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
                             ctx->opad, req->state_sz))))
                        /*
                         * We're still setting needs_inv here, even though it is
                         * cleared right away, because the needs_inv flag can be
                         * set in other functions and we want to keep the same
                         * logic.
                         */
                        ctx->base.needs_inv = true;

                if (ctx->base.needs_inv) {
                        ctx->base.needs_inv = false;
                        req->needs_inv = true;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(areq->base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }
        req->not_first = true;

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        /* If the request is 0 length, do nothing */
        if (!areq->nbytes)
                return 0;

        /* Add request to the cache if it fits */
        ret = safexcel_ahash_cache(areq);

        /* Update total request length */
        req->len += areq->nbytes;

        /* If not all data could fit into the cache, go process the excess.
         * Also go process immediately for an HMAC IV precompute, which
         * will never be finished at all, but needs to be processed anyway.
         */
        if ((ret && !req->finish) || req->last_req)
                return safexcel_ahash_enqueue(areq);

        return 0;
}

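/*
 * Finish off a hash. Zero-length messages and some degenerate MAC cases
 * are answered directly in software, as the engine cannot process them;
 * everything else is queued for the hardware.
 */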
static int safexcel_ahash_final(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->finish = true;

        if (unlikely(!req->len && !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *hash* request:
                 * The HW cannot do 0 length hash, so we provide the correct
                 * result directly here.
                 */
                if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
                        memcpy(areq->result,
                               EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
                }

                return 0;
        } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                            ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
                            req->len == sizeof(u32) && !areq->nbytes)) {
                /* Zero length CRC32 */
                memcpy(areq->result, ctx->ipad, sizeof(u32));
                return 0;
        } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length CBC MAC */
                memset(areq->result, 0, AES_BLOCK_SIZE);
                return 0;
        } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length (X)CBC/CMAC */
                int i;

                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                        ((__be32 *)areq->result)[i] =
                                cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4])); // K3
                areq->result[0] ^= 0x80;                        // 10- padding
                crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
                return 0;
        } else if (unlikely(req->hmac &&
                            (req->len == req->block_sz) &&
                            !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *HMAC* request:
                 * For HMAC, we need to finalize the inner digest
                 * and then perform the outer hash.
                 */

                /* generate pad block in the cache */
                /* start with a hash block of all zeroes */
                memset(req->cache, 0, req->block_sz);
                /* set the first byte to 0x80 to 'append a 1 bit' */
                req->cache[0] = 0x80;
                /* encode the length in bits into the block's trailing length
                 * field; only 2 bytes are ever needed for a single block
                 */
                if (req->len_is_le) {
                        /* Little endian length word (e.g. MD5) */
                        req->cache[req->block_sz-8] = (req->block_sz << 3) &
                                                      255;
                        req->cache[req->block_sz-7] = (req->block_sz >> 5);
                } else {
                        /* Big endian length word (e.g. any SHA) */
                        req->cache[req->block_sz-2] = (req->block_sz >> 5);
                        req->cache[req->block_sz-1] = (req->block_sz << 3) &
                                                      255;
                }

                req->len += req->block_sz; /* plus 1 hash block */

                /* Set special zero-length HMAC flag */
                req->hmac_zlen = true;

                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        } else if (req->hmac) {
                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        }

        return safexcel_ahash_enqueue(areq);
}

static int safexcel_ahash_finup(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        req->finish = true;

        safexcel_ahash_update(areq);
        return safexcel_ahash_final(areq);
}

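/*
 * export/import serialize the intermediate hash state (lengths, digest
 * type, state words and the cached partial block) so that a request can
 * be suspended and later resumed.
 */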
static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_export_state *export = out;

        export->len = req->len;
        export->processed = req->processed;

        export->digest = req->digest;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

        return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        const struct safexcel_ahash_export_state *export = in;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req->len = export->len;
        req->processed = export->processed;

        req->digest = export->digest;

        memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct safexcel_alg_template, alg.ahash);

        ctx->priv = tmpl->priv;
        ctx->base.send = safexcel_ahash_send;
        ctx->base.handle_result = safexcel_handle_result;
        ctx->fb_do_setkey = false;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct safexcel_ahash_req));
        return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_ahash_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

struct safexcel_alg_template safexcel_alg_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA1,
        .alg.ahash = {
                .init = safexcel_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha1_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "safexcel-sha1",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len        = SHA1_BLOCK_SIZE;
        req->processed  = SHA1_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
        struct completion completion;
        int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
        struct safexcel_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}

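/*
 * Standard HMAC key preprocessing (RFC 2104): keys longer than a block
 * are first hashed down to digest size, then the ipad/opad blocks are
 * derived by XORing the zero-padded key with the 0x36/0x5c constants.
 */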
static int safexcel_hmac_init_pad(struct ahash_request *areq,
                                  unsigned int blocksize, const u8 *key,
                                  unsigned int keylen, u8 *ipad, u8 *opad)
{
        struct safexcel_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           safexcel_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                /* Avoid leaking */
                memzero_explicit(keydup, keylen);
                kfree(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}

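/*
 * Run a single ipad/opad block through the hash and export the resulting
 * intermediate state: these are the inner/outer digest precomputes that
 * the engine later loads from the context record.
 */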
static int safexcel_hmac_init_iv(struct ahash_request *areq,
                                 unsigned int blocksize, u8 *pad, void *state)
{
        struct safexcel_ahash_result result;
        struct safexcel_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->hmac = true;
        req->last_req = true;

        ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}

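/*
 * Compute the inner and outer HMAC state precomputes for the given key,
 * using this driver's own basic hash implementations (hence the
 * "safexcel-*" algorithm names passed in by the callers below).
 */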
int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
                         void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}

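/*
 * Store the HMAC precomputes in the transform context, flagging a context
 * record invalidation if the digests changed while a record existed.
 */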
1187 static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1188                                     unsigned int keylen, const char *alg,
1189                                     unsigned int state_sz)
1190 {
1191         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1192         struct safexcel_crypto_priv *priv = ctx->priv;
1193         struct safexcel_ahash_export_state istate, ostate;
1194         int ret;
1195
1196         ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1197         if (ret)
1198                 return ret;
1199
1200         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
1201             (memcmp(ctx->ipad, istate.state, state_sz) ||
1202              memcmp(ctx->opad, ostate.state, state_sz)))
1203                 ctx->base.needs_inv = true;
1204
1205         memcpy(ctx->ipad, &istate.state, state_sz);
1206         memcpy(ctx->opad, &ostate.state, state_sz);
1207
1208         return 0;
1209 }
1210
1211 static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1212                                      unsigned int keylen)
1213 {
1214         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1215                                         SHA1_DIGEST_SIZE);
1216 }
1217
1218 struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1219         .type = SAFEXCEL_ALG_TYPE_AHASH,
1220         .algo_mask = SAFEXCEL_ALG_SHA1,
1221         .alg.ahash = {
1222                 .init = safexcel_hmac_sha1_init,
1223                 .update = safexcel_ahash_update,
1224                 .final = safexcel_ahash_final,
1225                 .finup = safexcel_ahash_finup,
1226                 .digest = safexcel_hmac_sha1_digest,
1227                 .setkey = safexcel_hmac_sha1_setkey,
1228                 .export = safexcel_ahash_export,
1229                 .import = safexcel_ahash_import,
1230                 .halg = {
1231                         .digestsize = SHA1_DIGEST_SIZE,
1232                         .statesize = sizeof(struct safexcel_ahash_export_state),
1233                         .base = {
1234                                 .cra_name = "hmac(sha1)",
1235                                 .cra_driver_name = "safexcel-hmac-sha1",
1236                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1237                                 .cra_flags = CRYPTO_ALG_ASYNC |
1238                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1239                                 .cra_blocksize = SHA1_BLOCK_SIZE,
1240                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1241                                 .cra_init = safexcel_ahash_cra_init,
1242                                 .cra_exit = safexcel_ahash_cra_exit,
1243                                 .cra_module = THIS_MODULE,
1244                         },
1245                 },
1246         },
1247 };
1248
1249 static int safexcel_sha256_init(struct ahash_request *areq)
1250 {
1251         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1252         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1253
1254         memset(req, 0, sizeof(*req));
1255
1256         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1257         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1258         req->state_sz = SHA256_DIGEST_SIZE;
1259         req->digest_sz = SHA256_DIGEST_SIZE;
1260         req->block_sz = SHA256_BLOCK_SIZE;
1261
1262         return 0;
1263 }
1264
1265 static int safexcel_sha256_digest(struct ahash_request *areq)
1266 {
1267         int ret = safexcel_sha256_init(areq);
1268
1269         if (ret)
1270                 return ret;
1271
1272         return safexcel_ahash_finup(areq);
1273 }
1274
1275 struct safexcel_alg_template safexcel_alg_sha256 = {
1276         .type = SAFEXCEL_ALG_TYPE_AHASH,
1277         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1278         .alg.ahash = {
1279                 .init = safexcel_sha256_init,
1280                 .update = safexcel_ahash_update,
1281                 .final = safexcel_ahash_final,
1282                 .finup = safexcel_ahash_finup,
1283                 .digest = safexcel_sha256_digest,
1284                 .export = safexcel_ahash_export,
1285                 .import = safexcel_ahash_import,
1286                 .halg = {
1287                         .digestsize = SHA256_DIGEST_SIZE,
1288                         .statesize = sizeof(struct safexcel_ahash_export_state),
1289                         .base = {
1290                                 .cra_name = "sha256",
1291                                 .cra_driver_name = "safexcel-sha256",
1292                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1293                                 .cra_flags = CRYPTO_ALG_ASYNC |
1294                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1295                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1296                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1297                                 .cra_init = safexcel_ahash_cra_init,
1298                                 .cra_exit = safexcel_ahash_cra_exit,
1299                                 .cra_module = THIS_MODULE,
1300                         },
1301                 },
1302         },
1303 };
1304
1305 static int safexcel_sha224_init(struct ahash_request *areq)
1306 {
1307         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1308         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1309
1310         memset(req, 0, sizeof(*req));
1311
1312         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1313         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1314         req->state_sz = SHA256_DIGEST_SIZE;
1315         req->digest_sz = SHA256_DIGEST_SIZE;
1316         req->block_sz = SHA256_BLOCK_SIZE;
1317
1318         return 0;
1319 }
1320
1321 static int safexcel_sha224_digest(struct ahash_request *areq)
1322 {
1323         int ret = safexcel_sha224_init(areq);
1324
1325         if (ret)
1326                 return ret;
1327
1328         return safexcel_ahash_finup(areq);
1329 }
1330
1331 struct safexcel_alg_template safexcel_alg_sha224 = {
1332         .type = SAFEXCEL_ALG_TYPE_AHASH,
1333         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1334         .alg.ahash = {
1335                 .init = safexcel_sha224_init,
1336                 .update = safexcel_ahash_update,
1337                 .final = safexcel_ahash_final,
1338                 .finup = safexcel_ahash_finup,
1339                 .digest = safexcel_sha224_digest,
1340                 .export = safexcel_ahash_export,
1341                 .import = safexcel_ahash_import,
1342                 .halg = {
1343                         .digestsize = SHA224_DIGEST_SIZE,
1344                         .statesize = sizeof(struct safexcel_ahash_export_state),
1345                         .base = {
1346                                 .cra_name = "sha224",
1347                                 .cra_driver_name = "safexcel-sha224",
1348                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1349                                 .cra_flags = CRYPTO_ALG_ASYNC |
1350                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1351                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1352                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1353                                 .cra_init = safexcel_ahash_cra_init,
1354                                 .cra_exit = safexcel_ahash_cra_exit,
1355                                 .cra_module = THIS_MODULE,
1356                         },
1357                 },
1358         },
1359 };
1360
1361 static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1362                                        unsigned int keylen)
1363 {
1364         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1365                                         SHA256_DIGEST_SIZE);
1366 }
1367
1368 static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1369 {
1370         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1371         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1372
1373         memset(req, 0, sizeof(*req));
1374
1375         /* Start from ipad precompute */
1376         memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
1377         /* The key ^ ipad block has already been hashed */
1378         req->len        = SHA256_BLOCK_SIZE;
1379         req->processed  = SHA256_BLOCK_SIZE;
1380
1381         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1382         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1383         req->state_sz = SHA256_DIGEST_SIZE;
1384         req->digest_sz = SHA256_DIGEST_SIZE;
1385         req->block_sz = SHA256_BLOCK_SIZE;
1386         req->hmac = true;
1387
1388         return 0;
1389 }
1390
1391 static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1392 {
1393         int ret = safexcel_hmac_sha224_init(areq);
1394
1395         if (ret)
1396                 return ret;
1397
1398         return safexcel_ahash_finup(areq);
1399 }
1400
1401 struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1402         .type = SAFEXCEL_ALG_TYPE_AHASH,
1403         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1404         .alg.ahash = {
1405                 .init = safexcel_hmac_sha224_init,
1406                 .update = safexcel_ahash_update,
1407                 .final = safexcel_ahash_final,
1408                 .finup = safexcel_ahash_finup,
1409                 .digest = safexcel_hmac_sha224_digest,
1410                 .setkey = safexcel_hmac_sha224_setkey,
1411                 .export = safexcel_ahash_export,
1412                 .import = safexcel_ahash_import,
1413                 .halg = {
1414                         .digestsize = SHA224_DIGEST_SIZE,
1415                         .statesize = sizeof(struct safexcel_ahash_export_state),
1416                         .base = {
1417                                 .cra_name = "hmac(sha224)",
1418                                 .cra_driver_name = "safexcel-hmac-sha224",
1419                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1420                                 .cra_flags = CRYPTO_ALG_ASYNC |
1421                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1422                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1423                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1424                                 .cra_init = safexcel_ahash_cra_init,
1425                                 .cra_exit = safexcel_ahash_cra_exit,
1426                                 .cra_module = THIS_MODULE,
1427                         },
1428                 },
1429         },
1430 };
1431
1432 static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1433                                        unsigned int keylen)
1434 {
1435         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1436                                         SHA256_DIGEST_SIZE);
1437 }
1438
1439 static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1440 {
1441         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1442         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1443
1444         memset(req, 0, sizeof(*req));
1445
1446         /* Start from ipad precompute */
1447         memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
1448         /* The key ^ ipad block has already been hashed */
1449         req->len        = SHA256_BLOCK_SIZE;
1450         req->processed  = SHA256_BLOCK_SIZE;
1451
1452         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1453         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1454         req->state_sz = SHA256_DIGEST_SIZE;
1455         req->digest_sz = SHA256_DIGEST_SIZE;
1456         req->block_sz = SHA256_BLOCK_SIZE;
1457         req->hmac = true;
1458
1459         return 0;
1460 }
1461
1462 static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1463 {
1464         int ret = safexcel_hmac_sha256_init(areq);
1465
1466         if (ret)
1467                 return ret;
1468
1469         return safexcel_ahash_finup(areq);
1470 }
1471
1472 struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1473         .type = SAFEXCEL_ALG_TYPE_AHASH,
1474         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1475         .alg.ahash = {
1476                 .init = safexcel_hmac_sha256_init,
1477                 .update = safexcel_ahash_update,
1478                 .final = safexcel_ahash_final,
1479                 .finup = safexcel_ahash_finup,
1480                 .digest = safexcel_hmac_sha256_digest,
1481                 .setkey = safexcel_hmac_sha256_setkey,
1482                 .export = safexcel_ahash_export,
1483                 .import = safexcel_ahash_import,
1484                 .halg = {
1485                         .digestsize = SHA256_DIGEST_SIZE,
1486                         .statesize = sizeof(struct safexcel_ahash_export_state),
1487                         .base = {
1488                                 .cra_name = "hmac(sha256)",
1489                                 .cra_driver_name = "safexcel-hmac-sha256",
1490                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1491                                 .cra_flags = CRYPTO_ALG_ASYNC |
1492                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1493                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1494                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1495                                 .cra_init = safexcel_ahash_cra_init,
1496                                 .cra_exit = safexcel_ahash_cra_exit,
1497                                 .cra_module = THIS_MODULE,
1498                         },
1499                 },
1500         },
1501 };
1502
1503 static int safexcel_sha512_init(struct ahash_request *areq)
1504 {
1505         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1506         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1507
1508         memset(req, 0, sizeof(*req));
1509
1510         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1511         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1512         req->state_sz = SHA512_DIGEST_SIZE;
1513         req->digest_sz = SHA512_DIGEST_SIZE;
1514         req->block_sz = SHA512_BLOCK_SIZE;
1515
1516         return 0;
1517 }
1518
1519 static int safexcel_sha512_digest(struct ahash_request *areq)
1520 {
1521         int ret = safexcel_sha512_init(areq);
1522
1523         if (ret)
1524                 return ret;
1525
1526         return safexcel_ahash_finup(areq);
1527 }
1528
1529 struct safexcel_alg_template safexcel_alg_sha512 = {
1530         .type = SAFEXCEL_ALG_TYPE_AHASH,
1531         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1532         .alg.ahash = {
1533                 .init = safexcel_sha512_init,
1534                 .update = safexcel_ahash_update,
1535                 .final = safexcel_ahash_final,
1536                 .finup = safexcel_ahash_finup,
1537                 .digest = safexcel_sha512_digest,
1538                 .export = safexcel_ahash_export,
1539                 .import = safexcel_ahash_import,
1540                 .halg = {
1541                         .digestsize = SHA512_DIGEST_SIZE,
1542                         .statesize = sizeof(struct safexcel_ahash_export_state),
1543                         .base = {
1544                                 .cra_name = "sha512",
1545                                 .cra_driver_name = "safexcel-sha512",
1546                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1547                                 .cra_flags = CRYPTO_ALG_ASYNC |
1548                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1549                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1550                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1551                                 .cra_init = safexcel_ahash_cra_init,
1552                                 .cra_exit = safexcel_ahash_cra_exit,
1553                                 .cra_module = THIS_MODULE,
1554                         },
1555                 },
1556         },
1557 };
1558
1559 static int safexcel_sha384_init(struct ahash_request *areq)
1560 {
1561         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1562         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1563
1564         memset(req, 0, sizeof(*req));
1565
1566         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1567         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1568         req->state_sz = SHA512_DIGEST_SIZE;
1569         req->digest_sz = SHA512_DIGEST_SIZE;
1570         req->block_sz = SHA512_BLOCK_SIZE;
1571
1572         return 0;
1573 }
1574
1575 static int safexcel_sha384_digest(struct ahash_request *areq)
1576 {
1577         int ret = safexcel_sha384_init(areq);
1578
1579         if (ret)
1580                 return ret;
1581
1582         return safexcel_ahash_finup(areq);
1583 }
1584
1585 struct safexcel_alg_template safexcel_alg_sha384 = {
1586         .type = SAFEXCEL_ALG_TYPE_AHASH,
1587         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1588         .alg.ahash = {
1589                 .init = safexcel_sha384_init,
1590                 .update = safexcel_ahash_update,
1591                 .final = safexcel_ahash_final,
1592                 .finup = safexcel_ahash_finup,
1593                 .digest = safexcel_sha384_digest,
1594                 .export = safexcel_ahash_export,
1595                 .import = safexcel_ahash_import,
1596                 .halg = {
1597                         .digestsize = SHA384_DIGEST_SIZE,
1598                         .statesize = sizeof(struct safexcel_ahash_export_state),
1599                         .base = {
1600                                 .cra_name = "sha384",
1601                                 .cra_driver_name = "safexcel-sha384",
1602                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1603                                 .cra_flags = CRYPTO_ALG_ASYNC |
1604                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1605                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1606                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1607                                 .cra_init = safexcel_ahash_cra_init,
1608                                 .cra_exit = safexcel_ahash_cra_exit,
1609                                 .cra_module = THIS_MODULE,
1610                         },
1611                 },
1612         },
1613 };
1614
1615 static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1616                                        unsigned int keylen)
1617 {
1618         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1619                                         SHA512_DIGEST_SIZE);
1620 }
1621
1622 static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1623 {
1624         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1625         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1626
1627         memset(req, 0, sizeof(*req));
1628
1629         /* Start from ipad precompute */
1630         memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1631         /* The key ^ ipad block has already been hashed */
1632         req->len        = SHA512_BLOCK_SIZE;
1633         req->processed  = SHA512_BLOCK_SIZE;
1634
1635         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1636         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1637         req->state_sz = SHA512_DIGEST_SIZE;
1638         req->digest_sz = SHA512_DIGEST_SIZE;
1639         req->block_sz = SHA512_BLOCK_SIZE;
1640         req->hmac = true;
1641
1642         return 0;
1643 }
1644
1645 static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1646 {
1647         int ret = safexcel_hmac_sha512_init(areq);
1648
1649         if (ret)
1650                 return ret;
1651
1652         return safexcel_ahash_finup(areq);
1653 }
1654
1655 struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1656         .type = SAFEXCEL_ALG_TYPE_AHASH,
1657         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1658         .alg.ahash = {
1659                 .init = safexcel_hmac_sha512_init,
1660                 .update = safexcel_ahash_update,
1661                 .final = safexcel_ahash_final,
1662                 .finup = safexcel_ahash_finup,
1663                 .digest = safexcel_hmac_sha512_digest,
1664                 .setkey = safexcel_hmac_sha512_setkey,
1665                 .export = safexcel_ahash_export,
1666                 .import = safexcel_ahash_import,
1667                 .halg = {
1668                         .digestsize = SHA512_DIGEST_SIZE,
1669                         .statesize = sizeof(struct safexcel_ahash_export_state),
1670                         .base = {
1671                                 .cra_name = "hmac(sha512)",
1672                                 .cra_driver_name = "safexcel-hmac-sha512",
1673                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1674                                 .cra_flags = CRYPTO_ALG_ASYNC |
1675                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1676                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1677                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1678                                 .cra_init = safexcel_ahash_cra_init,
1679                                 .cra_exit = safexcel_ahash_cra_exit,
1680                                 .cra_module = THIS_MODULE,
1681                         },
1682                 },
1683         },
1684 };
1685
1686 static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1687                                        unsigned int keylen)
1688 {
1689         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1690                                         SHA512_DIGEST_SIZE);
1691 }
1692
1693 static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1694 {
1695         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1696         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1697
1698         memset(req, 0, sizeof(*req));
1699
1700         /* Start from ipad precompute */
1701         memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1702         /* The key ^ ipad block has already been hashed */
1703         req->len        = SHA512_BLOCK_SIZE;
1704         req->processed  = SHA512_BLOCK_SIZE;
1705
1706         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1707         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1708         req->state_sz = SHA512_DIGEST_SIZE;
1709         req->digest_sz = SHA512_DIGEST_SIZE;
1710         req->block_sz = SHA512_BLOCK_SIZE;
1711         req->hmac = true;
1712
1713         return 0;
1714 }
1715
1716 static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1717 {
1718         int ret = safexcel_hmac_sha384_init(areq);
1719
1720         if (ret)
1721                 return ret;
1722
1723         return safexcel_ahash_finup(areq);
1724 }
1725
1726 struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1727         .type = SAFEXCEL_ALG_TYPE_AHASH,
1728         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1729         .alg.ahash = {
1730                 .init = safexcel_hmac_sha384_init,
1731                 .update = safexcel_ahash_update,
1732                 .final = safexcel_ahash_final,
1733                 .finup = safexcel_ahash_finup,
1734                 .digest = safexcel_hmac_sha384_digest,
1735                 .setkey = safexcel_hmac_sha384_setkey,
1736                 .export = safexcel_ahash_export,
1737                 .import = safexcel_ahash_import,
1738                 .halg = {
1739                         .digestsize = SHA384_DIGEST_SIZE,
1740                         .statesize = sizeof(struct safexcel_ahash_export_state),
1741                         .base = {
1742                                 .cra_name = "hmac(sha384)",
1743                                 .cra_driver_name = "safexcel-hmac-sha384",
1744                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1745                                 .cra_flags = CRYPTO_ALG_ASYNC |
1746                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1747                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1748                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1749                                 .cra_init = safexcel_ahash_cra_init,
1750                                 .cra_exit = safexcel_ahash_cra_exit,
1751                                 .cra_module = THIS_MODULE,
1752                         },
1753                 },
1754         },
1755 };
1756
1757 static int safexcel_md5_init(struct ahash_request *areq)
1758 {
1759         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1760         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1761
1762         memset(req, 0, sizeof(*req));
1763
1764         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1765         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1766         req->state_sz = MD5_DIGEST_SIZE;
1767         req->digest_sz = MD5_DIGEST_SIZE;
1768         req->block_sz = MD5_HMAC_BLOCK_SIZE;
1769
1770         return 0;
1771 }
1772
1773 static int safexcel_md5_digest(struct ahash_request *areq)
1774 {
1775         int ret = safexcel_md5_init(areq);
1776
1777         if (ret)
1778                 return ret;
1779
1780         return safexcel_ahash_finup(areq);
1781 }
1782
1783 struct safexcel_alg_template safexcel_alg_md5 = {
1784         .type = SAFEXCEL_ALG_TYPE_AHASH,
1785         .algo_mask = SAFEXCEL_ALG_MD5,
1786         .alg.ahash = {
1787                 .init = safexcel_md5_init,
1788                 .update = safexcel_ahash_update,
1789                 .final = safexcel_ahash_final,
1790                 .finup = safexcel_ahash_finup,
1791                 .digest = safexcel_md5_digest,
1792                 .export = safexcel_ahash_export,
1793                 .import = safexcel_ahash_import,
1794                 .halg = {
1795                         .digestsize = MD5_DIGEST_SIZE,
1796                         .statesize = sizeof(struct safexcel_ahash_export_state),
1797                         .base = {
1798                                 .cra_name = "md5",
1799                                 .cra_driver_name = "safexcel-md5",
1800                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1801                                 .cra_flags = CRYPTO_ALG_ASYNC |
1802                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1803                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1804                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1805                                 .cra_init = safexcel_ahash_cra_init,
1806                                 .cra_exit = safexcel_ahash_cra_exit,
1807                                 .cra_module = THIS_MODULE,
1808                         },
1809                 },
1810         },
1811 };
1812
1813 static int safexcel_hmac_md5_init(struct ahash_request *areq)
1814 {
1815         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1816         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1817
1818         memset(req, 0, sizeof(*req));
1819
1820         /* Start from ipad precompute */
1821         memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
1822         /* The key ^ ipad block has already been hashed */
1823         req->len        = MD5_HMAC_BLOCK_SIZE;
1824         req->processed  = MD5_HMAC_BLOCK_SIZE;
1825
1826         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1827         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1828         req->state_sz = MD5_DIGEST_SIZE;
1829         req->digest_sz = MD5_DIGEST_SIZE;
1830         req->block_sz = MD5_HMAC_BLOCK_SIZE;
1831         req->len_is_le = true; /* MD5 pads with a little-endian length, unlike SHA */
1832         req->hmac = true;
1833
1834         return 0;
1835 }
1836
1837 static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1838                                      unsigned int keylen)
1839 {
1840         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1841                                         MD5_DIGEST_SIZE);
1842 }
1843
1844 static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1845 {
1846         int ret = safexcel_hmac_md5_init(areq);
1847
1848         if (ret)
1849                 return ret;
1850
1851         return safexcel_ahash_finup(areq);
1852 }
1853
1854 struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1855         .type = SAFEXCEL_ALG_TYPE_AHASH,
1856         .algo_mask = SAFEXCEL_ALG_MD5,
1857         .alg.ahash = {
1858                 .init = safexcel_hmac_md5_init,
1859                 .update = safexcel_ahash_update,
1860                 .final = safexcel_ahash_final,
1861                 .finup = safexcel_ahash_finup,
1862                 .digest = safexcel_hmac_md5_digest,
1863                 .setkey = safexcel_hmac_md5_setkey,
1864                 .export = safexcel_ahash_export,
1865                 .import = safexcel_ahash_import,
1866                 .halg = {
1867                         .digestsize = MD5_DIGEST_SIZE,
1868                         .statesize = sizeof(struct safexcel_ahash_export_state),
1869                         .base = {
1870                                 .cra_name = "hmac(md5)",
1871                                 .cra_driver_name = "safexcel-hmac-md5",
1872                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1873                                 .cra_flags = CRYPTO_ALG_ASYNC |
1874                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1875                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1876                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1877                                 .cra_init = safexcel_ahash_cra_init,
1878                                 .cra_exit = safexcel_ahash_cra_exit,
1879                                 .cra_module = THIS_MODULE,
1880                         },
1881                 },
1882         },
1883 };
1884
1885 static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1886 {
1887         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1888         int ret = safexcel_ahash_cra_init(tfm);
1889
1890         /* Default 'key' is all zeroes */
1891         memset(ctx->ipad, 0, sizeof(u32));
1892         return ret;
1893 }
1894
1895 static int safexcel_crc32_init(struct ahash_request *areq)
1896 {
1897         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1898         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1899
1900         memset(req, 0, sizeof(*req));
1901
1902         /* Start from loaded key */
1903         req->state[0]   = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
1904         /* Set processed to non-zero to enable invalidation detection */
1905         req->len        = sizeof(u32);
1906         req->processed  = sizeof(u32);
1907
1908         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1909         req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1910         req->state_sz = sizeof(u32);
1911         req->digest_sz = sizeof(u32);
1912         req->block_sz = sizeof(u32);
1913
1914         return 0;
1915 }
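
/*
 * Note: the optional "key" is the initial CRC value.  The init routine
 * loads its bitwise complement, so the all-zeroes default key yields
 * the conventional 0xFFFFFFFF preload of the CRC-32 register.
 */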
1916
1917 static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1918                                  unsigned int keylen)
1919 {
1920         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1921
1922         if (keylen != sizeof(u32))
1923                 return -EINVAL;
1924
1925         memcpy(ctx->ipad, key, sizeof(u32));
1926         return 0;
1927 }
1928
1929 static int safexcel_crc32_digest(struct ahash_request *areq)
1930 {
1931         return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1932 }
1933
1934 struct safexcel_alg_template safexcel_alg_crc32 = {
1935         .type = SAFEXCEL_ALG_TYPE_AHASH,
1936         .algo_mask = 0,
1937         .alg.ahash = {
1938                 .init = safexcel_crc32_init,
1939                 .update = safexcel_ahash_update,
1940                 .final = safexcel_ahash_final,
1941                 .finup = safexcel_ahash_finup,
1942                 .digest = safexcel_crc32_digest,
1943                 .setkey = safexcel_crc32_setkey,
1944                 .export = safexcel_ahash_export,
1945                 .import = safexcel_ahash_import,
1946                 .halg = {
1947                         .digestsize = sizeof(u32),
1948                         .statesize = sizeof(struct safexcel_ahash_export_state),
1949                         .base = {
1950                                 .cra_name = "crc32",
1951                                 .cra_driver_name = "safexcel-crc32",
1952                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1953                                 .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1954                                              CRYPTO_ALG_ASYNC |
1955                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1956                                 .cra_blocksize = 1,
1957                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1958                                 .cra_init = safexcel_crc32_cra_init,
1959                                 .cra_exit = safexcel_ahash_cra_exit,
1960                                 .cra_module = THIS_MODULE,
1961                         },
1962                 },
1963         },
1964 };
1965
1966 static int safexcel_cbcmac_init(struct ahash_request *areq)
1967 {
1968         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1969         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1970
1971         memset(req, 0, sizeof(*req));
1972
1973         /* Start from loaded keys */
1974         memcpy(req->state, ctx->ipad, ctx->key_sz);
1975         /* Set processed to non-zero to enable invalidation detection */
1976         req->len        = AES_BLOCK_SIZE;
1977         req->processed  = AES_BLOCK_SIZE;
1978
1979         req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
1980         req->state_sz = ctx->key_sz;
1981         req->digest_sz = AES_BLOCK_SIZE;
1982         req->block_sz = AES_BLOCK_SIZE;
1983         req->xcbcmac  = true;
1984
1985         return 0;
1986 }
1987
1988 static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
1989                                  unsigned int len)
1990 {
1991         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1992         struct crypto_aes_ctx aes;
1993         int ret, i;
1994
1995         ret = aes_expandkey(&aes, key, len);
1996         if (ret)
1997                 return ret;
1998
1999         memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
2000         for (i = 0; i < len / sizeof(u32); i++)
2001                 ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);
2002
2003         if (len == AES_KEYSIZE_192) {
2004                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2005                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2006         } else if (len == AES_KEYSIZE_256) {
2007                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2008                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2009         } else {
2010                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2011                 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2012         }
2013         ctx->cbcmac  = true;
2014
2015         memzero_explicit(&aes, sizeof(aes));
2016         return 0;
2017 }
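
/*
 * The context image built in ctx->ipad is shared with the XCBC and
 * CMAC variants below: two AES-block-sized subkey slots followed by
 * the AES key itself (hence the "i + 8" word offset above).  Plain
 * CBC-MAC leaves both subkey slots zeroed, which is presumably why
 * the same XCBC context layout can be reused for all three modes.
 */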
2018
2019 static int safexcel_cbcmac_digest(struct ahash_request *areq)
2020 {
2021         return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2022 }
2023
2024 struct safexcel_alg_template safexcel_alg_cbcmac = {
2025         .type = SAFEXCEL_ALG_TYPE_AHASH,
2026         .algo_mask = 0,
2027         .alg.ahash = {
2028                 .init = safexcel_cbcmac_init,
2029                 .update = safexcel_ahash_update,
2030                 .final = safexcel_ahash_final,
2031                 .finup = safexcel_ahash_finup,
2032                 .digest = safexcel_cbcmac_digest,
2033                 .setkey = safexcel_cbcmac_setkey,
2034                 .export = safexcel_ahash_export,
2035                 .import = safexcel_ahash_import,
2036                 .halg = {
2037                         .digestsize = AES_BLOCK_SIZE,
2038                         .statesize = sizeof(struct safexcel_ahash_export_state),
2039                         .base = {
2040                                 .cra_name = "cbcmac(aes)",
2041                                 .cra_driver_name = "safexcel-cbcmac-aes",
2042                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2043                                 .cra_flags = CRYPTO_ALG_ASYNC |
2044                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2045                                 .cra_blocksize = 1,
2046                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2047                                 .cra_init = safexcel_ahash_cra_init,
2048                                 .cra_exit = safexcel_ahash_cra_exit,
2049                                 .cra_module = THIS_MODULE,
2050                         },
2051                 },
2052         },
2053 };
2054
2055 static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2056                                  unsigned int len)
2057 {
2058         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2059         struct crypto_aes_ctx aes;
2060         u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2061         int ret, i;
2062
2063         ret = aes_expandkey(&aes, key, len);
2064         if (ret)
2065                 return ret;
2066
2067         /* precompute the XCBC key material */
2068         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2069         crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2070                                 CRYPTO_TFM_REQ_MASK);
2071         ret = crypto_cipher_setkey(ctx->kaes, key, len);
2072         if (ret)
2073                 return ret;
2074
2075         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2076                 "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2077         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2078                 "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2079         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2080                 "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2081         for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2082                 ctx->ipad[i] =
2083                         cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));
2084
2085         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2086         crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2087                                 CRYPTO_TFM_REQ_MASK);
2088         ret = crypto_cipher_setkey(ctx->kaes,
2089                                    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2090                                    AES_MIN_KEY_SIZE);
2091         if (ret)
2092                 return ret;
2093
2094         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2095         ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2096         ctx->cbcmac = false;
2097
2098         memzero_explicit(&aes, sizeof(aes));
2099         return 0;
2100 }
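
/*
 * The derivation above follows RFC 3566: K1 = E_K(0x01^16),
 * K2 = E_K(0x02^16) and K3 = E_K(0x03^16).  The hardware context gets
 * K2 || K3 || K1 (byte-swapped into ctx->ipad), while ctx->kaes is
 * re-keyed with K1 for the driver's own software handling of the
 * final message block.
 */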
2101
2102 static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2103 {
2104         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2105
2106         safexcel_ahash_cra_init(tfm);
2107         ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2108         return PTR_ERR_OR_ZERO(ctx->kaes);
2109 }
2110
2111 static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2112 {
2113         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2114
2115         crypto_free_cipher(ctx->kaes);
2116         safexcel_ahash_cra_exit(tfm);
2117 }
2118
2119 struct safexcel_alg_template safexcel_alg_xcbcmac = {
2120         .type = SAFEXCEL_ALG_TYPE_AHASH,
2121         .algo_mask = 0,
2122         .alg.ahash = {
2123                 .init = safexcel_cbcmac_init,
2124                 .update = safexcel_ahash_update,
2125                 .final = safexcel_ahash_final,
2126                 .finup = safexcel_ahash_finup,
2127                 .digest = safexcel_cbcmac_digest,
2128                 .setkey = safexcel_xcbcmac_setkey,
2129                 .export = safexcel_ahash_export,
2130                 .import = safexcel_ahash_import,
2131                 .halg = {
2132                         .digestsize = AES_BLOCK_SIZE,
2133                         .statesize = sizeof(struct safexcel_ahash_export_state),
2134                         .base = {
2135                                 .cra_name = "xcbc(aes)",
2136                                 .cra_driver_name = "safexcel-xcbc-aes",
2137                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2138                                 .cra_flags = CRYPTO_ALG_ASYNC |
2139                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2140                                 .cra_blocksize = AES_BLOCK_SIZE,
2141                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2142                                 .cra_init = safexcel_xcbcmac_cra_init,
2143                                 .cra_exit = safexcel_xcbcmac_cra_exit,
2144                                 .cra_module = THIS_MODULE,
2145                         },
2146                 },
2147         },
2148 };
2149
2150 static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2151                                 unsigned int len)
2152 {
2153         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2154         struct crypto_aes_ctx aes;
2155         __be64 consts[4];
2156         u64 _const[2];
2157         u8 msb_mask, gfmask;
2158         int ret, i;
2159
2160         ret = aes_expandkey(&aes, key, len);
2161         if (ret)
2162                 return ret;
2163
2164         for (i = 0; i < len / sizeof(u32); i++)
2165                 ctx->ipad[i + 8] =
2166                         cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));
2167
2168         /* precompute the CMAC key material */
2169         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2170         crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2171                                 CRYPTO_TFM_REQ_MASK);
2172         ret = crypto_cipher_setkey(ctx->kaes, key, len);
2173         if (ret)
2174                 return ret;
2175
2176         /* code below borrowed from crypto/cmac.c */
2177         /* encrypt the zero block */
2178         memset(consts, 0, AES_BLOCK_SIZE);
2179         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2180
2181         gfmask = 0x87;
2182         _const[0] = be64_to_cpu(consts[1]);
2183         _const[1] = be64_to_cpu(consts[0]);
2184
2185         /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2186         for (i = 0; i < 4; i += 2) {
2187                 msb_mask = ((s64)_const[1] >> 63) & gfmask;
2188                 _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2189                 _const[0] = (_const[0] << 1) ^ msb_mask;
2190
2191                 consts[i + 0] = cpu_to_be64(_const[1]);
2192                 consts[i + 1] = cpu_to_be64(_const[0]);
2193         }
2194         /* end of code borrowed from crypto/cmac.c */
2195
2196         for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2197                 ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);
2198
2199         if (len == AES_KEYSIZE_192) {
2200                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2201                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2202         } else if (len == AES_KEYSIZE_256) {
2203                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2204                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2205         } else {
2206                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2207                 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2208         }
2209         ctx->cbcmac = false;
2210
2211         memzero_explicit(&aes, sizeof(aes));
2212         return 0;
2213 }
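
/*
 * Subkey generation above matches NIST SP 800-38B / crypto/cmac.c:
 * L = E_K(0^128), K1 = L * x and K2 = L * x^2 in GF(2^128) with the
 * reduction polynomial x^128 + x^7 + x^2 + x + 1 (hence gfmask 0x87).
 * Worked example: if L = 0x80000000000000000000000000000000, shifting
 * left drops the set MSB, so K1 = 0x00000000000000000000000000000087.
 */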
2214
2215 struct safexcel_alg_template safexcel_alg_cmac = {
2216         .type = SAFEXCEL_ALG_TYPE_AHASH,
2217         .algo_mask = 0,
2218         .alg.ahash = {
2219                 .init = safexcel_cbcmac_init,
2220                 .update = safexcel_ahash_update,
2221                 .final = safexcel_ahash_final,
2222                 .finup = safexcel_ahash_finup,
2223                 .digest = safexcel_cbcmac_digest,
2224                 .setkey = safexcel_cmac_setkey,
2225                 .export = safexcel_ahash_export,
2226                 .import = safexcel_ahash_import,
2227                 .halg = {
2228                         .digestsize = AES_BLOCK_SIZE,
2229                         .statesize = sizeof(struct safexcel_ahash_export_state),
2230                         .base = {
2231                                 .cra_name = "cmac(aes)",
2232                                 .cra_driver_name = "safexcel-cmac-aes",
2233                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2234                                 .cra_flags = CRYPTO_ALG_ASYNC |
2235                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2236                                 .cra_blocksize = AES_BLOCK_SIZE,
2237                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2238                                 .cra_init = safexcel_xcbcmac_cra_init,
2239                                 .cra_exit = safexcel_xcbcmac_cra_exit,
2240                                 .cra_module = THIS_MODULE,
2241                         },
2242                 },
2243         },
2244 };
2245
2246 static int safexcel_sm3_init(struct ahash_request *areq)
2247 {
2248         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2249         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2250
2251         memset(req, 0, sizeof(*req));
2252
2253         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2254         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2255         req->state_sz = SM3_DIGEST_SIZE;
2256         req->digest_sz = SM3_DIGEST_SIZE;
2257         req->block_sz = SM3_BLOCK_SIZE;
2258
2259         return 0;
2260 }
2261
2262 static int safexcel_sm3_digest(struct ahash_request *areq)
2263 {
2264         int ret = safexcel_sm3_init(areq);
2265
2266         if (ret)
2267                 return ret;
2268
2269         return safexcel_ahash_finup(areq);
2270 }
2271
2272 struct safexcel_alg_template safexcel_alg_sm3 = {
2273         .type = SAFEXCEL_ALG_TYPE_AHASH,
2274         .algo_mask = SAFEXCEL_ALG_SM3,
2275         .alg.ahash = {
2276                 .init = safexcel_sm3_init,
2277                 .update = safexcel_ahash_update,
2278                 .final = safexcel_ahash_final,
2279                 .finup = safexcel_ahash_finup,
2280                 .digest = safexcel_sm3_digest,
2281                 .export = safexcel_ahash_export,
2282                 .import = safexcel_ahash_import,
2283                 .halg = {
2284                         .digestsize = SM3_DIGEST_SIZE,
2285                         .statesize = sizeof(struct safexcel_ahash_export_state),
2286                         .base = {
2287                                 .cra_name = "sm3",
2288                                 .cra_driver_name = "safexcel-sm3",
2289                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2290                                 .cra_flags = CRYPTO_ALG_ASYNC |
2291                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2292                                 .cra_blocksize = SM3_BLOCK_SIZE,
2293                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2294                                 .cra_init = safexcel_ahash_cra_init,
2295                                 .cra_exit = safexcel_ahash_cra_exit,
2296                                 .cra_module = THIS_MODULE,
2297                         },
2298                 },
2299         },
2300 };
2301
2302 static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2303                                     unsigned int keylen)
2304 {
2305         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2306                                         SM3_DIGEST_SIZE);
2307 }
2308
2309 static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2310 {
2311         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2312         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2313
2314         memset(req, 0, sizeof(*req));
2315
2316         /* Start from ipad precompute */
2317         memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
2318         /* The key ^ ipad block has already been hashed */
2319         req->len        = SM3_BLOCK_SIZE;
2320         req->processed  = SM3_BLOCK_SIZE;
2321
2322         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2323         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2324         req->state_sz = SM3_DIGEST_SIZE;
2325         req->digest_sz = SM3_DIGEST_SIZE;
2326         req->block_sz = SM3_BLOCK_SIZE;
2327         req->hmac = true;
2328
2329         return 0;
2330 }
2331
2332 static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2333 {
2334         int ret = safexcel_hmac_sm3_init(areq);
2335
2336         if (ret)
2337                 return ret;
2338
2339         return safexcel_ahash_finup(areq);
2340 }
2341
2342 struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2343         .type = SAFEXCEL_ALG_TYPE_AHASH,
2344         .algo_mask = SAFEXCEL_ALG_SM3,
2345         .alg.ahash = {
2346                 .init = safexcel_hmac_sm3_init,
2347                 .update = safexcel_ahash_update,
2348                 .final = safexcel_ahash_final,
2349                 .finup = safexcel_ahash_finup,
2350                 .digest = safexcel_hmac_sm3_digest,
2351                 .setkey = safexcel_hmac_sm3_setkey,
2352                 .export = safexcel_ahash_export,
2353                 .import = safexcel_ahash_import,
2354                 .halg = {
2355                         .digestsize = SM3_DIGEST_SIZE,
2356                         .statesize = sizeof(struct safexcel_ahash_export_state),
2357                         .base = {
2358                                 .cra_name = "hmac(sm3)",
2359                                 .cra_driver_name = "safexcel-hmac-sm3",
2360                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2361                                 .cra_flags = CRYPTO_ALG_ASYNC |
2362                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2363                                 .cra_blocksize = SM3_BLOCK_SIZE,
2364                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2365                                 .cra_init = safexcel_ahash_cra_init,
2366                                 .cra_exit = safexcel_ahash_cra_exit,
2367                                 .cra_module = THIS_MODULE,
2368                         },
2369                 },
2370         },
2371 };
2372
2373 static int safexcel_sha3_224_init(struct ahash_request *areq)
2374 {
2375         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2376         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2377         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2378
2379         memset(req, 0, sizeof(*req));
2380
2381         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2382         req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2383         req->state_sz = SHA3_224_DIGEST_SIZE;
2384         req->digest_sz = SHA3_224_DIGEST_SIZE;
2385         req->block_sz = SHA3_224_BLOCK_SIZE;
2386         ctx->do_fallback = false;
2387         ctx->fb_init_done = false;
2388         return 0;
2389 }
2390
2391 static int safexcel_sha3_fbcheck(struct ahash_request *req)
2392 {
2393         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2394         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2395         struct ahash_request *subreq = ahash_request_ctx(req);
2396         int ret = 0;
2397
2398         if (ctx->do_fallback) {
2399                 ahash_request_set_tfm(subreq, ctx->fback);
2400                 ahash_request_set_callback(subreq, req->base.flags,
2401                                            req->base.complete, req->base.data);
2402                 ahash_request_set_crypt(subreq, req->src, req->result,
2403                                         req->nbytes);
2404                 if (!ctx->fb_init_done) {
2405                         if (ctx->fb_do_setkey) {
2406                                 /* Set fallback cipher HMAC key */
2407                                 u8 key[SHA3_224_BLOCK_SIZE];
2408
2409                                 memcpy(key, ctx->ipad,
2410                                        crypto_ahash_blocksize(ctx->fback) / 2);
2411                                 memcpy(key +
2412                                        crypto_ahash_blocksize(ctx->fback) / 2,
2413                                        ctx->opad,
2414                                        crypto_ahash_blocksize(ctx->fback) / 2);
2415                                 ret = crypto_ahash_setkey(ctx->fback, key,
2416                                         crypto_ahash_blocksize(ctx->fback));
2417                                 memzero_explicit(key,
2418                                         crypto_ahash_blocksize(ctx->fback));
2419                                 ctx->fb_do_setkey = false;
2420                         }
2421                         ret = ret ?: crypto_ahash_init(subreq);
2422                         ctx->fb_init_done = !ret; /* redo on error */
2423                 }
2424         }
2425         return ret;
2426 }
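
/*
 * The engine can only hash SHA3 in one go, so anything incremental
 * (update, export/import) or a zero-length request is redirected to a
 * software fallback.  fbcheck lazily initializes that fallback and,
 * for the HMAC variants, re-keys it from the two half-blocks held in
 * ctx->ipad/ctx->opad.
 */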
2427
2428 static int safexcel_sha3_update(struct ahash_request *req)
2429 {
2430         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2431         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2432         struct ahash_request *subreq = ahash_request_ctx(req);
2433
2434         ctx->do_fallback = true;
2435         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2436 }
2437
2438 static int safexcel_sha3_final(struct ahash_request *req)
2439 {
2440         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2441         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2442         struct ahash_request *subreq = ahash_request_ctx(req);
2443
2444         ctx->do_fallback = true;
2445         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2446 }
2447
2448 static int safexcel_sha3_finup(struct ahash_request *req)
2449 {
2450         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2451         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2452         struct ahash_request *subreq = ahash_request_ctx(req);
2453
2454         ctx->do_fallback |= !req->nbytes;
2455         if (ctx->do_fallback)
2456                 /* An update or export/import occurred, or len is 0: HW cannot handle this */
2457                 return safexcel_sha3_fbcheck(req) ?:
2458                        crypto_ahash_finup(subreq);
2459         else
2460                 return safexcel_ahash_finup(req);
2461 }
2462
2463 static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2464 {
2465         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2466         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2467         struct ahash_request *subreq = ahash_request_ctx(req);
2468
2469         ctx->do_fallback = true;
2470         ctx->fb_init_done = false;
2471         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2472 }
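
/*
 * A fallback digest deliberately clears fb_init_done: digest must
 * behave like a fresh init+finup, so the fallback is re-initialized
 * (and re-keyed if necessary) even on a previously used tfm.
 */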
2473
2474 static int safexcel_sha3_224_digest(struct ahash_request *req)
2475 {
2476         if (req->nbytes)
2477                 return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2478
2479         /* HW cannot do zero length hash, use fallback instead */
2480         return safexcel_sha3_digest_fallback(req);
2481 }
2482
2483 static int safexcel_sha3_export(struct ahash_request *req, void *out)
2484 {
2485         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2486         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2487         struct ahash_request *subreq = ahash_request_ctx(req);
2488
2489         ctx->do_fallback = true;
2490         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2491 }
2492
2493 static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2494 {
2495         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2496         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2497         struct ahash_request *subreq = ahash_request_ctx(req);
2498
2499         ctx->do_fallback = true;
2500         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2502 }
2503
2504 static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2505 {
2506         struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2507         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2508
2509         safexcel_ahash_cra_init(tfm);
2510
2511         /* Allocate fallback implementation */
2512         ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2513                                         CRYPTO_ALG_ASYNC |
2514                                         CRYPTO_ALG_NEED_FALLBACK);
2515         if (IS_ERR(ctx->fback))
2516                 return PTR_ERR(ctx->fback);
2517
2518         /* Adopt the statesize of the fallback algorithm */
2519         crypto_hash_alg_common(ahash)->statesize =
2520                 crypto_ahash_statesize(ctx->fback);
2521         crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2522                                             sizeof(struct ahash_request) +
2523                                             crypto_ahash_reqsize(ctx->fback)));
2524         return 0;
2525 }
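
/*
 * The tfm adopts the fallback's statesize and sizes its request
 * context to hold either the driver's own request state or a nested
 * fallback request, whichever is larger, so that export/import blobs
 * stay compatible with the software implementation.
 */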
2526
2527 static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2528 {
2529         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2530
2531         crypto_free_ahash(ctx->fback);
2532         safexcel_ahash_cra_exit(tfm);
2533 }
2534
2535 struct safexcel_alg_template safexcel_alg_sha3_224 = {
2536         .type = SAFEXCEL_ALG_TYPE_AHASH,
2537         .algo_mask = SAFEXCEL_ALG_SHA3,
2538         .alg.ahash = {
2539                 .init = safexcel_sha3_224_init,
2540                 .update = safexcel_sha3_update,
2541                 .final = safexcel_sha3_final,
2542                 .finup = safexcel_sha3_finup,
2543                 .digest = safexcel_sha3_224_digest,
2544                 .export = safexcel_sha3_export,
2545                 .import = safexcel_sha3_import,
2546                 .halg = {
2547                         .digestsize = SHA3_224_DIGEST_SIZE,
2548                         .statesize = sizeof(struct safexcel_ahash_export_state),
2549                         .base = {
2550                                 .cra_name = "sha3-224",
2551                                 .cra_driver_name = "safexcel-sha3-224",
2552                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2553                                 .cra_flags = CRYPTO_ALG_ASYNC |
2554                                              CRYPTO_ALG_KERN_DRIVER_ONLY |
2555                                              CRYPTO_ALG_NEED_FALLBACK,
2556                                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2557                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2558                                 .cra_init = safexcel_sha3_cra_init,
2559                                 .cra_exit = safexcel_sha3_cra_exit,
2560                                 .cra_module = THIS_MODULE,
2561                         },
2562                 },
2563         },
2564 };
2565
2566 static int safexcel_sha3_256_init(struct ahash_request *areq)
2567 {
2568         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2569         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2570         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2571
2572         memset(req, 0, sizeof(*req));
2573
2574         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2575         req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2576         req->state_sz = SHA3_256_DIGEST_SIZE;
2577         req->digest_sz = SHA3_256_DIGEST_SIZE;
2578         req->block_sz = SHA3_256_BLOCK_SIZE;
2579         ctx->do_fallback = false;
2580         ctx->fb_init_done = false;
2581         return 0;
2582 }
2583
2584 static int safexcel_sha3_256_digest(struct ahash_request *req)
2585 {
2586         if (req->nbytes)
2587                 return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2588
2589         /* HW cannot do zero length hash, use fallback instead */
2590         return safexcel_sha3_digest_fallback(req);
2591 }
2592
2593 struct safexcel_alg_template safexcel_alg_sha3_256 = {
2594         .type = SAFEXCEL_ALG_TYPE_AHASH,
2595         .algo_mask = SAFEXCEL_ALG_SHA3,
2596         .alg.ahash = {
2597                 .init = safexcel_sha3_256_init,
2598                 .update = safexcel_sha3_update,
2599                 .final = safexcel_sha3_final,
2600                 .finup = safexcel_sha3_finup,
2601                 .digest = safexcel_sha3_256_digest,
2602                 .export = safexcel_sha3_export,
2603                 .import = safexcel_sha3_import,
2604                 .halg = {
2605                         .digestsize = SHA3_256_DIGEST_SIZE,
2606                         .statesize = sizeof(struct safexcel_ahash_export_state),
2607                         .base = {
2608                                 .cra_name = "sha3-256",
2609                                 .cra_driver_name = "safexcel-sha3-256",
2610                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2611                                 .cra_flags = CRYPTO_ALG_ASYNC |
2612                                              CRYPTO_ALG_KERN_DRIVER_ONLY |
2613                                              CRYPTO_ALG_NEED_FALLBACK,
2614                                 .cra_blocksize = SHA3_256_BLOCK_SIZE,
2615                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2616                                 .cra_init = safexcel_sha3_cra_init,
2617                                 .cra_exit = safexcel_sha3_cra_exit,
2618                                 .cra_module = THIS_MODULE,
2619                         },
2620                 },
2621         },
2622 };

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_384_DIGEST_SIZE;
        req->digest_sz = SHA3_384_DIGEST_SIZE;
        req->block_sz = SHA3_384_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_384_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_384_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-384",
                                .cra_driver_name = "safexcel-sha3-384",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_512_DIGEST_SIZE;
        req->digest_sz = SHA3_512_DIGEST_SIZE;
        req->block_sz = SHA3_512_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_512_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_512_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-512",
                                .cra_driver_name = "safexcel-sha3-512",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = safexcel_sha3_cra_init(tfm);
        if (ret)
                return ret;

        /* Allocate precalc basic digest implementation */
        ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ctx->shpre))
                return PTR_ERR(ctx->shpre);

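        /*
         * struct shash_desc is a fixed header followed by a
         * variable-size operational state, so reserve
         * crypto_shash_descsize() extra bytes for whichever sha3
         * implementation was resolved above.
         */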
        ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
                              crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
        if (!ctx->shdesc) {
                crypto_free_shash(ctx->shpre);
                return -ENOMEM;
        }
        ctx->shdesc->tfm = ctx->shpre;
        return 0;
}

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_ahash(ctx->fback);
        crypto_free_shash(ctx->shpre);
        kfree(ctx->shdesc);
        safexcel_ahash_cra_exit(tfm);
}

static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        int ret = 0;

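        /*
         * Resulting key layout (taking hmac(sha3-256) and its 136-byte
         * block as a worked example): ipad holds key bytes [0, 68) and
         * opad holds key bytes [68, 136), each half zero-padded up to
         * blocksize / 2 below.
         */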
        if (keylen > crypto_ahash_blocksize(tfm)) {
                /*
                 * If the key is larger than the blocksize, then hash it
                 * first using the preallocated basic digest implementation
                 */
                ret = crypto_shash_digest(ctx->shdesc, key, keylen,
                                          (u8 *)ctx->ipad);
                keylen = crypto_shash_digestsize(ctx->shpre);

                /*
                 * If the digest is larger than half the blocksize, we need to
                 * move the rest to opad due to the way our HMAC infra works.
                 */
                if (keylen > crypto_ahash_blocksize(tfm) / 2)
                        /* Buffers overlap: use memmove instead of memcpy! */
                        memmove(ctx->opad,
                                (u8 *)ctx->ipad +
                                        crypto_ahash_blocksize(tfm) / 2,
                                keylen - crypto_ahash_blocksize(tfm) / 2);
        } else {
                /*
                 * Copy the key to our ipad & opad buffers.
                 * Note that ipad and opad each contain one half of the key,
                 * to match the existing HMAC driver infrastructure.
                 */
                if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
                        memcpy(ctx->ipad, key, keylen);
                } else {
                        memcpy(ctx->ipad, key,
                               crypto_ahash_blocksize(tfm) / 2);
                        memcpy(ctx->opad,
                               key + crypto_ahash_blocksize(tfm) / 2,
                               keylen - crypto_ahash_blocksize(tfm) / 2);
                }
        }

        /* Pad key with zeroes */
        if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
                memset((u8 *)ctx->ipad + keylen, 0,
                       crypto_ahash_blocksize(tfm) / 2 - keylen);
                memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
        } else {
                memset((u8 *)ctx->opad + keylen -
                       crypto_ahash_blocksize(tfm) / 2, 0,
                       crypto_ahash_blocksize(tfm) - keylen);
        }

        /* If doing fallback, still need to set the new key! */
        ctx->fb_do_setkey = true;
        return ret;
}

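/*
 * Note on the four HMAC init routines below: each copies one half of
 * the key into the request state and preloads len == processed ==
 * blocksize, so the engine presumably treats the keyed first block as
 * already absorbed. state_sz is blocksize / 2 because only the ipad
 * half travels with the request; the opad half stays behind in the
 * tfm context.
 */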
static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_224_BLOCK_SIZE;
        req->processed  = SHA3_224_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_224_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_224_DIGEST_SIZE;
        req->block_sz = SHA3_224_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_224_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_224_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_224_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-224)",
                                .cra_driver_name = "safexcel-hmac-sha3-224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_224_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_256_BLOCK_SIZE;
        req->processed  = SHA3_256_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_256_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_256_DIGEST_SIZE;
        req->block_sz = SHA3_256_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_256_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_256_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_256_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-256)",
                                .cra_driver_name = "safexcel-hmac-sha3-256",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_256_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_384_BLOCK_SIZE;
        req->processed  = SHA3_384_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_384_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_384_DIGEST_SIZE;
        req->block_sz = SHA3_384_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_384_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_384_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_384_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-384)",
                                .cra_driver_name = "safexcel-hmac-sha3-384",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_384_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_512_BLOCK_SIZE;
        req->processed  = SHA3_512_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_512_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_512_DIGEST_SIZE;
        req->block_sz = SHA3_512_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_512_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_512_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_512_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-512)",
                                .cra_driver_name = "safexcel-hmac-sha3-512",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_512_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
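
/*
 * Keyed usage mirrors the plain-hash sketch shown after the sha3-256
 * template, with a setkey step added; key, keylen, data and len are
 * again illustrative placeholders:
 *
 *      struct crypto_ahash *tfm =
 *              crypto_alloc_ahash("hmac(sha3-512)", 0, 0);
 *      ...allocate the request and wait object as before...
 *      ret = crypto_ahash_setkey(tfm, key, keylen);
 *      if (!ret) {
 *              sg_init_one(&sg, data, len);
 *              ahash_request_set_crypt(req, &sg, out, len);
 *              ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *      }
 */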