// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;

	__le32 key[8];
	unsigned int key_len;

	/* All the fields below are AEAD specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated with the request */
	int rdescs;
	bool needs_inv;
};
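/*
 * Build the command token(s) for a plain cipher (skcipher) request: one
 * direction token covering 'length' bytes of payload and, in CBC mode, a
 * context-access token so the engine stores the updated IV back into the
 * context record.
 */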
59 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
63 struct safexcel_token *token;
64 u32 offset = 0, block_sz = 0;
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}

		offset = block_sz / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, block_sz);
	}
86 token = (struct safexcel_token *)(cdesc->control_data.token + offset);
88 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
89 token[0].packet_length = length;
90 token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
91 EIP197_TOKEN_STAT_LAST_HASH;
92 token[0].instructions = EIP197_TOKEN_INS_LAST |
93 EIP197_TOKEN_INS_TYPE_CRYTO |
94 EIP197_TOKEN_INS_TYPE_OUTPUT;
96 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
97 u32 last = (EIP197_MAX_TOKENS - 1) - offset;
99 token[last].opcode = EIP197_TOKEN_OPCODE_CTX_ACCESS;
100 token[last].packet_length = EIP197_TOKEN_DIRECTION_EXTERNAL |
					    EIP197_TOKEN_EXEC_IF_SUCCESSFUL |
102 EIP197_TOKEN_CTX_OFFSET(0x2);
103 token[last].stat = EIP197_TOKEN_STAT_LAST_HASH |
104 EIP197_TOKEN_STAT_LAST_PACKET;
105 token[last].instructions =
106 EIP197_TOKEN_INS_ORIGIN_LEN(block_sz / sizeof(u32)) |
107 EIP197_TOKEN_INS_ORIGIN_IV0;
		/* Store the updated IV values back in the internal context
		 * registers.
		 */
		cdesc->control_data.control1 |= CONTEXT_CONTROL_CRYPTO_STORE;
	}
}
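/*
 * Build the token sequence for an AEAD request: a hash-only token over the
 * associated data, a crypto+hash token over the payload, then either an
 * insert token that appends the digest (encrypt) or retrieve+verify tokens
 * that check it (decrypt).
 */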
116 static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
117 struct safexcel_command_desc *cdesc,
118 enum safexcel_cipher_direction direction,
119 u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;
	u32 offset = 0;
124 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
125 offset = AES_BLOCK_SIZE / sizeof(u32);
126 memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}
131 token = (struct safexcel_token *)(cdesc->control_data.token + offset);
133 if (direction == SAFEXCEL_DECRYPT)
134 cryptlen -= digestsize;
136 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
137 token[0].packet_length = assoclen;
138 token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
139 EIP197_TOKEN_INS_TYPE_OUTPUT;
141 token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
142 token[1].packet_length = cryptlen;
143 token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
144 token[1].instructions = EIP197_TOKEN_INS_LAST |
145 EIP197_TOKEN_INS_TYPE_CRYTO |
146 EIP197_TOKEN_INS_TYPE_HASH |
147 EIP197_TOKEN_INS_TYPE_OUTPUT;
149 if (direction == SAFEXCEL_ENCRYPT) {
150 token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
151 token[2].packet_length = digestsize;
152 token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
153 EIP197_TOKEN_STAT_LAST_PACKET;
154 token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
157 token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
158 token[2].packet_length = digestsize;
159 token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
160 EIP197_TOKEN_STAT_LAST_PACKET;
161 token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
163 token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
164 token[3].packet_length = digestsize |
165 EIP197_TOKEN_HASH_RESULT_VERIFY;
166 token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
167 EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}
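/*
 * Expand the AES key and, when a context record may still be cached by the
 * engine, compare the new key with the old one so the context gets
 * invalidated before the key is replaced.
 */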
172 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
175 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
176 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
177 struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = crypto_aes_expand_key(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}
187 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
188 for (i = 0; i < len / sizeof(u32); i++) {
189 if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}
196 for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;
	memzero_explicit(&aes, sizeof(aes));

	return 0;
}
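/*
 * Split an authenc() key into its cipher and authentication parts,
 * precompute the HMAC inner/outer hash states via the safexcel ahash
 * algorithms and schedule a context invalidation if any of them changed.
 */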
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
208 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
209 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
210 struct safexcel_ahash_export_state istate, ostate;
211 struct safexcel_crypto_priv *priv = ctx->priv;
212 struct crypto_authenc_keys keys;
	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->key))
		goto badkey;

	/* Encryption key */
221 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
222 memcmp(ctx->key, keys.enckey, keys.enckeylen))
223 ctx->base.needs_inv = true;
226 switch (ctx->hash_alg) {
227 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
228 if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
232 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
233 if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
237 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
238 if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
242 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
243 if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
247 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
248 if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}
257 crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
258 CRYPTO_TFM_RES_MASK);
260 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
261 (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
262 memcmp(ctx->opad, ostate.state, ctx->state_sz)))
263 ctx->base.needs_inv = true;
265 /* Now copy the keys into the context */
266 memcpy(ctx->key, keys.enckey, keys.enckeylen);
267 ctx->key_len = keys.enckeylen;
269 memcpy(ctx->ipad, &istate.state, ctx->state_sz);
270 memcpy(ctx->opad, &ostate.state, ctx->state_sz);
	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
276 crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
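/*
 * Program the per-request control words: operation type (encrypt/decrypt,
 * with or without hashing), key enable, cipher mode, hash algorithm and the
 * size of the context record the engine must fetch.
 */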
281 static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
282 struct crypto_async_request *async,
283 struct safexcel_cipher_req *sreq,
284 struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;
	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}
305 cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
306 cdesc->control_data.control1 |= ctx->mode;
	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->hash_alg;
312 if (ctx->alg == SAFEXCEL_DES) {
313 cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
314 } else if (ctx->alg == SAFEXCEL_3DES) {
315 cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
316 } else if (ctx->alg == SAFEXCEL_AES) {
317 switch (ctx->key_len) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len);
			return -EINVAL;
		}
	}
334 ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take into account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;

	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}
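/*
 * Reap the result descriptors of a completed cipher request, propagate any
 * error reported by the engine and unmap the source/destination
 * scatterlists.
 */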
343 static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
344 struct crypto_async_request *async,
345 struct scatterlist *src,
346 struct scatterlist *dst,
347 unsigned int cryptlen,
348 struct safexcel_cipher_req *sreq,
349 bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;
	if (unlikely(!sreq->rdescs))
		return 0;
359 while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}
374 safexcel_complete(priv, ring);
	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sg_nents(dst), DMA_FROM_DEVICE);
	}
	*should_complete = true;

	return ndesc;
}
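/*
 * Turn a cipher or AEAD request into hardware descriptors: DMA-map the
 * scatterlists, copy the key (and the ipad/opad states for AEAD) into the
 * context record, then emit one command descriptor per source segment and
 * one result descriptor per destination segment.
 */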
388 static int safexcel_send_req(struct crypto_async_request *base, int ring,
389 struct safexcel_cipher_req *sreq,
390 struct scatterlist *src, struct scatterlist *dst,
391 unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
395 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
396 struct safexcel_crypto_priv *priv = ctx->priv;
397 struct safexcel_command_desc *cdesc;
398 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
399 struct scatterlist *sg;
400 unsigned int totlen = cryptlen + assoclen;
	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
	int i, ret = 0;
	if (src == dst) {
		nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
				    DMA_BIDIRECTIONAL);
		nr_dst = nr_src;
		if (!nr_src)
			return -EINVAL;
	} else {
		nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
				    DMA_TO_DEVICE);
		if (!nr_src)
			return -EINVAL;

		nr_dst = dma_map_sg(priv->dev, dst, sg_nents(dst),
				    DMA_FROM_DEVICE);
		if (!nr_dst) {
			dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
			return -EINVAL;
		}
	}
	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (ctx->aead) {
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
		       ctx->opad, ctx->state_sz);
	}
433 /* command descriptors */
434 for_each_sg(src, sg, nr_src, i) {
435 int len = sg_dma_len(sg);
437 /* Do not overflow the request */
		if (queued - len < 0)
			len = queued;
441 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;
		if (n_cdesc == 1) {
			safexcel_context_control(ctx, base, sreq, cdesc);
			if (ctx->aead)
				safexcel_aead_token(ctx, iv, cdesc,
						    sreq->direction, cryptlen,
						    assoclen, digestsize);
			else
				safexcel_skcipher_token(ctx, iv, cdesc,
							cryptlen);
		}

		queued -= len;
		if (!queued)
			break;
	}
467 /* result descriptors */
468 for_each_sg(dst, sg, nr_dst, i) {
469 bool first = !i, last = sg_is_last(sg);
470 u32 len = sg_dma_len(sg);
472 rdesc = safexcel_add_rdesc(priv, ring, first, last,
					   sg_dma_address(sg), len);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first)
			first_rdesc = rdesc;
		n_rdesc++;
	}
	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
491 for (i = 0; i < n_rdesc; i++)
492 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
495 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
	if (src == dst) {
		dma_unmap_sg(priv->dev, src, nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}
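/*
 * Handle the completion of a context invalidation request: either free the
 * context record (when the transform is being torn down) or re-enqueue the
 * original request on a freshly selected ring.
 */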
507 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
510 struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
513 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
514 struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;
	if (unlikely(!sreq->rdescs))
		return 0;
522 while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}
537 safexcel_complete(priv, ring);
539 if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}
548 ring = safexcel_select_ring(priv);
549 ctx->base.ring = ring;
551 spin_lock_bh(&priv->ring[ring].queue_lock);
552 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
553 spin_unlock_bh(&priv->ring[ring].queue_lock);
	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;
558 queue_work(priv->ring[ring].workqueue,
559 &priv->ring[ring].work_data.work);
	*should_complete = false;

	return ndesc;
}
566 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
571 struct skcipher_request *req = skcipher_request_cast(async);
572 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(async->tfm);
	int err;
576 if (sreq->needs_inv) {
577 sreq->needs_inv = false;
578 err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
581 err = safexcel_handle_req_result(priv, ring, async, req->src,
582 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		u32 block_sz = 0;

		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			break;
		}

		memcpy(req->iv, ctx->base.ctxr->data, block_sz);
	}

	return err;
}
607 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
					int ring,
					struct crypto_async_request *async,
					bool *should_complete, int *ret)
{
612 struct aead_request *req = aead_request_cast(async);
613 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;
617 if (sreq->needs_inv) {
618 sreq->needs_inv = false;
619 err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
622 err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}
631 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
632 int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;
	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}
648 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
649 int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
652 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
653 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;
657 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}
670 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
671 int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
674 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
675 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
676 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;
680 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}
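/*
 * Queue a dummy request asking the engine to invalidate this transform's
 * context record and wait synchronously for its completion.
 */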
693 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
694 struct crypto_async_request *base,
695 struct safexcel_cipher_req *sreq,
696 struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
699 struct safexcel_crypto_priv *priv = ctx->priv;
700 int ring = ctx->base.ring;
702 init_completion(&result->completion);
704 ctx = crypto_tfm_ctx(base->tfm);
705 ctx->base.exit_inv = true;
706 sreq->needs_inv = true;
708 spin_lock_bh(&priv->ring[ring].queue_lock);
709 crypto_enqueue_request(&priv->ring[ring].queue, base);
710 spin_unlock_bh(&priv->ring[ring].queue_lock);
712 queue_work(priv->ring[ring].workqueue,
713 &priv->ring[ring].work_data.work);
715 wait_for_completion(&result->completion);
719 "cipher: sync: invalidate: completion error %d\n",
721 return result->error;
727 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
730 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
731 struct safexcel_inv_result result = {};
733 memset(req, 0, sizeof(struct skcipher_request));
735 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
736 safexcel_inv_complete, &result);
737 skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
742 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
745 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
746 struct safexcel_inv_result result = {};
748 memset(req, 0, sizeof(struct aead_request));
750 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
751 safexcel_inv_complete, &result);
752 aead_request_set_tfm(req, __crypto_aead_cast(tfm));
	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
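/*
 * Common entry point for all encrypt/decrypt operations: record direction,
 * mode and algorithm, allocate the context record (or schedule its
 * invalidation if the key changed), then enqueue the request on the
 * context's ring and kick the ring worker.
 */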
757 static int safexcel_queue_req(struct crypto_async_request *base,
758 struct safexcel_cipher_req *sreq,
759 enum safexcel_cipher_direction dir, u32 mode,
760 enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;
766 sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->mode = mode;
	ctx->alg = alg;
771 if (ctx->base.ctxr) {
772 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
773 sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
777 ctx->base.ring = safexcel_select_ring(priv);
778 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
779 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
785 ring = ctx->base.ring;
787 spin_lock_bh(&priv->ring[ring].queue_lock);
788 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
789 spin_unlock_bh(&priv->ring[ring].queue_lock);
791 queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
797 static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}
804 static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}
811 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
814 struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);
818 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
819 sizeof(struct safexcel_cipher_req));
821 ctx->priv = tmpl->priv;
823 ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;

	return 0;
}
828 static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
832 memzero_explicit(ctx->key, sizeof(ctx->key));
	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;
	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));

	return 0;
}
842 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;
	if (safexcel_cipher_cra_exit(tfm))
		return;
851 if (priv->flags & EIP197_TRC_CACHE) {
852 ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
862 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;
	if (safexcel_cipher_cra_exit(tfm))
		return;
871 if (priv->flags & EIP197_TRC_CACHE) {
872 ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
882 struct safexcel_alg_template safexcel_alg_ecb_aes = {
883 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
886 .setkey = safexcel_skcipher_aes_setkey,
887 .encrypt = safexcel_ecb_aes_encrypt,
888 .decrypt = safexcel_ecb_aes_decrypt,
889 .min_keysize = AES_MIN_KEY_SIZE,
890 .max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
895 .cra_flags = CRYPTO_ALG_ASYNC |
896 CRYPTO_ALG_KERN_DRIVER_ONLY,
897 .cra_blocksize = AES_BLOCK_SIZE,
898 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
900 .cra_init = safexcel_skcipher_cra_init,
901 .cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
907 static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}
914 static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}
921 struct safexcel_alg_template safexcel_alg_cbc_aes = {
922 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
925 .setkey = safexcel_skcipher_aes_setkey,
926 .encrypt = safexcel_cbc_aes_encrypt,
927 .decrypt = safexcel_cbc_aes_decrypt,
928 .min_keysize = AES_MIN_KEY_SIZE,
929 .max_keysize = AES_MAX_KEY_SIZE,
930 .ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
935 .cra_flags = CRYPTO_ALG_ASYNC |
936 CRYPTO_ALG_KERN_DRIVER_ONLY,
937 .cra_blocksize = AES_BLOCK_SIZE,
938 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
940 .cra_init = safexcel_skcipher_cra_init,
941 .cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
947 static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}
954 static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
964 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
965 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;
969 if (len != DES_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
974 ret = des_ekey(tmp, key);
975 if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}
	/* if context exists and key changed, need to invalidate it */
981 if (ctx->base.ctxr_dma)
982 if (memcmp(ctx->key, key, len))
983 ctx->base.needs_inv = true;
	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
991 struct safexcel_alg_template safexcel_alg_cbc_des = {
992 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
995 .setkey = safexcel_des_setkey,
996 .encrypt = safexcel_cbc_des_encrypt,
997 .decrypt = safexcel_cbc_des_decrypt,
998 .min_keysize = DES_KEY_SIZE,
999 .max_keysize = DES_KEY_SIZE,
1000 .ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
1003 .cra_driver_name = "safexcel-cbc-des",
1004 .cra_priority = 300,
1005 .cra_flags = CRYPTO_ALG_ASYNC |
1006 CRYPTO_ALG_KERN_DRIVER_ONLY,
1007 .cra_blocksize = DES_BLOCK_SIZE,
1008 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1010 .cra_init = safexcel_skcipher_cra_init,
1011 .cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1017 static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}
1024 static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}
1031 struct safexcel_alg_template safexcel_alg_ecb_des = {
1032 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
1035 .setkey = safexcel_des_setkey,
1036 .encrypt = safexcel_ecb_des_encrypt,
1037 .decrypt = safexcel_ecb_des_decrypt,
1038 .min_keysize = DES_KEY_SIZE,
1039 .max_keysize = DES_KEY_SIZE,
1040 .ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des)",
1043 .cra_driver_name = "safexcel-ecb-des",
1044 .cra_priority = 300,
1045 .cra_flags = CRYPTO_ALG_ASYNC |
1046 CRYPTO_ALG_KERN_DRIVER_ONLY,
1047 .cra_blocksize = DES_BLOCK_SIZE,
1048 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1050 .cra_init = safexcel_skcipher_cra_init,
1051 .cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1057 static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}
1064 static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}
1071 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1072 const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;
	err = des3_verify_key(ctfm, key);
	if (unlikely(err))
		return err;
	/* if context exists and key changed, need to invalidate it */
1082 if (ctx->base.ctxr_dma) {
1083 if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}
	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1094 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1095 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
1098 .setkey = safexcel_des3_ede_setkey,
1099 .encrypt = safexcel_cbc_des3_ede_encrypt,
1100 .decrypt = safexcel_cbc_des3_ede_decrypt,
1101 .min_keysize = DES3_EDE_KEY_SIZE,
1102 .max_keysize = DES3_EDE_KEY_SIZE,
1103 .ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
1106 .cra_driver_name = "safexcel-cbc-des3_ede",
1107 .cra_priority = 300,
1108 .cra_flags = CRYPTO_ALG_ASYNC |
1109 CRYPTO_ALG_KERN_DRIVER_ONLY,
1110 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1111 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1113 .cra_init = safexcel_skcipher_cra_init,
1114 .cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1120 static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}
1127 static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}
1134 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1135 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
1138 .setkey = safexcel_des3_ede_setkey,
1139 .encrypt = safexcel_ecb_des3_ede_encrypt,
1140 .decrypt = safexcel_ecb_des3_ede_decrypt,
1141 .min_keysize = DES3_EDE_KEY_SIZE,
1142 .max_keysize = DES3_EDE_KEY_SIZE,
1143 .ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
1146 .cra_driver_name = "safexcel-ecb-des3_ede",
1147 .cra_priority = 300,
1148 .cra_flags = CRYPTO_ALG_ASYNC |
1149 CRYPTO_ALG_KERN_DRIVER_ONLY,
1150 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1151 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1153 .cra_init = safexcel_skcipher_cra_init,
1154 .cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1160 static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1164 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
				  CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}
1168 static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1172 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
				  CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}
1176 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1179 struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);
1183 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1184 sizeof(struct safexcel_cipher_req));
1186 ctx->priv = tmpl->priv;
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;

	return 0;
}
1194 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1198 safexcel_aead_cra_init(tfm);
1199 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}
1204 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1205 .type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
1208 .setkey = safexcel_aead_aes_setkey,
1209 .encrypt = safexcel_aead_encrypt,
1210 .decrypt = safexcel_aead_decrypt,
1211 .ivsize = AES_BLOCK_SIZE,
1212 .maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
1215 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1216 .cra_priority = 300,
1217 .cra_flags = CRYPTO_ALG_ASYNC |
1218 CRYPTO_ALG_KERN_DRIVER_ONLY,
1219 .cra_blocksize = AES_BLOCK_SIZE,
1220 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1222 .cra_init = safexcel_aead_sha1_cra_init,
1223 .cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1229 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1233 safexcel_aead_cra_init(tfm);
1234 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}
1239 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1240 .type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
1243 .setkey = safexcel_aead_aes_setkey,
1244 .encrypt = safexcel_aead_encrypt,
1245 .decrypt = safexcel_aead_decrypt,
1246 .ivsize = AES_BLOCK_SIZE,
1247 .maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
1250 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1251 .cra_priority = 300,
1252 .cra_flags = CRYPTO_ALG_ASYNC |
1253 CRYPTO_ALG_KERN_DRIVER_ONLY,
1254 .cra_blocksize = AES_BLOCK_SIZE,
1255 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1257 .cra_init = safexcel_aead_sha256_cra_init,
1258 .cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1264 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1268 safexcel_aead_cra_init(tfm);
1269 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}
1274 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1275 .type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
1278 .setkey = safexcel_aead_aes_setkey,
1279 .encrypt = safexcel_aead_encrypt,
1280 .decrypt = safexcel_aead_decrypt,
1281 .ivsize = AES_BLOCK_SIZE,
1282 .maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
1285 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1286 .cra_priority = 300,
1287 .cra_flags = CRYPTO_ALG_ASYNC |
1288 CRYPTO_ALG_KERN_DRIVER_ONLY,
1289 .cra_blocksize = AES_BLOCK_SIZE,
1290 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1292 .cra_init = safexcel_aead_sha224_cra_init,
1293 .cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1299 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1303 safexcel_aead_cra_init(tfm);
1304 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}
1309 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1310 .type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
1313 .setkey = safexcel_aead_aes_setkey,
1314 .encrypt = safexcel_aead_encrypt,
1315 .decrypt = safexcel_aead_decrypt,
1316 .ivsize = AES_BLOCK_SIZE,
1317 .maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
1320 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1321 .cra_priority = 300,
1322 .cra_flags = CRYPTO_ALG_ASYNC |
1323 CRYPTO_ALG_KERN_DRIVER_ONLY,
1324 .cra_blocksize = AES_BLOCK_SIZE,
1325 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1327 .cra_init = safexcel_aead_sha512_cra_init,
1328 .cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1334 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1338 safexcel_aead_cra_init(tfm);
1339 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}
1344 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1345 .type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
1348 .setkey = safexcel_aead_aes_setkey,
1349 .encrypt = safexcel_aead_encrypt,
1350 .decrypt = safexcel_aead_decrypt,
1351 .ivsize = AES_BLOCK_SIZE,
1352 .maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
1355 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1356 .cra_priority = 300,
1357 .cra_flags = CRYPTO_ALG_ASYNC |
1358 CRYPTO_ALG_KERN_DRIVER_ONLY,
1359 .cra_blocksize = AES_BLOCK_SIZE,
1360 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1362 .cra_init = safexcel_aead_sha384_cra_init,
1363 .cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};