2 * Copyright 2001-2020 The OpenSSL Project Authors. All Rights Reserved.
4 * Licensed under the Apache License 2.0 (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
11 * This file uses the low level AES functions (which are deprecated for
12 * non-internal use) in order to implement the EVP AES ciphers.
14 #include "internal/deprecated.h"
18 #include <openssl/opensslconf.h>
19 #include <openssl/crypto.h>
20 #include <openssl/evp.h>
21 #include <openssl/err.h>
22 #include <openssl/aes.h>
23 #include <openssl/rand.h>
24 #include <openssl/cmac.h>
25 #include "crypto/evp.h"
26 #include "internal/cryptlib.h"
27 #include "crypto/modes.h"
28 #include "crypto/siv.h"
29 #include "crypto/aes_platform.h"
30 #include "evp_local.h"
48 } ks; /* AES key schedule to use */
49 int key_set; /* Set if key initialised */
50 int iv_set; /* Set if an iv is set */
52 unsigned char *iv; /* Temporary IV store */
53 int ivlen; /* IV length */
55 int iv_gen; /* It is OK to generate IVs */
56 int iv_gen_rand; /* No IV was specified, so generate a rand IV */
57 int tls_aad_len; /* TLS AAD length */
58 uint64_t tls_enc_records; /* Number of TLS records encrypted */
66 } ks1, ks2; /* AES key schedules to use */
68 void (*stream) (const unsigned char *in,
69 unsigned char *out, size_t length,
70 const AES_KEY *key1, const AES_KEY *key2,
71 const unsigned char iv[16]);
75 static const int allow_insecure_decrypt = 0;
77 static const int allow_insecure_decrypt = 1;
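/*
 * allow_insecure_decrypt is consulted by the XTS init_key routines below:
 * when it is 0, duplicated XTS key halves are always rejected; when it is
 * 1 they are tolerated, but only for decryption.
 */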
84 } ks; /* AES key schedule to use */
85 int key_set; /* Set if key initialised */
86 int iv_set; /* Set if an iv is set */
87 int tag_set; /* Set if tag is valid */
88 int len_set; /* Set if message length set */
89 int L, M; /* L and M parameters from RFC3610 */
90 int tls_aad_len; /* TLS AAD length */
95 #ifndef OPENSSL_NO_OCB
100 } ksenc; /* AES key schedule to use for encryption */
104 } ksdec; /* AES key schedule to use for decryption */
105 int key_set; /* Set if key initialised */
106 int iv_set; /* Set if an iv is set */
108 unsigned char *iv; /* Temporary IV store */
109 unsigned char tag[16];
110 unsigned char data_buf[16]; /* Store partial data blocks */
111 unsigned char aad_buf[16]; /* Store partial AAD blocks */
114 int ivlen; /* IV length */
119 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
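/*
 * Worked example: with a 64-bit size_t this is 1 << 60. aes_cfb1_cipher()
 * below feeds CRYPTO_cfb128_1_encrypt() at most this many bytes per call
 * so that the bit count it passes (len * 8) cannot overflow size_t.
 */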
121 /* increment counter (64-bit int) by 1 */
122 static void ctr64_inc(unsigned char *counter)
137 #if defined(AESNI_CAPABLE)
138 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
139 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
140 gctx->gcm.ghash==gcm_ghash_avx)
141 # undef AES_GCM_ASM2 /* minor size optimization */
144 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
145 const unsigned char *iv, int enc)
148 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
150 mode = EVP_CIPHER_CTX_mode(ctx);
151 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
153 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
155 dat->block = (block128_f) aesni_decrypt;
156 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
157 (cbc128_f) aesni_cbc_encrypt : NULL;
159 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
161 dat->block = (block128_f) aesni_encrypt;
162 if (mode == EVP_CIPH_CBC_MODE)
163 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
164 else if (mode == EVP_CIPH_CTR_MODE)
165 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
167 dat->stream.cbc = NULL;
171 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
178 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
179 const unsigned char *in, size_t len)
181 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
182 EVP_CIPHER_CTX_iv_noconst(ctx),
183 EVP_CIPHER_CTX_encrypting(ctx));
188 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
189 const unsigned char *in, size_t len)
191 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
196 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
197 EVP_CIPHER_CTX_encrypting(ctx));
202 # define aesni_ofb_cipher aes_ofb_cipher
203 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
204 const unsigned char *in, size_t len);
206 # define aesni_cfb_cipher aes_cfb_cipher
207 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
208 const unsigned char *in, size_t len);
210 # define aesni_cfb8_cipher aes_cfb8_cipher
211 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
212 const unsigned char *in, size_t len);
214 # define aesni_cfb1_cipher aes_cfb1_cipher
215 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
216 const unsigned char *in, size_t len);
218 # define aesni_ctr_cipher aes_ctr_cipher
219 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
220 const unsigned char *in, size_t len);
222 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
223 const unsigned char *iv, int enc)
225 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
229 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
231 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
232 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
* If we have an IV we can set it directly, otherwise use the saved IV.

236 if (iv == NULL && gctx->iv_set)
239 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
244 /* If key set use IV, otherwise copy */
246 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
248 memcpy(gctx->iv, iv, gctx->ivlen);
255 # define aesni_gcm_cipher aes_gcm_cipher
256 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
257 const unsigned char *in, size_t len);
259 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
260 const unsigned char *iv, int enc)
262 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
268 /* The key is two half length keys in reality */
269 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
270 const int bits = bytes * 8;
273 * Verify that the two keys are different.
275 * This addresses Rogaway's vulnerability.
276 * See comment in aes_xts_init_key() below.
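/*
 * Note: the full rationale lives in the comment in aes_xts_init_key():
 * the check below implements the Key_1 != Key_2 requirement of
 * FIPS 140-2 IG A.9 and guards against the weakness of running XTS with
 * both key halves identical that Rogaway's 2004 XEX paper describes.
 */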
278 if ((!allow_insecure_decrypt || enc)
279 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
280 EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
284 /* key_len is two AES keys */
286 aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
287 xctx->xts.block1 = (block128_f) aesni_encrypt;
288 xctx->stream = aesni_xts_encrypt;
290 aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
291 xctx->xts.block1 = (block128_f) aesni_decrypt;
292 xctx->stream = aesni_xts_decrypt;
295 aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
296 xctx->xts.block2 = (block128_f) aesni_encrypt;
298 xctx->xts.key1 = &xctx->ks1;
302 xctx->xts.key2 = &xctx->ks2;
303 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
309 # define aesni_xts_cipher aes_xts_cipher
310 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
311 const unsigned char *in, size_t len);
313 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
314 const unsigned char *iv, int enc)
316 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
320 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
322 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
323 &cctx->ks, (block128_f) aesni_encrypt);
324 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
325 (ccm128_f) aesni_ccm64_decrypt_blocks;
329 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
335 # define aesni_ccm_cipher aes_ccm_cipher
336 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
337 const unsigned char *in, size_t len);
339 # ifndef OPENSSL_NO_OCB
340 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
341 const unsigned char *iv, int enc)
343 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
349 * We set both the encrypt and decrypt key here because decrypt
350 * needs both. We could possibly optimise to remove setting the
351 * decrypt for an encryption operation.
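/*
 * For context: OCB always needs the forward (encrypt) schedule, since
 * offsets, the AAD hash and the tag are computed with the block cipher in
 * the forward direction, and it additionally needs the inverse schedule
 * to decrypt payload blocks, which is why CRYPTO_ocb128_init() takes both.
 */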
353 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
355 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
357 if (!CRYPTO_ocb128_init(&octx->ocb,
358 &octx->ksenc.ks, &octx->ksdec.ks,
359 (block128_f) aesni_encrypt,
360 (block128_f) aesni_decrypt,
361 enc ? aesni_ocb_encrypt
362 : aesni_ocb_decrypt))
368 * If we have an iv we can set it directly, otherwise use saved IV.
370 if (iv == NULL && octx->iv_set)
373 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
380 /* If key set use IV, otherwise copy */
382 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
384 memcpy(octx->iv, iv, octx->ivlen);
390 # define aesni_ocb_cipher aes_ocb_cipher
391 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
392 const unsigned char *in, size_t len);
393 # endif /* OPENSSL_NO_OCB */
395 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
396 static const EVP_CIPHER aesni_##keylen##_##mode = { \
397 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
398 flags|EVP_CIPH_##MODE##_MODE, \
400 aesni_##mode##_cipher, \
402 sizeof(EVP_AES_KEY), \
403 NULL,NULL,NULL,NULL }; \
404 static const EVP_CIPHER aes_##keylen##_##mode = { \
405 nid##_##keylen##_##nmode,blocksize, \
407 flags|EVP_CIPH_##MODE##_MODE, \
409 aes_##mode##_cipher, \
411 sizeof(EVP_AES_KEY), \
412 NULL,NULL,NULL,NULL }; \
413 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
414 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
416 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
417 static const EVP_CIPHER aesni_##keylen##_##mode = { \
418 nid##_##keylen##_##mode,blocksize, \
419 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
421 flags|EVP_CIPH_##MODE##_MODE, \
422 aesni_##mode##_init_key, \
423 aesni_##mode##_cipher, \
424 aes_##mode##_cleanup, \
425 sizeof(EVP_AES_##MODE##_CTX), \
426 NULL,NULL,aes_##mode##_ctrl,NULL }; \
427 static const EVP_CIPHER aes_##keylen##_##mode = { \
428 nid##_##keylen##_##mode,blocksize, \
429 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
431 flags|EVP_CIPH_##MODE##_MODE, \
432 aes_##mode##_init_key, \
433 aes_##mode##_cipher, \
434 aes_##mode##_cleanup, \
435 sizeof(EVP_AES_##MODE##_CTX), \
436 NULL,NULL,aes_##mode##_ctrl,NULL }; \
437 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
438 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
440 #elif defined(SPARC_AES_CAPABLE)
442 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
443 const unsigned char *iv, int enc)
446 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
448 mode = EVP_CIPHER_CTX_mode(ctx);
449 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
450 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
453 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
454 dat->block = (block128_f) aes_t4_decrypt;
457 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
458 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
461 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
462 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
465 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
466 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
473 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
474 dat->block = (block128_f) aes_t4_encrypt;
477 if (mode == EVP_CIPH_CBC_MODE)
478 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
479 else if (mode == EVP_CIPH_CTR_MODE)
480 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
482 dat->stream.cbc = NULL;
485 if (mode == EVP_CIPH_CBC_MODE)
486 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
487 else if (mode == EVP_CIPH_CTR_MODE)
488 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
490 dat->stream.cbc = NULL;
493 if (mode == EVP_CIPH_CBC_MODE)
494 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
495 else if (mode == EVP_CIPH_CTR_MODE)
496 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
498 dat->stream.cbc = NULL;
506 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
513 # define aes_t4_cbc_cipher aes_cbc_cipher
514 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
515 const unsigned char *in, size_t len);
517 # define aes_t4_ecb_cipher aes_ecb_cipher
518 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
519 const unsigned char *in, size_t len);
521 # define aes_t4_ofb_cipher aes_ofb_cipher
522 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
523 const unsigned char *in, size_t len);
525 # define aes_t4_cfb_cipher aes_cfb_cipher
526 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
527 const unsigned char *in, size_t len);
529 # define aes_t4_cfb8_cipher aes_cfb8_cipher
530 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
531 const unsigned char *in, size_t len);
533 # define aes_t4_cfb1_cipher aes_cfb1_cipher
534 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
535 const unsigned char *in, size_t len);
537 # define aes_t4_ctr_cipher aes_ctr_cipher
538 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
539 const unsigned char *in, size_t len);
541 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
542 const unsigned char *iv, int enc)
544 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
548 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
549 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
550 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
551 (block128_f) aes_t4_encrypt);
554 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
557 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
560 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
* If we have an IV we can set it directly, otherwise use the saved IV.
568 if (iv == NULL && gctx->iv_set)
571 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
576 /* If key set use IV, otherwise copy */
578 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
580 memcpy(gctx->iv, iv, gctx->ivlen);
587 # define aes_t4_gcm_cipher aes_gcm_cipher
588 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
589 const unsigned char *in, size_t len);
591 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
592 const unsigned char *iv, int enc)
594 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
600 /* The key is two half length keys in reality */
601 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
602 const int bits = bytes * 8;
605 * Verify that the two keys are different.
607 * This addresses Rogaway's vulnerability.
608 * See comment in aes_xts_init_key() below.
610 if ((!allow_insecure_decrypt || enc)
611 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
612 EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
617 /* key_len is two AES keys */
619 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
620 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
623 xctx->stream = aes128_t4_xts_encrypt;
626 xctx->stream = aes256_t4_xts_encrypt;
632 aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
633 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
636 xctx->stream = aes128_t4_xts_decrypt;
639 xctx->stream = aes256_t4_xts_decrypt;
646 aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
647 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
649 xctx->xts.key1 = &xctx->ks1;
653 xctx->xts.key2 = &xctx->ks2;
654 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
660 # define aes_t4_xts_cipher aes_xts_cipher
661 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
662 const unsigned char *in, size_t len);
664 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
665 const unsigned char *iv, int enc)
667 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
671 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
672 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
673 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
674 &cctx->ks, (block128_f) aes_t4_encrypt);
679 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
685 # define aes_t4_ccm_cipher aes_ccm_cipher
686 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
687 const unsigned char *in, size_t len);
689 # ifndef OPENSSL_NO_OCB
690 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
691 const unsigned char *iv, int enc)
693 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
699 * We set both the encrypt and decrypt key here because decrypt
700 * needs both. We could possibly optimise to remove setting the
701 * decrypt for an encryption operation.
703 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
705 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
707 if (!CRYPTO_ocb128_init(&octx->ocb,
708 &octx->ksenc.ks, &octx->ksdec.ks,
709 (block128_f) aes_t4_encrypt,
710 (block128_f) aes_t4_decrypt,
717 * If we have an iv we can set it directly, otherwise use saved IV.
719 if (iv == NULL && octx->iv_set)
722 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
729 /* If key set use IV, otherwise copy */
731 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
733 memcpy(octx->iv, iv, octx->ivlen);
739 # define aes_t4_ocb_cipher aes_ocb_cipher
740 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
741 const unsigned char *in, size_t len);
742 # endif /* OPENSSL_NO_OCB */
744 # ifndef OPENSSL_NO_SIV
745 # define aes_t4_siv_init_key aes_siv_init_key
746 # define aes_t4_siv_cipher aes_siv_cipher
747 # endif /* OPENSSL_NO_SIV */
749 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
750 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
751 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
752 flags|EVP_CIPH_##MODE##_MODE, \
754 aes_t4_##mode##_cipher, \
756 sizeof(EVP_AES_KEY), \
757 NULL,NULL,NULL,NULL }; \
758 static const EVP_CIPHER aes_##keylen##_##mode = { \
759 nid##_##keylen##_##nmode,blocksize, \
761 flags|EVP_CIPH_##MODE##_MODE, \
763 aes_##mode##_cipher, \
765 sizeof(EVP_AES_KEY), \
766 NULL,NULL,NULL,NULL }; \
767 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
768 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
770 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
771 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
772 nid##_##keylen##_##mode,blocksize, \
773 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
775 flags|EVP_CIPH_##MODE##_MODE, \
776 aes_t4_##mode##_init_key, \
777 aes_t4_##mode##_cipher, \
778 aes_##mode##_cleanup, \
779 sizeof(EVP_AES_##MODE##_CTX), \
780 NULL,NULL,aes_##mode##_ctrl,NULL }; \
781 static const EVP_CIPHER aes_##keylen##_##mode = { \
782 nid##_##keylen##_##mode,blocksize, \
783 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
785 flags|EVP_CIPH_##MODE##_MODE, \
786 aes_##mode##_init_key, \
787 aes_##mode##_cipher, \
788 aes_##mode##_cleanup, \
789 sizeof(EVP_AES_##MODE##_CTX), \
790 NULL,NULL,aes_##mode##_ctrl,NULL }; \
791 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
792 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
794 #elif defined(S390X_aes_128_CAPABLE)
795 /* IBM S390X support */
800 * KM-AES parameter block - begin
801 * (see z/Architecture Principles of Operation >= SA22-7832-06)
806 /* KM-AES parameter block - end */
815 * KMO-AES parameter block - begin
816 * (see z/Architecture Principles of Operation >= SA22-7832-08)
819 unsigned char cv[16];
822 /* KMO-AES parameter block - end */
833 * KMF-AES parameter block - begin
834 * (see z/Architecture Principles of Operation >= SA22-7832-08)
837 unsigned char cv[16];
840 /* KMF-AES parameter block - end */
851 * KMA-GCM-AES parameter block - begin
852 * (see z/Architecture Principles of Operation >= SA22-7832-11)
855 unsigned char reserved[12];
861 unsigned long long g[2];
865 unsigned long long taadl;
866 unsigned long long tpcl;
868 unsigned long long g[2];
873 /* KMA-GCM-AES parameter block - end */
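/*
 * Fields of this parameter block referenced later in this file: t (the
 * running tag), taadl/tpcl (total AAD and ciphertext lengths in bits),
 * j0 (pre-counter block), cv (32-bit counter) and k (the key).  See
 * s390x_aes_gcm_setiv() and s390x_aes_gcm() below; the exact layout is
 * defined by the z/Architecture Principles of Operation cited above.
 */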
885 unsigned char ares[16];
886 unsigned char mres[16];
887 unsigned char kres[16];
893 uint64_t tls_enc_records; /* Number of TLS records encrypted */
900 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
901 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
902 * rounds field is used to store the function code and that the key
903 * schedule is not stored (if aes hardware support is detected).
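/*
 * In other words, the same storage can be read either as the KMAC
 * parameter block used by the CPACF path or as a plain AES_KEY used by
 * the CTR computation (see the AES_ctr32_encrypt calls in s390x_aes_ccm()
 * below), so the key is kept in a single place.
 */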
906 unsigned char pad[16];
912 * KMAC-AES parameter block - begin
913 * (see z/Architecture Principles of Operation >= SA22-7832-08)
917 unsigned long long g[2];
922 /* KMAC-AES parameter block - end */
925 unsigned long long g[2];
929 unsigned long long g[2];
933 unsigned long long blocks;
942 unsigned char pad[140];
948 # define s390x_aes_init_key aes_init_key
949 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
950 const unsigned char *iv, int enc);
952 # define S390X_AES_CBC_CTX EVP_AES_KEY
954 # define s390x_aes_cbc_init_key aes_init_key
956 # define s390x_aes_cbc_cipher aes_cbc_cipher
957 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
958 const unsigned char *in, size_t len);
960 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
961 const unsigned char *key,
962 const unsigned char *iv, int enc)
964 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
965 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
967 cctx->fc = S390X_AES_FC(keylen);
969 cctx->fc |= S390X_DECRYPT;
971 memcpy(cctx->km.param.k, key, keylen);
975 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
976 const unsigned char *in, size_t len)
978 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
980 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
984 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
985 const unsigned char *key,
986 const unsigned char *ivec, int enc)
988 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
989 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
990 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
991 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
993 memcpy(cctx->kmo.param.cv, iv, ivlen);
994 memcpy(cctx->kmo.param.k, key, keylen);
995 cctx->fc = S390X_AES_FC(keylen);
1000 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1001 const unsigned char *in, size_t len)
1003 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1008 *out = *in ^ cctx->kmo.param.cv[n];
1017 len &= ~(size_t)0xf;
1019 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1026 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1030 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1039 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1040 const unsigned char *key,
1041 const unsigned char *ivec, int enc)
1043 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1044 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1045 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1046 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1048 cctx->fc = S390X_AES_FC(keylen);
1049 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1051 cctx->fc |= S390X_DECRYPT;
1054 memcpy(cctx->kmf.param.cv, iv, ivlen);
1055 memcpy(cctx->kmf.param.k, key, keylen);
1059 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1060 const unsigned char *in, size_t len)
1062 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1063 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1064 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1071 *out = cctx->kmf.param.cv[n] ^ tmp;
1072 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1081 len &= ~(size_t)0xf;
1083 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1090 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1091 S390X_AES_FC(keylen), cctx->kmf.param.k);
1095 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1096 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1105 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1106 const unsigned char *key,
1107 const unsigned char *ivec, int enc)
1109 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1110 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1111 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1112 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1114 cctx->fc = S390X_AES_FC(keylen);
1115 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1117 cctx->fc |= S390X_DECRYPT;
1119 memcpy(cctx->kmf.param.cv, iv, ivlen);
1120 memcpy(cctx->kmf.param.k, key, keylen);
1124 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1125 const unsigned char *in, size_t len)
1127 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1129 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1133 # define s390x_aes_cfb1_init_key aes_init_key
1135 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
1136 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1137 const unsigned char *in, size_t len);
1139 # define S390X_AES_CTR_CTX EVP_AES_KEY
1141 # define s390x_aes_ctr_init_key aes_init_key
1143 # define s390x_aes_ctr_cipher aes_ctr_cipher
1144 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1145 const unsigned char *in, size_t len);
1147 /* iv + padding length for iv lengths != 12 */
1148 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
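/*
 * I.e. the IV length rounded up to a multiple of 16, plus one more
 * 16-byte block for the encoded bit length: S390X_gcm_ivpadlen(13) = 32,
 * S390X_gcm_ivpadlen(20) = 48.  This is the size of the buffer that gets
 * GHASHed when deriving J0 from a non-96-bit IV.
 */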
1151 * Process additional authenticated data. Returns 0 on success. Code is
1154 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1157 unsigned long long alen;
1160 if (ctx->kma.param.tpcl)
1163 alen = ctx->kma.param.taadl + len;
1164 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1166 ctx->kma.param.taadl = alen;
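/* 2^61 bytes corresponds to the 2^64-bit AAD limit from SP 800-38D; the
 * second clause of the check above catches wrap-around of the total. */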
1171 ctx->ares[n] = *aad;
1176 /* ctx->ares contains a complete block if offset has wrapped around */
1178 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1179 ctx->fc |= S390X_KMA_HS;
1186 len &= ~(size_t)0xf;
1188 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1190 ctx->fc |= S390X_KMA_HS;
1198 ctx->ares[rem] = aad[rem];
1205 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1206 * success. Code is big-endian.
1208 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1209 unsigned char *out, size_t len)
1211 const unsigned char *inptr;
1212 unsigned long long mlen;
1215 unsigned char b[16];
1220 mlen = ctx->kma.param.tpcl + len;
1221 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1223 ctx->kma.param.tpcl = mlen;
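/* 2^36 - 32 bytes is the GCM plaintext limit of 2^39 - 256 bits from
 * SP 800-38D; again the second clause detects wrap-around. */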
1229 while (n && inlen) {
1230 ctx->mres[n] = *inptr;
1235 /* ctx->mres contains a complete block if offset has wrapped around */
1237 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1238 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1239 ctx->fc |= S390X_KMA_HS;
1242 /* previous call already encrypted/decrypted its remainder,
1243 * see comment below */
1258 len &= ~(size_t)0xf;
1260 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1261 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1264 ctx->fc |= S390X_KMA_HS;
1269 * If there is a remainder, it has to be saved such that it can be
1270 * processed by kma later. However, we also have to do the for-now
1271 * unauthenticated encryption/decryption part here and now...
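/*
 * What follows: the current counter block (j0 with cv + 1) is encrypted
 * with KM to obtain one block of keystream in ctx->kres, the trailing
 * partial block is XORed against it immediately, and the bytes are parked
 * in ctx->mres so a later kma call (or the final tag computation) can
 * fold them into GHASH.
 */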
1274 if (!ctx->mreslen) {
1275 buf.w[0] = ctx->kma.param.j0.w[0];
1276 buf.w[1] = ctx->kma.param.j0.w[1];
1277 buf.w[2] = ctx->kma.param.j0.w[2];
1278 buf.w[3] = ctx->kma.param.cv.w + 1;
1279 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1283 for (i = 0; i < rem; i++) {
1284 ctx->mres[n + i] = in[i];
1285 out[i] = in[i] ^ ctx->kres[n + i];
1288 ctx->mreslen += rem;
1294 * Initialize context structure. Code is big-endian.
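/*
 * J0 derivation follows SP 800-38D: for a 96-bit IV, J0 = IV || 0^31 || 1
 * (the fast path below); for any other length, J0 = GHASH over the
 * zero-padded IV and its encoded bit length, computed here by feeding the
 * padded buffer to KMA with no data.
 */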
1296 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1297 const unsigned char *iv)
1299 ctx->kma.param.t.g[0] = 0;
1300 ctx->kma.param.t.g[1] = 0;
1301 ctx->kma.param.tpcl = 0;
1302 ctx->kma.param.taadl = 0;
1307 if (ctx->ivlen == 12) {
1308 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1309 ctx->kma.param.j0.w[3] = 1;
1310 ctx->kma.param.cv.w = 1;
1312 /* ctx->iv has the right size and is already padded. */
1313 memcpy(ctx->iv, iv, ctx->ivlen);
1314 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1315 ctx->fc, &ctx->kma.param);
1316 ctx->fc |= S390X_KMA_HS;
1318 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1319 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1320 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1321 ctx->kma.param.t.g[0] = 0;
1322 ctx->kma.param.t.g[1] = 0;
1327 * Performs various operations on the context structure depending on control
1328 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1329 * Code is big-endian.
1331 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1333 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1334 S390X_AES_GCM_CTX *gctx_out;
1335 EVP_CIPHER_CTX *out;
1336 unsigned char *buf, *iv;
1337 int ivlen, enc, len;
1341 ivlen = EVP_CIPHER_iv_length(c->cipher);
1342 iv = EVP_CIPHER_CTX_iv_noconst(c);
1345 gctx->ivlen = ivlen;
1349 gctx->tls_aad_len = -1;
1352 case EVP_CTRL_GET_IVLEN:
1353 *(int *)ptr = gctx->ivlen;
1356 case EVP_CTRL_AEAD_SET_IVLEN:
1361 iv = EVP_CIPHER_CTX_iv_noconst(c);
1362 len = S390X_gcm_ivpadlen(arg);
1364 /* Allocate memory for iv if needed. */
1365 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1367 OPENSSL_free(gctx->iv);
1369 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1370 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1375 memset(gctx->iv + arg, 0, len - arg - 8);
1376 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1381 case EVP_CTRL_AEAD_SET_TAG:
1382 buf = EVP_CIPHER_CTX_buf_noconst(c);
1383 enc = EVP_CIPHER_CTX_encrypting(c);
1384 if (arg <= 0 || arg > 16 || enc)
1387 memcpy(buf, ptr, arg);
1391 case EVP_CTRL_AEAD_GET_TAG:
1392 enc = EVP_CIPHER_CTX_encrypting(c);
1393 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1396 memcpy(ptr, gctx->kma.param.t.b, arg);
1399 case EVP_CTRL_GCM_SET_IV_FIXED:
1400 /* Special case: -1 length restores whole iv */
1402 memcpy(gctx->iv, ptr, gctx->ivlen);
1407 * Fixed field must be at least 4 bytes and invocation field at least
1410 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1414 memcpy(gctx->iv, ptr, arg);
1416 enc = EVP_CIPHER_CTX_encrypting(c);
1417 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1423 case EVP_CTRL_GCM_IV_GEN:
1424 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1427 s390x_aes_gcm_setiv(gctx, gctx->iv);
1429 if (arg <= 0 || arg > gctx->ivlen)
1432 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1434 * Invocation field will be at least 8 bytes in size and so no need
1435 * to check wrap around or increment more than last 8 bytes.
1437 ctr64_inc(gctx->iv + gctx->ivlen - 8);
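/*
 * Note: this SET_IV_FIXED/IV_GEN pair implements the TLS GCM nonce
 * construction (e.g. RFC 5288): a fixed "salt" part installed once plus
 * an 8-byte explicit part that is handed to the caller and then bumped
 * as a big-endian 64-bit counter for the next record.
 */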
1441 case EVP_CTRL_GCM_SET_IV_INV:
1442 enc = EVP_CIPHER_CTX_encrypting(c);
1443 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1446 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1447 s390x_aes_gcm_setiv(gctx, gctx->iv);
1451 case EVP_CTRL_AEAD_TLS1_AAD:
1452 /* Save the aad for later use. */
1453 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1456 buf = EVP_CIPHER_CTX_buf_noconst(c);
1457 memcpy(buf, ptr, arg);
1458 gctx->tls_aad_len = arg;
1459 gctx->tls_enc_records = 0;
1461 len = buf[arg - 2] << 8 | buf[arg - 1];
1462 /* Correct length for explicit iv. */
1463 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1465 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1467 /* If decrypting correct for tag too. */
1468 enc = EVP_CIPHER_CTX_encrypting(c);
1470 if (len < EVP_GCM_TLS_TAG_LEN)
1472 len -= EVP_GCM_TLS_TAG_LEN;
1474 buf[arg - 2] = len >> 8;
1475 buf[arg - 1] = len & 0xff;
1476 /* Extra padding: tag appended to record. */
1477 return EVP_GCM_TLS_TAG_LEN;
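/*
 * Worked example: for a TLS 1.2 record the 13-byte AAD ends with a 2-byte
 * length which, on entry, covers the 8-byte explicit IV plus the payload
 * (plus the 16-byte tag when decrypting).  The code above rewrites it to
 * the plaintext length, which is the value GHASHed as AAD, and returns
 * the tag length so the EVP layer reserves room for the tag.
 */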
1481 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1482 iv = EVP_CIPHER_CTX_iv_noconst(c);
1484 if (gctx->iv == iv) {
1485 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1487 len = S390X_gcm_ivpadlen(gctx->ivlen);
1489 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1490 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1494 memcpy(gctx_out->iv, gctx->iv, len);
1504 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1506 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1507 const unsigned char *key,
1508 const unsigned char *iv, int enc)
1510 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1513 if (iv == NULL && key == NULL)
1517 keylen = EVP_CIPHER_CTX_key_length(ctx);
1518 memcpy(&gctx->kma.param.k, key, keylen);
1520 gctx->fc = S390X_AES_FC(keylen);
1522 gctx->fc |= S390X_DECRYPT;
1524 if (iv == NULL && gctx->iv_set)
1528 s390x_aes_gcm_setiv(gctx, iv);
1534 s390x_aes_gcm_setiv(gctx, iv);
1536 memcpy(gctx->iv, iv, gctx->ivlen);
1545 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1546 * if successful. Otherwise -1 is returned. Code is big-endian.
1548 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1549 const unsigned char *in, size_t len)
1551 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1552 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1553 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1556 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1560 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
* Requirements from SP 800-38D". The requirement is for one party to the
* communication to fail after 2^64 - 1 records. We do this on the encrypting
1565 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
1566 EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
1570 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1571 : EVP_CTRL_GCM_SET_IV_INV,
1572 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1575 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1576 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1577 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
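/* len now holds the payload length; the record handled here is laid out
 * as 8-byte explicit IV, then payload, then 16-byte tag, and in/out have
 * already been advanced past the explicit IV. */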
1579 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1580 gctx->kma.param.tpcl = len << 3;
1581 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1582 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1585 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1586 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1588 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1589 EVP_GCM_TLS_TAG_LEN)) {
1590 OPENSSL_cleanse(out, len);
1597 gctx->tls_aad_len = -1;
1602 * Called from EVP layer to initialize context, process additional
1603 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1604 * ciphertext or process a TLS packet, depending on context. Returns bytes
1605 * written on success. Otherwise -1 is returned. Code is big-endian.
1607 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1608 const unsigned char *in, size_t len)
1610 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1611 unsigned char *buf, tmp[16];
1617 if (gctx->tls_aad_len >= 0)
1618 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1625 if (s390x_aes_gcm_aad(gctx, in, len))
1628 if (s390x_aes_gcm(gctx, in, out, len))
1633 gctx->kma.param.taadl <<= 3;
1634 gctx->kma.param.tpcl <<= 3;
1635 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1636 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1637 /* recall that we already did en-/decrypt gctx->mres
1638 * and returned it to caller... */
1639 OPENSSL_cleanse(tmp, gctx->mreslen);
1642 enc = EVP_CIPHER_CTX_encrypting(ctx);
1646 if (gctx->taglen < 0)
1649 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1650 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1657 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1659 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1660 const unsigned char *iv;
1665 iv = EVP_CIPHER_CTX_iv(c);
1667 OPENSSL_free(gctx->iv);
1669 OPENSSL_cleanse(gctx, sizeof(*gctx));
1673 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1675 # define s390x_aes_xts_init_key aes_xts_init_key
1676 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1677 const unsigned char *key,
1678 const unsigned char *iv, int enc);
1679 # define s390x_aes_xts_cipher aes_xts_cipher
1680 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1681 const unsigned char *in, size_t len);
1682 # define s390x_aes_xts_ctrl aes_xts_ctrl
1683 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1684 # define s390x_aes_xts_cleanup aes_xts_cleanup
1687 * Set nonce and length fields. Code is big-endian.
1689 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1690 const unsigned char *nonce,
1693 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1694 ctx->aes.ccm.nonce.g[1] = mlen;
1695 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
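/*
 * The finished block is CCM's B0/counter template: the flags byte, then
 * the (15 - L)-byte nonce, then an L-byte message-length field made up of
 * the low L bytes of the 64-bit length stored above.
 */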
1699 * Process additional authenticated data. Code is big-endian.
1701 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1710 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1712 /* Suppress 'type-punned pointer dereference' warning. */
1713 ptr = ctx->aes.ccm.buf.b;
1715 if (alen < ((1 << 16) - (1 << 8))) {
1716 *(uint16_t *)ptr = alen;
1718 } else if (sizeof(alen) == 8
1719 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1720 *(uint16_t *)ptr = 0xffff;
1721 *(uint64_t *)(ptr + 2) = alen;
1724 *(uint16_t *)ptr = 0xfffe;
1725 *(uint32_t *)(ptr + 2) = alen;
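/*
 * This is the RFC 3610 AAD length encoding: lengths below 2^16 - 2^8 use
 * two bytes, lengths up to 2^32 - 1 use 0xff 0xfe plus four bytes, and
 * anything larger uses 0xff 0xff plus eight bytes.
 */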
1729 while (i < 16 && alen) {
1730 ctx->aes.ccm.buf.b[i] = *aad;
1736 ctx->aes.ccm.buf.b[i] = 0;
1740 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1741 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1742 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1743 &ctx->aes.ccm.kmac_param);
1744 ctx->aes.ccm.blocks += 2;
1747 alen &= ~(size_t)0xf;
1749 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1750 ctx->aes.ccm.blocks += alen >> 4;
1754 for (i = 0; i < rem; i++)
1755 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1757 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1758 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1759 ctx->aes.ccm.kmac_param.k);
1760 ctx->aes.ccm.blocks++;
1765 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1768 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1769 unsigned char *out, size_t len, int enc)
1772 unsigned int i, l, num;
1773 unsigned char flags;
1775 flags = ctx->aes.ccm.nonce.b[0];
1776 if (!(flags & S390X_CCM_AAD_FLAG)) {
1777 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1778 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1779 ctx->aes.ccm.blocks++;
1782 ctx->aes.ccm.nonce.b[0] = l;
1785 * Reconstruct length from encoded length field
1786 * and initialize it with counter value.
1789 for (i = 15 - l; i < 15; i++) {
1790 n |= ctx->aes.ccm.nonce.b[i];
1791 ctx->aes.ccm.nonce.b[i] = 0;
1794 n |= ctx->aes.ccm.nonce.b[15];
1795 ctx->aes.ccm.nonce.b[15] = 1;
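/*
 * At this point the encoded length field has been read back (to be
 * checked against the supplied len) and the block has been turned into
 * counter block A_1: counter value 1 encrypts the payload, while counter
 * value 0 is reserved for the tag and is restored near the end of this
 * function before the final s390x_km() call.
 */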
1798 return -1; /* length mismatch */
1801 /* Two operations per block plus one for tag encryption */
1802 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1803 if (ctx->aes.ccm.blocks > (1ULL << 61))
1804 return -2; /* too much data */
1809 len &= ~(size_t)0xf;
1812 /* mac-then-encrypt */
1814 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1816 for (i = 0; i < rem; i++)
1817 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1819 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1820 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1821 ctx->aes.ccm.kmac_param.k);
1824 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1825 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1826 &num, (ctr128_f)AES_ctr32_encrypt);
1828 /* decrypt-then-mac */
1829 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1830 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1831 &num, (ctr128_f)AES_ctr32_encrypt);
1834 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1836 for (i = 0; i < rem; i++)
1837 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1839 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1840 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1841 ctx->aes.ccm.kmac_param.k);
1845 for (i = 15 - l; i < 16; i++)
1846 ctx->aes.ccm.nonce.b[i] = 0;
1848 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1849 ctx->aes.ccm.kmac_param.k);
1850 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1851 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1853 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
1858 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1859 * if successful. Otherwise -1 is returned.
1861 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1862 const unsigned char *in, size_t len)
1864 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1865 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1866 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1867 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1870 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1874 /* Set explicit iv (sequence number). */
1875 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1878 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1880 * Get explicit iv (sequence number). We already have fixed iv
1881 * (server/client_write_iv) here.
1883 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1884 s390x_aes_ccm_setiv(cctx, ivec, len);
1886 /* Process aad (sequence number|type|version|length) */
1887 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1889 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1890 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1893 if (s390x_aes_ccm(cctx, in, out, len, enc))
1896 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1897 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1899 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1900 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1905 OPENSSL_cleanse(out, len);
1911 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1914 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1915 const unsigned char *key,
1916 const unsigned char *iv, int enc)
1918 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1919 unsigned char *ivec;
1922 if (iv == NULL && key == NULL)
1926 keylen = EVP_CIPHER_CTX_key_length(ctx);
1927 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
1928 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1930 /* Store encoded m and l. */
1931 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1932 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
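/*
 * Worked example: the defaults L = 8, M = 12 give
 * ((8 - 1) & 7) | (((12 - 2) >> 1) & 7) << 3 = 0x07 | 0x28 = 0x2f;
 * the S390X_CCM_AAD_FLAG bit is OR-ed in later by s390x_aes_ccm_aad().
 */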
1933 memset(cctx->aes.ccm.nonce.b + 1, 0,
1934 sizeof(cctx->aes.ccm.nonce.b));
1935 cctx->aes.ccm.blocks = 0;
1937 cctx->aes.ccm.key_set = 1;
1941 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1942 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
1944 cctx->aes.ccm.iv_set = 1;
1951 * Called from EVP layer to initialize context, process additional
1952 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1953 * plaintext or process a TLS packet, depending on context. Returns bytes
1954 * written on success. Otherwise -1 is returned.
1956 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1957 const unsigned char *in, size_t len)
1959 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1960 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1962 unsigned char *buf, *ivec;
1964 if (!cctx->aes.ccm.key_set)
1967 if (cctx->aes.ccm.tls_aad_len >= 0)
1968 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1971 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
1972 * so integrity must be checked already at Update() i.e., before
1973 * potentially corrupted data is output.
1975 if (in == NULL && out != NULL)
1978 if (!cctx->aes.ccm.iv_set)
1982 /* Update(): Pass message length. */
1984 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1985 s390x_aes_ccm_setiv(cctx, ivec, len);
1987 cctx->aes.ccm.len_set = 1;
1991 /* Update(): Process aad. */
1992 if (!cctx->aes.ccm.len_set && len)
1995 s390x_aes_ccm_aad(cctx, in, len);
1999 /* The tag must be set before actually decrypting data */
2000 if (!enc && !cctx->aes.ccm.tag_set)
2003 /* Update(): Process message. */
2005 if (!cctx->aes.ccm.len_set) {
2007 * In case message length was not previously set explicitly via
2008 * Update(), set it now.
2010 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2011 s390x_aes_ccm_setiv(cctx, ivec, len);
2013 cctx->aes.ccm.len_set = 1;
2017 if (s390x_aes_ccm(cctx, in, out, len, enc))
2020 cctx->aes.ccm.tag_set = 1;
2025 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2026 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2027 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2033 OPENSSL_cleanse(out, len);
2035 cctx->aes.ccm.iv_set = 0;
2036 cctx->aes.ccm.tag_set = 0;
2037 cctx->aes.ccm.len_set = 0;
2043 * Performs various operations on the context structure depending on control
2044 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2045 * Code is big-endian.
2047 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2049 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2050 unsigned char *buf, *iv;
2055 cctx->aes.ccm.key_set = 0;
2056 cctx->aes.ccm.iv_set = 0;
2057 cctx->aes.ccm.l = 8;
2058 cctx->aes.ccm.m = 12;
2059 cctx->aes.ccm.tag_set = 0;
2060 cctx->aes.ccm.len_set = 0;
2061 cctx->aes.ccm.tls_aad_len = -1;
2064 case EVP_CTRL_GET_IVLEN:
2065 *(int *)ptr = 15 - cctx->aes.ccm.l;
2068 case EVP_CTRL_AEAD_TLS1_AAD:
2069 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2072 /* Save the aad for later use. */
2073 buf = EVP_CIPHER_CTX_buf_noconst(c);
2074 memcpy(buf, ptr, arg);
2075 cctx->aes.ccm.tls_aad_len = arg;
2077 len = buf[arg - 2] << 8 | buf[arg - 1];
2078 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2081 /* Correct length for explicit iv. */
2082 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2084 enc = EVP_CIPHER_CTX_encrypting(c);
2086 if (len < cctx->aes.ccm.m)
2089 /* Correct length for tag. */
2090 len -= cctx->aes.ccm.m;
2093 buf[arg - 2] = len >> 8;
2094 buf[arg - 1] = len & 0xff;
2096 /* Extra padding: tag appended to record. */
2097 return cctx->aes.ccm.m;
2099 case EVP_CTRL_CCM_SET_IV_FIXED:
2100 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2103 /* Copy to first part of the iv. */
2104 iv = EVP_CIPHER_CTX_iv_noconst(c);
2105 memcpy(iv, ptr, arg);
2108 case EVP_CTRL_AEAD_SET_IVLEN:
2112 case EVP_CTRL_CCM_SET_L:
2113 if (arg < 2 || arg > 8)
2116 cctx->aes.ccm.l = arg;
2119 case EVP_CTRL_AEAD_SET_TAG:
2120 if ((arg & 1) || arg < 4 || arg > 16)
2123 enc = EVP_CIPHER_CTX_encrypting(c);
2128 cctx->aes.ccm.tag_set = 1;
2129 buf = EVP_CIPHER_CTX_buf_noconst(c);
2130 memcpy(buf, ptr, arg);
2133 cctx->aes.ccm.m = arg;
2136 case EVP_CTRL_AEAD_GET_TAG:
2137 enc = EVP_CIPHER_CTX_encrypting(c);
2138 if (!enc || !cctx->aes.ccm.tag_set)
if (arg < cctx->aes.ccm.m)
2144 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2145 cctx->aes.ccm.tag_set = 0;
2146 cctx->aes.ccm.iv_set = 0;
2147 cctx->aes.ccm.len_set = 0;
2158 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2160 # ifndef OPENSSL_NO_OCB
2161 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2163 # define s390x_aes_ocb_init_key aes_ocb_init_key
2164 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2165 const unsigned char *iv, int enc);
2166 # define s390x_aes_ocb_cipher aes_ocb_cipher
2167 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2168 const unsigned char *in, size_t len);
2169 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2170 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2171 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2172 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2175 # ifndef OPENSSL_NO_SIV
2176 # define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
2178 # define s390x_aes_siv_init_key aes_siv_init_key
2179 # define s390x_aes_siv_cipher aes_siv_cipher
2180 # define s390x_aes_siv_cleanup aes_siv_cleanup
2181 # define s390x_aes_siv_ctrl aes_siv_ctrl
2184 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2186 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2187 nid##_##keylen##_##nmode,blocksize, \
2190 flags | EVP_CIPH_##MODE##_MODE, \
2191 s390x_aes_##mode##_init_key, \
2192 s390x_aes_##mode##_cipher, \
2194 sizeof(S390X_AES_##MODE##_CTX), \
2200 static const EVP_CIPHER aes_##keylen##_##mode = { \
2201 nid##_##keylen##_##nmode, \
2205 flags | EVP_CIPH_##MODE##_MODE, \
2207 aes_##mode##_cipher, \
2209 sizeof(EVP_AES_KEY), \
2215 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2217 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2218 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2221 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2222 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2223 nid##_##keylen##_##mode, \
2225 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2227 flags | EVP_CIPH_##MODE##_MODE, \
2228 s390x_aes_##mode##_init_key, \
2229 s390x_aes_##mode##_cipher, \
2230 s390x_aes_##mode##_cleanup, \
2231 sizeof(S390X_AES_##MODE##_CTX), \
2234 s390x_aes_##mode##_ctrl, \
2237 static const EVP_CIPHER aes_##keylen##_##mode = { \
2238 nid##_##keylen##_##mode,blocksize, \
2239 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2241 flags | EVP_CIPH_##MODE##_MODE, \
2242 aes_##mode##_init_key, \
2243 aes_##mode##_cipher, \
2244 aes_##mode##_cleanup, \
2245 sizeof(EVP_AES_##MODE##_CTX), \
2248 aes_##mode##_ctrl, \
2251 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2253 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2254 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2259 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2260 static const EVP_CIPHER aes_##keylen##_##mode = { \
2261 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2262 flags|EVP_CIPH_##MODE##_MODE, \
2264 aes_##mode##_cipher, \
2266 sizeof(EVP_AES_KEY), \
2267 NULL,NULL,NULL,NULL }; \
2268 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2269 { return &aes_##keylen##_##mode; }
2271 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2272 static const EVP_CIPHER aes_##keylen##_##mode = { \
2273 nid##_##keylen##_##mode,blocksize, \
2274 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2276 flags|EVP_CIPH_##MODE##_MODE, \
2277 aes_##mode##_init_key, \
2278 aes_##mode##_cipher, \
2279 aes_##mode##_cleanup, \
2280 sizeof(EVP_AES_##MODE##_CTX), \
2281 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2282 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2283 { return &aes_##keylen##_##mode; }
2287 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2288 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2289 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2290 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2291 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2292 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2293 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2294 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
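/*
 * For example, BLOCK_CIPHER_generic_pack(NID_aes, 128, 0) below expands to
 * the cbc, ecb, ofb128, cfb128, cfb1, cfb8 and ctr EVP_CIPHER tables for
 * AES-128 together with their EVP_aes_128_*() accessors; where an
 * accelerated implementation was compiled in, each accessor selects it at
 * run time (see the BLOCK_CIPHER_generic definitions above).
 */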
2296 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2297 const unsigned char *iv, int enc)
2300 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2302 mode = EVP_CIPHER_CTX_mode(ctx);
2303 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2305 #ifdef HWAES_CAPABLE
2306 if (HWAES_CAPABLE) {
2307 ret = HWAES_set_decrypt_key(key,
2308 EVP_CIPHER_CTX_key_length(ctx) * 8,
2310 dat->block = (block128_f) HWAES_decrypt;
2311 dat->stream.cbc = NULL;
2312 # ifdef HWAES_cbc_encrypt
2313 if (mode == EVP_CIPH_CBC_MODE)
2314 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2318 #ifdef BSAES_CAPABLE
2319 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2320 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2322 dat->block = (block128_f) AES_decrypt;
2323 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2326 #ifdef VPAES_CAPABLE
2327 if (VPAES_CAPABLE) {
2328 ret = vpaes_set_decrypt_key(key,
2329 EVP_CIPHER_CTX_key_length(ctx) * 8,
2331 dat->block = (block128_f) vpaes_decrypt;
2332 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2333 (cbc128_f) vpaes_cbc_encrypt : NULL;
2337 ret = AES_set_decrypt_key(key,
2338 EVP_CIPHER_CTX_key_length(ctx) * 8,
2340 dat->block = (block128_f) AES_decrypt;
2341 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2342 (cbc128_f) AES_cbc_encrypt : NULL;
2345 #ifdef HWAES_CAPABLE
2346 if (HWAES_CAPABLE) {
2347 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2349 dat->block = (block128_f) HWAES_encrypt;
2350 dat->stream.cbc = NULL;
2351 # ifdef HWAES_cbc_encrypt
2352 if (mode == EVP_CIPH_CBC_MODE)
2353 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2356 # ifdef HWAES_ctr32_encrypt_blocks
2357 if (mode == EVP_CIPH_CTR_MODE)
2358 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2361 (void)0; /* terminate potentially open 'else' */
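/*
 * The preceding #ifdef'd CTR branch ends in an else with no statement of
 * its own; this no-op supplies that statement so the construct parses
 * whether or not the conditional block is compiled in.
 */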
2364 #ifdef BSAES_CAPABLE
2365 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2366 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2368 dat->block = (block128_f) AES_encrypt;
2369 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2372 #ifdef VPAES_CAPABLE
2373 if (VPAES_CAPABLE) {
2374 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2376 dat->block = (block128_f) vpaes_encrypt;
2377 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2378 (cbc128_f) vpaes_cbc_encrypt : NULL;
2382 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2384 dat->block = (block128_f) AES_encrypt;
2385 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2386 (cbc128_f) AES_cbc_encrypt : NULL;
2388 if (mode == EVP_CIPH_CTR_MODE)
2389 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2394 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2401 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2402 const unsigned char *in, size_t len)
2404 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2406 if (dat->stream.cbc)
2407 (*dat->stream.cbc) (in, out, len, &dat->ks,
2408 EVP_CIPHER_CTX_iv_noconst(ctx),
2409 EVP_CIPHER_CTX_encrypting(ctx));
2410 else if (EVP_CIPHER_CTX_encrypting(ctx))
2411 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2412 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2414 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2415 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2420 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2421 const unsigned char *in, size_t len)
2423 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2425 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2430 for (i = 0, len -= bl; i <= len; i += bl)
2431 (*dat->block) (in + i, out + i, &dat->ks);
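/* len is reduced by one block in the loop initialiser, so the i <= len
 * test runs the body once for every complete block of the original input. */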
2436 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2437 const unsigned char *in, size_t len)
2439 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2441 int num = EVP_CIPHER_CTX_num(ctx);
2442 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2443 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2444 EVP_CIPHER_CTX_set_num(ctx, num);
2448 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2449 const unsigned char *in, size_t len)
2451 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2453 int num = EVP_CIPHER_CTX_num(ctx);
2454 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2455 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2456 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2457 EVP_CIPHER_CTX_set_num(ctx, num);
2461 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2462 const unsigned char *in, size_t len)
2464 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2466 int num = EVP_CIPHER_CTX_num(ctx);
2467 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2468 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2469 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2470 EVP_CIPHER_CTX_set_num(ctx, num);
2474 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2475 const unsigned char *in, size_t len)
2477 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2479 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2480 int num = EVP_CIPHER_CTX_num(ctx);
2481 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2482 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2483 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2484 EVP_CIPHER_CTX_set_num(ctx, num);
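/*
 * When the context is byte-oriented (no EVP_CIPH_FLAG_LENGTH_BITS), the
 * length is converted to a bit count before being handed to
 * CRYPTO_cfb128_1_encrypt(), so the data is processed in chunks of at most
 * MAXBITCHUNK bytes; given the definition of MAXBITCHUNK above, the
 * multiplication by 8 cannot overflow a size_t.
 */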
2488 while (len >= MAXBITCHUNK) {
2489 int num = EVP_CIPHER_CTX_num(ctx);
2490 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2491 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2492 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2493 EVP_CIPHER_CTX_set_num(ctx, num);
2499 int num = EVP_CIPHER_CTX_num(ctx);
2500 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2501 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2502 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2503 EVP_CIPHER_CTX_set_num(ctx, num);
2509 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2510 const unsigned char *in, size_t len)
2512 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2513 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2515 if (dat->stream.ctr)
2516 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2517 EVP_CIPHER_CTX_iv_noconst(ctx),
2518 EVP_CIPHER_CTX_buf_noconst(ctx),
2519 &num, dat->stream.ctr);
2521 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2522 EVP_CIPHER_CTX_iv_noconst(ctx),
2523 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2525 EVP_CIPHER_CTX_set_num(ctx, num);
2529 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2530 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2531 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
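/*-
 * Illustrative usage sketch (not part of this file): the generic ciphers
 * packed above are consumed through the public EVP interface.  A minimal
 * AES-256-CTR encryption, assuming the caller supplies a 32-byte |key|, a
 * 16-byte |iv| and the |in|/|out| buffers, looks roughly like:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl = 0, tmpl = 0;
 *
 *     if (c == NULL
 *             || !EVP_EncryptInit_ex(c, EVP_aes_256_ctr(), NULL, key, iv)
 *             || !EVP_EncryptUpdate(c, out, &outl, in, inlen)
 *             || !EVP_EncryptFinal_ex(c, out + outl, &tmpl))
 *         goto err;
 *     outl += tmpl;
 *     EVP_CIPHER_CTX_free(c);
 */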
2533 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2535 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2538 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2539 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2540 OPENSSL_free(gctx->iv);
2544 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2546 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2551 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
2555 gctx->tls_aad_len = -1;
2558 case EVP_CTRL_GET_IVLEN:
2559 *(int *)ptr = gctx->ivlen;
2562 case EVP_CTRL_AEAD_SET_IVLEN:
2565 /* Allocate memory for IV if needed */
2566 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2567 if (gctx->iv != c->iv)
2568 OPENSSL_free(gctx->iv);
2569 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2570 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2577 case EVP_CTRL_AEAD_SET_TAG:
2578 if (arg <= 0 || arg > 16 || c->encrypt)
2580 memcpy(c->buf, ptr, arg);
2584 case EVP_CTRL_AEAD_GET_TAG:
2585 if (arg <= 0 || arg > 16 || !c->encrypt
2586 || gctx->taglen < 0)
2588 memcpy(ptr, c->buf, arg);
2591 case EVP_CTRL_GET_IV:
2592 if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
2594 if (gctx->ivlen != arg)
2596 memcpy(ptr, gctx->iv, arg);
2599 case EVP_CTRL_GCM_SET_IV_FIXED:
2600 /* Special case: -1 length restores whole IV */
2602 memcpy(gctx->iv, ptr, gctx->ivlen);
2607 * Fixed field must be at least 4 bytes and invocation field at least 8.
2610 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2613 memcpy(gctx->iv, ptr, arg);
2614 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2619 case EVP_CTRL_GCM_IV_GEN:
2620 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2622 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2623 if (arg <= 0 || arg > gctx->ivlen)
2625 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2627 * The invocation field will be at least 8 bytes in size, so there is no
2628 * need to check for wraparound or to increment more than the last 8 bytes.
2630 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2634 case EVP_CTRL_GCM_SET_IV_INV:
2635 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2637 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2638 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2642 case EVP_CTRL_AEAD_TLS1_AAD:
2643 /* Save the AAD for later use */
2644 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2646 memcpy(c->buf, ptr, arg);
2647 gctx->tls_aad_len = arg;
2648 gctx->tls_enc_records = 0;
2650 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2651 /* Correct length for explicit IV */
2652 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2654 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2655 /* If decrypting, correct for the tag too */
2657 if (len < EVP_GCM_TLS_TAG_LEN)
2659 len -= EVP_GCM_TLS_TAG_LEN;
2661 c->buf[arg - 2] = len >> 8;
2662 c->buf[arg - 1] = len & 0xff;
2664 /* Extra padding: tag appended to record */
2665 return EVP_GCM_TLS_TAG_LEN;
2669 EVP_CIPHER_CTX *out = ptr;
2670 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2671 if (gctx->gcm.key) {
2672 if (gctx->gcm.key != &gctx->ks)
2674 gctx_out->gcm.key = &gctx_out->ks;
2676 if (gctx->iv == c->iv)
2677 gctx_out->iv = out->iv;
2679 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2680 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2683 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2694 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2695 const unsigned char *iv, int enc)
2697 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2702 #ifdef HWAES_CAPABLE
2703 if (HWAES_CAPABLE) {
2704 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2705 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2706 (block128_f) HWAES_encrypt);
2707 # ifdef HWAES_ctr32_encrypt_blocks
2708 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2715 #ifdef BSAES_CAPABLE
2716 if (BSAES_CAPABLE) {
2717 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2718 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2719 (block128_f) AES_encrypt);
2720 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2724 #ifdef VPAES_CAPABLE
2725 if (VPAES_CAPABLE) {
2726 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2727 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2728 (block128_f) vpaes_encrypt);
2733 (void)0; /* terminate potentially open 'else' */
2735 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2736 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2737 (block128_f) AES_encrypt);
2739 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
2746 * If we have an IV we can set it directly, otherwise use the saved IV.
2748 if (iv == NULL && gctx->iv_set)
2751 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2756 /* If key set use IV, otherwise copy */
2758 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2760 memcpy(gctx->iv, iv, gctx->ivlen);
2768 * Handle TLS GCM packet format. This consists of the last portion of the IV
2769 * followed by the payload and finally the tag. On encrypt generate IV,
2770 * encrypt payload and write the tag. On verify retrieve IV, decrypt the payload and verify the tag.
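/*-
 * Caller-side sketch (illustrative only, not this file's API): the TLS
 * record layer typically drives this path by setting the key and the 4-byte
 * fixed IV part (EVP_CTRL_GCM_SET_IV_FIXED) at handshake time, then for each
 * record passing the 13-byte pseudo-header and making one in-place
 * EVP_Cipher() call over the whole record:
 *
 *     EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_TLS1_AAD,
 *                         EVP_AEAD_TLS1_AAD_LEN, aad);
 *     EVP_Cipher(ctx, rec, rec, rec_len);
 *
 * where |rec| holds explicit IV || payload || tag.  On encrypt the return
 * value is the full record length including the explicit IV and the tag.
 */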
2774 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2775 const unsigned char *in, size_t len)
2777 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2779 /* Encrypt/decrypt must be performed in place */
2781 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2785 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
2786 * Requirements from SP 800-38D". The requirement is for one party to the
2787 * communication to fail after 2^64 - 1 keys. We do this on the encrypting side only.
2790 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
2791 EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
2796 * Set IV from start of buffer or generate IV and write to start of
2799 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
2800 : EVP_CTRL_GCM_SET_IV_INV,
2801 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2804 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
2806 /* Fix buffer and length to point to payload */
2807 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2808 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2809 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2811 /* Encrypt payload */
2814 #if defined(AES_GCM_ASM)
2815 if (len >= 32 && AES_GCM_ASM(gctx)) {
2816 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2819 bulk = AES_gcm_encrypt(in, out, len,
2821 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2822 gctx->gcm.len.u[1] += bulk;
2825 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2828 len - bulk, gctx->ctr))
2832 #if defined(AES_GCM_ASM2)
2833 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2834 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2837 bulk = AES_gcm_encrypt(in, out, len,
2839 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2840 gctx->gcm.len.u[1] += bulk;
2843 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2844 in + bulk, out + bulk, len - bulk))
2848 /* Finally write tag */
2849 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2850 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2855 #if defined(AES_GCM_ASM)
2856 if (len >= 16 && AES_GCM_ASM(gctx)) {
2857 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2860 bulk = AES_gcm_decrypt(in, out, len,
2862 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2863 gctx->gcm.len.u[1] += bulk;
2866 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2869 len - bulk, gctx->ctr))
2873 #if defined(AES_GCM_ASM2)
2874 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2875 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2878 bulk = AES_gcm_decrypt(in, out, len,
2880 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2881 gctx->gcm.len.u[1] += bulk;
2884 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2885 in + bulk, out + bulk, len - bulk))
2889 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
2890 /* If tag mismatch wipe buffer */
2891 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
2892 OPENSSL_cleanse(out, len);
2900 gctx->tls_aad_len = -1;
2906 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVs and keys"
2908 * See also 8.2.2 RBG-based construction.
2909 * Random construction consists of a free field (which can be NULL) and a
2910 * random field which will use a DRBG that can return at least 96 bits of
2911 * entropy strength. (The DRBG must be seeded by the FIPS module).
2913 static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2915 int sz = gctx->ivlen - offset;
2917 /* Must be at least 96 bits */
2918 if (sz <= 0 || gctx->ivlen < 12)
2921 /* Use DRBG to generate random iv */
2922 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2926 #endif /* FIPS_MODULE */
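/*
 * Worked example of the construction above: with the default 12-byte GCM IV
 * and an offset of 0 (as used by aes_gcm_cipher() below),
 * aes_gcm_iv_generate() fills all 96 bits from the DRBG; with a caller-chosen
 * free field of |offset| bytes, the remaining ivlen - offset bytes are
 * random.  The checks require an IV of at least 12 bytes and a non-empty
 * random field.
 */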
2928 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2929 const unsigned char *in, size_t len)
2931 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2933 /* If not set up, return error */
2937 if (gctx->tls_aad_len >= 0)
2938 return aes_gcm_tls_cipher(ctx, out, in, len);
2942 * FIPS requires generation of AES-GCM IVs inside the FIPS module.
2943 * The IV can still be set externally (the security policy will state that
2944 * this is not FIPS compliant). There are some applications
2945 * where setting the IV externally is the only option available.
2947 if (!gctx->iv_set) {
2948 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
2950 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2952 gctx->iv_gen_rand = 1;
2957 #endif /* FIPS_MODULE */
2961 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2963 } else if (ctx->encrypt) {
2966 #if defined(AES_GCM_ASM)
2967 if (len >= 32 && AES_GCM_ASM(gctx)) {
2968 size_t res = (16 - gctx->gcm.mres) % 16;
2970 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2973 bulk = AES_gcm_encrypt(in + res,
2974 out + res, len - res,
2975 gctx->gcm.key, gctx->gcm.Yi.c,
2977 gctx->gcm.len.u[1] += bulk;
2981 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2984 len - bulk, gctx->ctr))
2988 #if defined(AES_GCM_ASM2)
2989 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2990 size_t res = (16 - gctx->gcm.mres) % 16;
2992 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2995 bulk = AES_gcm_encrypt(in + res,
2996 out + res, len - res,
2997 gctx->gcm.key, gctx->gcm.Yi.c,
2999 gctx->gcm.len.u[1] += bulk;
3003 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3004 in + bulk, out + bulk, len - bulk))
3010 #if defined(AES_GCM_ASM)
3011 if (len >= 16 && AES_GCM_ASM(gctx)) {
3012 size_t res = (16 - gctx->gcm.mres) % 16;
3014 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3017 bulk = AES_gcm_decrypt(in + res,
3018 out + res, len - res,
3020 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3021 gctx->gcm.len.u[1] += bulk;
3025 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3028 len - bulk, gctx->ctr))
3032 #if defined(AES_GCM_ASM2)
3033 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3034 size_t res = (16 - gctx->gcm.mres) % 16;
3036 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3039 bulk = AES_gcm_decrypt(in + res,
3040 out + res, len - res,
3042 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3043 gctx->gcm.len.u[1] += bulk;
3047 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3048 in + bulk, out + bulk, len - bulk))
3054 if (!ctx->encrypt) {
3055 if (gctx->taglen < 0)
3057 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3062 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3064 /* Don't reuse the IV */
3071 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3072 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3073 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3074 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
3076 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3077 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3078 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3079 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3080 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3081 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
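/*-
 * Illustrative EVP-level use of the GCM ciphers defined above (a sketch, with
 * |key|, |iv|, |aad|, |in| and their lengths assumed to be supplied by the
 * caller): encrypt with AES-256-GCM, a 12-byte IV and a 16-byte tag.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char tag[16];
 *     int outl = 0, tmpl = 0;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);
 *     EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 *
 * Decryption is symmetric, except the expected tag is supplied with
 * EVP_CTRL_AEAD_SET_TAG before EVP_DecryptFinal_ex(), whose return value
 * indicates whether the tag verified.
 */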
3083 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3085 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3087 if (type == EVP_CTRL_COPY) {
3088 EVP_CIPHER_CTX *out = ptr;
3089 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3091 if (xctx->xts.key1) {
3092 if (xctx->xts.key1 != &xctx->ks1)
3094 xctx_out->xts.key1 = &xctx_out->ks1;
3096 if (xctx->xts.key2) {
3097 if (xctx->xts.key2 != &xctx->ks2)
3099 xctx_out->xts.key2 = &xctx_out->ks2;
3102 } else if (type != EVP_CTRL_INIT)
3104 /* key1 and key2 are used as indicators that both the key and IV are set */
3105 xctx->xts.key1 = NULL;
3106 xctx->xts.key2 = NULL;
3110 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3111 const unsigned char *iv, int enc)
3113 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3120 /* The key is really two half-length keys concatenated together */
3121 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3122 const int bits = bytes * 8;
3125 * Verify that the two keys are different.
3127 * This addresses the vulnerability described in Rogaway's
3128 * September 2004 paper:
3130 * "Efficient Instantiations of Tweakable Blockciphers and
3131 * Refinements to Modes OCB and PMAC".
3132 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3134 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3136 * "The check for Key_1 != Key_2 shall be done at any place
3137 * BEFORE using the keys in the XTS-AES algorithm to process data with them."
3140 if ((!allow_insecure_decrypt || enc)
3141 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3142 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
3147 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3149 xctx->stream = NULL;
3151 /* key_len is two AES keys */
3152 #ifdef HWAES_CAPABLE
3153 if (HWAES_CAPABLE) {
3155 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3156 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3157 # ifdef HWAES_xts_encrypt
3158 xctx->stream = HWAES_xts_encrypt;
3161 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3162 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3163 # ifdef HWAES_xts_decrypt
3164 xctx->stream = HWAES_xts_decrypt;
3168 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3169 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3171 xctx->xts.key1 = &xctx->ks1;
3175 #ifdef BSAES_CAPABLE
3177 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3180 #ifdef VPAES_CAPABLE
3181 if (VPAES_CAPABLE) {
3183 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
3184 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3186 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
3187 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3190 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3191 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3193 xctx->xts.key1 = &xctx->ks1;
3197 (void)0; /* terminate potentially open 'else' */
3200 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3201 xctx->xts.block1 = (block128_f) AES_encrypt;
3203 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3204 xctx->xts.block1 = (block128_f) AES_decrypt;
3207 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3208 xctx->xts.block2 = (block128_f) AES_encrypt;
3210 xctx->xts.key1 = &xctx->ks1;
3215 xctx->xts.key2 = &xctx->ks2;
3216 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3222 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3223 const unsigned char *in, size_t len)
3225 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3227 if (xctx->xts.key1 == NULL
3228 || xctx->xts.key2 == NULL
3231 || len < AES_BLOCK_SIZE)
3235 * Impose a limit of 2^20 blocks per data unit as specified by
3236 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3237 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3238 * NIST SP 800-38E mandates the same limit.
3240 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3241 EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3246 (*xctx->stream) (in, out, len,
3247 xctx->xts.key1, xctx->xts.key2,
3248 EVP_CIPHER_CTX_iv_noconst(ctx));
3249 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3251 EVP_CIPHER_CTX_encrypting(ctx)))
3256 #define aes_xts_cleanup NULL
3258 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3259 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3260 | EVP_CIPH_CUSTOM_COPY)
3262 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3263 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
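/*-
 * Illustrative XTS usage (sketch only): the EVP key is the concatenation of
 * the two AES keys, so EVP_aes_256_xts() expects a 64-byte key, and the
 * 16-byte IV carries the tweak (typically the data unit / sector number).
 * The input must be at least one 16-byte block and is processed as a single
 * data unit.  |key64| and |tweak16| are caller-supplied buffers assumed for
 * this sketch:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl = 0;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key64, tweak16);
 *     EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *     EVP_CIPHER_CTX_free(c);
 */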
3265 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3267 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3276 cctx->tls_aad_len = -1;
3279 case EVP_CTRL_GET_IVLEN:
3280 *(int *)ptr = 15 - cctx->L;
3283 case EVP_CTRL_AEAD_TLS1_AAD:
3284 /* Save the AAD for later use */
3285 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3287 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3288 cctx->tls_aad_len = arg;
3291 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3292 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3293 /* Correct length for explicit IV */
3294 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3296 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3297 /* If decrypting, correct for the tag too */
3298 if (!EVP_CIPHER_CTX_encrypting(c)) {
3303 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3304 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3306 /* Extra padding: tag appended to record */
3309 case EVP_CTRL_CCM_SET_IV_FIXED:
3310 /* Sanity check length */
3311 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3313 /* Just copy to first part of IV */
3314 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3317 case EVP_CTRL_AEAD_SET_IVLEN:
3320 case EVP_CTRL_CCM_SET_L:
3321 if (arg < 2 || arg > 8)
3326 case EVP_CTRL_AEAD_SET_TAG:
3327 if ((arg & 1) || arg < 4 || arg > 16)
3329 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3333 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3338 case EVP_CTRL_AEAD_GET_TAG:
3339 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3341 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3350 EVP_CIPHER_CTX *out = ptr;
3351 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3352 if (cctx->ccm.key) {
3353 if (cctx->ccm.key != &cctx->ks)
3355 cctx_out->ccm.key = &cctx_out->ks;
3366 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3367 const unsigned char *iv, int enc)
3369 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3374 #ifdef HWAES_CAPABLE
3375 if (HWAES_CAPABLE) {
3376 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3379 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3380 &cctx->ks, (block128_f) HWAES_encrypt);
3386 #ifdef VPAES_CAPABLE
3387 if (VPAES_CAPABLE) {
3388 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3390 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3391 &cctx->ks, (block128_f) vpaes_encrypt);
3397 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3399 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3400 &cctx->ks, (block128_f) AES_encrypt);
3405 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3411 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3412 const unsigned char *in, size_t len)
3414 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3415 CCM128_CONTEXT *ccm = &cctx->ccm;
3416 /* Encrypt/decrypt must be performed in place */
3417 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3419 /* If encrypting set explicit IV from sequence number (start of AAD) */
3420 if (EVP_CIPHER_CTX_encrypting(ctx))
3421 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3422 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3423 /* Get rest of IV from explicit IV */
3424 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3425 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3426 /* Correct length value */
3427 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3428 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3432 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3433 /* Fix buffer to point to payload */
3434 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3435 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3436 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3437 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3439 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3441 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3443 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3445 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3447 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3448 unsigned char tag[16];
3449 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3450 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3454 OPENSSL_cleanse(out, len);
3459 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3460 const unsigned char *in, size_t len)
3462 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3463 CCM128_CONTEXT *ccm = &cctx->ccm;
3464 /* If not set up, return error */
3468 if (cctx->tls_aad_len >= 0)
3469 return aes_ccm_tls_cipher(ctx, out, in, len);
3471 /* EVP_*Final() doesn't return any data */
3472 if (in == NULL && out != NULL)
3480 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3486 /* If we have AAD we need the message length */
3487 if (!cctx->len_set && len)
3489 CRYPTO_ccm128_aad(ccm, in, len);
3493 /* The tag must be set before actually decrypting data */
3494 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3497 /* If the length is not set yet, do it now */
3498 if (!cctx->len_set) {
3499 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3504 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3505 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3507 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3513 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3515 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3516 unsigned char tag[16];
3517 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3518 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3524 OPENSSL_cleanse(out, len);
3532 #define aes_ccm_cleanup NULL
3534 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3535 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3536 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3537 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3538 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3539 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
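/*-
 * Illustrative CCM usage (sketch only, caller-supplied |key|, |nonce|, |aad|
 * and |in| buffers assumed): the nonce length and tag length are set before
 * the key, and the total plaintext length must be passed before any AAD.
 * Encryption with a 12-byte nonce (so L = 3) and a 16-byte tag:
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(c, NULL, &outl, NULL, inlen);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);
 *     EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */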
3546 /* Indicates if IV has been set */
3550 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3551 const unsigned char *iv, int enc)
3553 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3557 if (EVP_CIPHER_CTX_encrypting(ctx))
3558 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3561 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3567 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3568 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3573 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3574 const unsigned char *in, size_t inlen)
3576 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3578 /* AES wrap with padding has IV length of 4, without padding 8 */
3579 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3580 /* No final operation so always return zero length */
3583 /* Input length must always be non-zero */
3586 /* If decrypting we need at least 16 bytes and a multiple of 8 */
3587 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3589 /* If not padding, the input must be a multiple of 8 */
3590 if (!pad && inlen & 0x7)
3592 if (is_partially_overlapping(out, in, inlen)) {
3593 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3597 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3598 /* If padding, round up to a multiple of 8 */
3600 inlen = (inlen + 7) / 8 * 8;
3605 * If not padding, the output will be exactly 8 bytes smaller than the
3606 * input. If padding, it will be at least 8 bytes smaller, but we don't
3607 * know by how much.
3613 if (EVP_CIPHER_CTX_encrypting(ctx))
3614 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3616 (block128_f) AES_encrypt);
3618 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3620 (block128_f) AES_decrypt);
3622 if (EVP_CIPHER_CTX_encrypting(ctx))
3623 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3624 out, in, inlen, (block128_f) AES_encrypt);
3626 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3627 out, in, inlen, (block128_f) AES_decrypt);
3629 return rv ? (int)rv : -1;
3632 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3633 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3634 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3636 static const EVP_CIPHER aes_128_wrap = {
3638 8, 16, 8, WRAP_FLAGS,
3639 aes_wrap_init_key, aes_wrap_cipher,
3641 sizeof(EVP_AES_WRAP_CTX),
3642 NULL, NULL, NULL, NULL
3645 const EVP_CIPHER *EVP_aes_128_wrap(void)
3647 return &aes_128_wrap;
3650 static const EVP_CIPHER aes_192_wrap = {
3652 8, 24, 8, WRAP_FLAGS,
3653 aes_wrap_init_key, aes_wrap_cipher,
3655 sizeof(EVP_AES_WRAP_CTX),
3656 NULL, NULL, NULL, NULL
3659 const EVP_CIPHER *EVP_aes_192_wrap(void)
3661 return &aes_192_wrap;
3664 static const EVP_CIPHER aes_256_wrap = {
3666 8, 32, 8, WRAP_FLAGS,
3667 aes_wrap_init_key, aes_wrap_cipher,
3669 sizeof(EVP_AES_WRAP_CTX),
3670 NULL, NULL, NULL, NULL
3673 const EVP_CIPHER *EVP_aes_256_wrap(void)
3675 return &aes_256_wrap;
3678 static const EVP_CIPHER aes_128_wrap_pad = {
3679 NID_id_aes128_wrap_pad,
3680 8, 16, 4, WRAP_FLAGS,
3681 aes_wrap_init_key, aes_wrap_cipher,
3683 sizeof(EVP_AES_WRAP_CTX),
3684 NULL, NULL, NULL, NULL
3687 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3689 return &aes_128_wrap_pad;
3692 static const EVP_CIPHER aes_192_wrap_pad = {
3693 NID_id_aes192_wrap_pad,
3694 8, 24, 4, WRAP_FLAGS,
3695 aes_wrap_init_key, aes_wrap_cipher,
3697 sizeof(EVP_AES_WRAP_CTX),
3698 NULL, NULL, NULL, NULL
3701 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3703 return &aes_192_wrap_pad;
3706 static const EVP_CIPHER aes_256_wrap_pad = {
3707 NID_id_aes256_wrap_pad,
3708 8, 32, 4, WRAP_FLAGS,
3709 aes_wrap_init_key, aes_wrap_cipher,
3711 sizeof(EVP_AES_WRAP_CTX),
3712 NULL, NULL, NULL, NULL
3715 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3717 return &aes_256_wrap_pad;
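/*-
 * Illustrative key-wrap usage (sketch only): wrap mode must be explicitly
 * enabled on the context, the default 8-byte RFC 3394 IV (or the 4-byte
 * RFC 5649 alternative IV for the padded variants) is used when no IV is
 * supplied, and the key material to be wrapped is passed in one update call.
 * |kek|, |key_material| and |wrapped| are caller-supplied buffers assumed
 * here:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl = 0;
 *
 *     EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(c, wrapped, &outl, key_material, keylen);
 *     EVP_CIPHER_CTX_free(c);
 *
 * For the unpadded wrap ciphers the input must be a non-zero multiple of 8
 * bytes; the padded variants accept any non-zero length.
 */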
3720 #ifndef OPENSSL_NO_OCB
3721 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3723 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3724 EVP_CIPHER_CTX *newc;
3725 EVP_AES_OCB_CTX *new_octx;
3731 octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
3732 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3734 octx->data_buf_len = 0;
3735 octx->aad_buf_len = 0;
3738 case EVP_CTRL_GET_IVLEN:
3739 *(int *)ptr = octx->ivlen;
3742 case EVP_CTRL_AEAD_SET_IVLEN:
3743 /* IV len must be 1 to 15 */
3744 if (arg <= 0 || arg > 15)
3750 case EVP_CTRL_AEAD_SET_TAG:
3752 /* Tag len must be 0 to 16 */
3753 if (arg < 0 || arg > 16)
3759 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
3761 memcpy(octx->tag, ptr, arg);
3764 case EVP_CTRL_AEAD_GET_TAG:
3765 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
3768 memcpy(ptr, octx->tag, arg);
3772 newc = (EVP_CIPHER_CTX *)ptr;
3773 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
3774 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3775 &new_octx->ksenc.ks,
3776 &new_octx->ksdec.ks);
3784 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3785 const unsigned char *iv, int enc)
3787 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3793 * We set both the encrypt and decrypt key here because decrypt
3794 * needs both. We could possibly optimise to remove setting the
3795 * decrypt key for an encryption operation.
3797 # ifdef HWAES_CAPABLE
3798 if (HWAES_CAPABLE) {
3799 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3801 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3803 if (!CRYPTO_ocb128_init(&octx->ocb,
3804 &octx->ksenc.ks, &octx->ksdec.ks,
3805 (block128_f) HWAES_encrypt,
3806 (block128_f) HWAES_decrypt,
3807 enc ? HWAES_ocb_encrypt
3808 : HWAES_ocb_decrypt))
3813 # ifdef VPAES_CAPABLE
3814 if (VPAES_CAPABLE) {
3815 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3817 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3819 if (!CRYPTO_ocb128_init(&octx->ocb,
3820 &octx->ksenc.ks, &octx->ksdec.ks,
3821 (block128_f) vpaes_encrypt,
3822 (block128_f) vpaes_decrypt,
3828 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3830 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3832 if (!CRYPTO_ocb128_init(&octx->ocb,
3833 &octx->ksenc.ks, &octx->ksdec.ks,
3834 (block128_f) AES_encrypt,
3835 (block128_f) AES_decrypt,
3842 * If we have an iv we can set it directly, otherwise use saved IV.
3844 if (iv == NULL && octx->iv_set)
3847 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3854 /* If key set use IV, otherwise copy */
3856 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3858 memcpy(octx->iv, iv, octx->ivlen);
3864 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3865 const unsigned char *in, size_t len)
3869 int written_len = 0;
3870 size_t trailing_len;
3871 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3873 /* If IV or Key not set then return error */
3882 * Need to ensure we are only passing full blocks to low level OCB
3883 * routines. We do it here rather than in EVP_EncryptUpdate/
3884 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
3885 * and those routines don't support that
3888 /* Are we dealing with AAD or normal data here? */
3890 buf = octx->aad_buf;
3891 buf_len = &(octx->aad_buf_len);
3893 buf = octx->data_buf;
3894 buf_len = &(octx->data_buf_len);
3896 if (is_partially_overlapping(out + *buf_len, in, len)) {
3897 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3903 * If we've got a partially filled buffer from a previous call then
3904 * use that data first
3907 unsigned int remaining;
3909 remaining = AES_BLOCK_SIZE - (*buf_len);
3910 if (remaining > len) {
3911 memcpy(buf + (*buf_len), in, len);
3915 memcpy(buf + (*buf_len), in, remaining);
3918 * If we get here we've filled the buffer, so process it
3923 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
3925 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3926 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
3930 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
3934 written_len = AES_BLOCK_SIZE;
3937 out += AES_BLOCK_SIZE;
3940 /* Do we have a partial block to handle at the end? */
3941 trailing_len = len % AES_BLOCK_SIZE;
3944 * If we've got some full blocks to handle, then process these first
3946 if (len != trailing_len) {
3948 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
3950 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3951 if (!CRYPTO_ocb128_encrypt
3952 (&octx->ocb, in, out, len - trailing_len))
3955 if (!CRYPTO_ocb128_decrypt
3956 (&octx->ocb, in, out, len - trailing_len))
3959 written_len += len - trailing_len;
3960 in += len - trailing_len;
3963 /* Handle any trailing partial block */
3964 if (trailing_len > 0) {
3965 memcpy(buf, in, trailing_len);
3966 *buf_len = trailing_len;
3972 * First of all empty the buffer of any partial block that we might
3973 * have been provided - both for data and AAD
3975 if (octx->data_buf_len > 0) {
3976 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3977 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
3978 octx->data_buf_len))
3981 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
3982 octx->data_buf_len))
3985 written_len = octx->data_buf_len;
3986 octx->data_buf_len = 0;
3988 if (octx->aad_buf_len > 0) {
3989 if (!CRYPTO_ocb128_aad
3990 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
3992 octx->aad_buf_len = 0;
3994 /* If decrypting then verify */
3995 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
3996 if (octx->taglen < 0)
3998 if (CRYPTO_ocb128_finish(&octx->ocb,
3999 octx->tag, octx->taglen) != 0)
4004 /* If encrypting then just get the tag */
4005 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4007 /* Don't reuse the IV */
4013 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4015 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4016 CRYPTO_ocb128_cleanup(&octx->ocb);
4020 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4021 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4022 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4023 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4024 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4025 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
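/*-
 * Illustrative OCB usage (sketch only, caller-supplied buffers assumed): OCB
 * follows the usual EVP AEAD pattern; the tag length defaults to 16 bytes but
 * can be reduced by passing a NULL pointer to EVP_CTRL_AEAD_SET_TAG before
 * the key and IV are set:
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);
 *     EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 12, tag);
 */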
4026 #endif /* OPENSSL_NO_OCB */
4029 #ifndef OPENSSL_NO_SIV
4031 typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4033 #define aesni_siv_init_key aes_siv_init_key
4034 static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4035 const unsigned char *iv, int enc)
4037 const EVP_CIPHER *ctr;
4038 const EVP_CIPHER *cbc;
4039 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4040 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
4047 cbc = EVP_aes_128_cbc();
4048 ctr = EVP_aes_128_ctr();
4051 cbc = EVP_aes_192_cbc();
4052 ctr = EVP_aes_192_ctr();
4055 cbc = EVP_aes_256_cbc();
4056 ctr = EVP_aes_256_ctr();
4062 /* klen is the key length of the underlying cipher, not of the input
4063 key, which should be twice as long */
4064 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
4067 #define aesni_siv_cipher aes_siv_cipher
4068 static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4069 const unsigned char *in, size_t len)
4071 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4073 /* EncryptFinal or DecryptFinal */
4075 return CRYPTO_siv128_finish(sctx);
4077 /* Deal with associated data */
4079 return CRYPTO_siv128_aad(sctx, in, len);
4081 if (EVP_CIPHER_CTX_encrypting(ctx))
4082 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4084 return CRYPTO_siv128_decrypt(sctx, in, out, len);
4087 #define aesni_siv_cleanup aes_siv_cleanup
4088 static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4090 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4092 return CRYPTO_siv128_cleanup(sctx);
4096 #define aesni_siv_ctrl aes_siv_ctrl
4097 static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4099 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4100 SIV128_CONTEXT *sctx_out;
4104 return CRYPTO_siv128_cleanup(sctx);
4106 case EVP_CTRL_SET_SPEED:
4107 return CRYPTO_siv128_speed(sctx, arg);
4109 case EVP_CTRL_AEAD_SET_TAG:
4110 if (!EVP_CIPHER_CTX_encrypting(c))
4111 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4114 case EVP_CTRL_AEAD_GET_TAG:
4115 if (!EVP_CIPHER_CTX_encrypting(c))
4117 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4120 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4121 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4129 #define SIV_FLAGS (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
4130 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
4131 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
4132 | EVP_CIPH_CTRL_INIT)
4134 BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
4135 BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
4136 BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)
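/*-
 * Illustrative SIV usage (sketch only): AES-SIV (RFC 5297) takes a
 * double-length key (32 bytes for EVP_aes_128_siv()) and can be used without
 * a nonce; a nonce, if used, is supplied as the last AAD item.  Each AAD item
 * goes in its own update call with a NULL output buffer, the plaintext is
 * processed in a single update, and the 16-byte SIV tag is fetched after
 * finalisation (|key32|, |aad|, |in| and |tag| are caller-supplied):
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_siv(), NULL, key32, NULL);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);
 *     EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */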