 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <openssl/aes.h>
#include "internal/evp_int.h"
#include "internal/cryptlib.h"
#include "modes_lcl.h"
#include <openssl/rand.h>
#include <openssl/cmac.h>
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
    } ks1, ks2;                 /* AES key schedules to use */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
static const int allow_insecure_decrypt = 0;
static const int allow_insecure_decrypt = 1;
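/*
 * The two definitions of allow_insecure_decrypt above are presumably the two
 * arms of a FIPS/non-FIPS build-time conditional that is elided here: the
 * XTS init-key paths below only tolerate duplicated XTS key halves for
 * decryption when allow_insecure_decrypt is non-zero.
 */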
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
#ifndef OPENSSL_NO_OCB
    } ksenc;                    /* AES key schedule to use for encryption */
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int ivlen;                  /* IV length */
#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
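/*
 * Worked example (illustration, not in the original): with a 64-bit size_t,
 * MAXBITCHUNK is (size_t)1 << 60; with a 32-bit size_t it is (size_t)1 << 28.
 * Chunking bit-oriented CFB1 processing at this size (the CFB1 code path that
 * uses it is elided here) keeps the bit count len * 8 safely clear of size_t
 * overflow.
 */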
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
void vpaes_encrypt(const unsigned char *in, unsigned char *out,
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
void vpaes_cbc_encrypt(const unsigned char *in,
                       const AES_KEY *key, unsigned char *ivec, int enc);
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
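/*
 * A minimal sketch of the intended behaviour, since the body is elided here:
 * treat the last 8 bytes as a big-endian integer and propagate the carry
 * from the last byte towards the front, stopping once a byte does not wrap
 * to zero, e.g.
 *
 *     int n = 8;
 *     do {
 *         if (++counter[--n] != 0)
 *             return;
 *     } while (n);
 */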
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#if defined(AES_ASM) && !defined(I386_ONLY) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )
extern unsigned int OPENSSL_ia32cap_P[];
# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
void aesni_encrypt(const unsigned char *in, unsigned char *out,
void aesni_decrypt(const unsigned char *in, unsigned char *out,
void aesni_ecb_encrypt(const unsigned char *in,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       const AES_KEY *key, unsigned char *ivec, int enc);
void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                const void *key, const unsigned char *ivec);
void aesni_xts_encrypt(const unsigned char *in,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);
void aesni_xts_decrypt(const unsigned char *in,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);
void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
#  define AES_GCM_ASM(gctx)  (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
            dat->stream.cbc = NULL;
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);
    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
         * If we have an iv we can set it directly, otherwise use saved IV.
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If key set use IV, otherwise copy */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;
         * Verify that the two keys are different.
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
        /* key_len is two AES keys */
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;
        xctx->xts.key1 = &xctx->ks1;
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
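/*
 * For illustration (not part of the original excerpt): the EVP-level XTS key
 * is the concatenation of the two AES keys, so EVP_aes_128_xts() takes a
 * 256-bit key and the code above computes bytes = 32 / 2 = 16, bits = 128.
 * The CRYPTO_memcmp() check rejects key1 == key2, the degenerate case behind
 * Rogaway's observation that XTS loses its security guarantees when both
 * halves are equal.
 */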
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aesni_encrypt,
                                (block128_f) aesni_decrypt,
                                enc ? aesni_ocb_encrypt
                                    : aesni_ocb_decrypt))
         * If we have an iv we can set it directly, otherwise use saved IV.
        if (iv == NULL && octx->iv_set)
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        /* If key set use IV, otherwise copy */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);
# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode; }
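/*
 * For illustration (hypothetical invocation, not in the original excerpt):
 *
 *     BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, flags)
 *
 * defines the two EVP_CIPHER tables aesni_128_cbc and aes_128_cbc plus the
 * selector
 *
 *     const EVP_CIPHER *EVP_aes_128_cbc(void)
 *     { return AESNI_CAPABLE ? &aesni_128_cbc : &aes_128_cbc; }
 *
 * so the AES-NI implementation is picked at run time from the CPUID bit.
 * BLOCK_CIPHER_custom does the same for modes with custom init/ctrl/cleanup
 * (GCM, XTS, CCM, OCB), doubling keylen for XTS and SIV.
 */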
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
# include "sparc_arch.h"
extern unsigned int OPENSSL_sparcv9cap_P[];
 * Initial Fujitsu SPARC64 X support
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter means the code grows to the size occupied
 * by multiple key-length-specific subroutines anyway, so why fight?
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);
# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);
# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
         * If we have an iv we can set it directly, otherwise use saved IV.
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If key set use IV, otherwise copy */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;
         * Verify that the two keys are different.
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
        /* key_len is two AES keys */
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
                xctx->stream = aes128_t4_xts_encrypt;
                xctx->stream = aes256_t4_xts_encrypt;
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
                xctx->stream = aes128_t4_xts_decrypt;
                xctx->stream = aes256_t4_xts_decrypt;
        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;
        xctx->xts.key1 = &xctx->ks1;
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
        aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aes_t4_encrypt,
                                (block128_f) aes_t4_decrypt,
         * If we have an iv we can set it directly, otherwise use saved IV.
        if (iv == NULL && octx->iv_set)
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        /* If key set use IV, otherwise copy */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);
# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */
# ifndef OPENSSL_NO_SIV
#  define aes_t4_siv_init_key aes_siv_init_key
#  define aes_t4_siv_cipher aes_siv_cipher
# endif                         /* OPENSSL_NO_SIV */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode; }
#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
# include "s390x_arch.h"
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
            unsigned char k[32];
        /* KM-AES parameter block - end */
} S390X_AES_ECB_CTX;
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
            unsigned char cv[16];
            unsigned char k[32];
        /* KMO-AES parameter block - end */
} S390X_AES_OFB_CTX;
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
            unsigned char cv[16];
            unsigned char k[32];
        /* KMF-AES parameter block - end */
} S390X_AES_CFB_CTX;
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
            unsigned char reserved[12];
                unsigned long long g[2];
                unsigned char b[16];
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
                unsigned long long g[2];
            unsigned char k[32];
        /* KMA-GCM-AES parameter block - end */
    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
            unsigned char pad[16];
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
                    unsigned long long g[2];
                    unsigned char b[16];
                unsigned char k[32];
            /* KMAC-AES parameter block - end */
                unsigned long long g[2];
                unsigned char b[16];
                unsigned long long g[2];
                unsigned char b[16];
            unsigned long long blocks;
            unsigned char pad[140];
} S390X_AES_CCM_CTX;
/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
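/*
 * Worked example (illustration, not in the original): keylen is in bytes, so
 * (keylen << 3) is the key size in bits and (... - 128) >> 6 maps 128/192/256
 * to 0/1/2:
 *
 *     S390X_AES_FC(16) == S390X_AES_128
 *     S390X_AES_FC(24) == S390X_AES_128 + 1 == S390X_AES_192
 *     S390X_AES_FC(32) == S390X_AES_128 + 2 == S390X_AES_256
 *
 * i.e. the KM-family function codes 18, 19 and 20 for AES-128/192/256.
 */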
/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] &  \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] &  \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] &  \
                                S390X_CAPBIT(S390X_AES_256))
# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);
# define S390X_aes_128_cbc_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 1
# define S390X_aes_256_cbc_CAPABLE 1
# define S390X_AES_CBC_CTX EVP_AES_KEY
# define s390x_aes_cbc_init_key aes_init_key
# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    cctx->fc = S390X_AES_FC(keylen);
        cctx->fc |= S390X_DECRYPT;
    memcpy(cctx->km.param.k, key, keylen);
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
        *out = *in ^ cctx->kmo.param.cv[n];
    len &= ~(size_t)0xf;
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;   /* 16 bytes cipher feedback */
        cctx->fc |= S390X_DECRYPT;
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
    len &= ~(size_t)0xf;
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256))
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;    /* 1 byte cipher feedback */
        cctx->fc |= S390X_DECRYPT;
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0
# define s390x_aes_cfb1_init_key aes_init_key
# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);
# define S390X_aes_128_ctr_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 1
# define S390X_aes_256_ctr_CAPABLE 1
# define S390X_AES_CTR_CTX EVP_AES_KEY
# define s390x_aes_ctr_init_key aes_init_key
# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))
/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
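/*
 * Worked example (illustration, not in the original): for IV lengths other
 * than 12, GHASH runs over the IV padded to a whole number of 16-byte blocks
 * plus one extra block carrying the bit length, hence:
 *
 *     S390X_gcm_ivpadlen(13) == ((13 + 15) >> 4 << 4) + 16 == 16 + 16 == 32
 *     S390X_gcm_ivpadlen(16) == 16 + 16 == 32
 *     S390X_gcm_ivpadlen(17) == 32 + 16 == 48
 */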
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
    unsigned long long alen;
    if (ctx->kma.param.tpcl)
    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
    ctx->kma.param.taadl = alen;
            ctx->ares[n] = *aad;
        /* ctx->ares contains a complete block if offset has wrapped around */
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
    len &= ~(size_t)0xf;
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
            ctx->ares[rem] = aad[rem];
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
    const unsigned char *inptr;
    unsigned long long mlen;
        unsigned char b[16];
    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
    ctx->kma.param.tpcl = mlen;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
        /* ctx->mres contains a complete block if offset has wrapped around */
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
    len &= ~(size_t)0xf;
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
         * If there is a remainder, it has to be saved such that it can be
         * processed by kma later. However, we also have to do the for-now
         * unauthenticated encryption/decryption part here and now...
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        ctx->mreslen += rem;
 * Initialize context structure. Code is big-endian.
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;
        ivlen = EVP_CIPHER_CTX_iv_length(c);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->ivlen = ivlen;
        gctx->tls_aad_len = -1;
    case EVP_CTRL_AEAD_SET_IVLEN:
            iv = EVP_CIPHER_CTX_iv_noconst(c);
            len = S390X_gcm_ivpadlen(arg);
            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                    OPENSSL_free(gctx->iv);
                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                    EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
        memcpy(buf, ptr, arg);
    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
        memcpy(ptr, gctx->kma.param.t.b, arg);
    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
            memcpy(gctx->iv, ptr, gctx->ivlen);
         * Fixed field must be at least 4 bytes and invocation field at
         * least 8 bytes.
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            memcpy(gctx->iv, ptr, arg);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        if (arg <= 0 || arg > gctx->ivlen)
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
            if (len < EVP_GCM_TLS_TAG_LEN)
            len -= EVP_GCM_TLS_TAG_LEN;
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;
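    /*
     * Worked example (illustration, not in the original): the TLS AAD ends
     * with the 2-byte record length in buf[arg - 2] and buf[arg - 1]. For an
     * incoming record of 48 bytes, decryption strips the 8-byte explicit IV
     * and the 16-byte tag, so the stored AAD is patched to carry
     * 48 - 8 - 16 = 24, the length of the plaintext actually authenticated.
     */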
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            len = S390X_gcm_ivpadlen(gctx->ivlen);
            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
            memcpy(gctx_out->iv, gctx->iv, len);
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    if (iv == NULL && key == NULL)
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);
        gctx->fc = S390X_AES_FC(keylen);
            gctx->fc |= S390X_DECRYPT;
        if (iv == NULL && gctx->iv_set)
            s390x_aes_gcm_setiv(gctx, iv);
            s390x_aes_gcm_setiv(gctx, iv);
            memcpy(gctx->iv, iv, gctx->ivlen);
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
    gctx->tls_aad_len = -1;
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
            if (s390x_aes_gcm_aad(gctx, in, len))
            if (s390x_aes_gcm(gctx, in, out, len))
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /* recall that we already did en-/decrypt gctx->mres
         * and returned it to caller... */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        enc = EVP_CIPHER_CTX_encrypting(ctx);
            if (gctx->taglen < 0)
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;
    iv = EVP_CIPHER_CTX_iv(c);
        OPENSSL_free(gctx->iv);
    OPENSSL_cleanse(gctx, sizeof(*gctx));
# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE 1    /* checked by callee */
# define S390X_aes_256_xts_CAPABLE 1
# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup
# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_256)))
# define S390X_CCM_AAD_FLAG 0x40
 * Set nonce and length fields. Code is big-endian.
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
 * Process additional authenticated data. Code is big-endian.
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;
    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
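    /*
     * Worked example (illustration, not in the original), following the
     * RFC 3610 encoding of the AAD length:
     *
     *     alen = 100                  -> 2-byte field 0x0064
     *     alen = 70000                -> 0xfffe marker + 4-byte field 0x00011170
     *     alen >= 2^32 (64-bit only)  -> 0xffff marker + 8-byte field
     */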
    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ctx->aes.ccm.buf.b[i] = 0;
    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;
    alen &= ~(size_t)0xf;
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
 * success.
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
    unsigned int i, l, num;
    unsigned char flags;
    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    ctx->aes.ccm.nonce.b[0] = l;
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;
        return -1; /* length mismatch */
    /* Two operations per block plus one for tag encryption */
    ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
    if (ctx->aes.ccm.blocks > (1ULL << 61))
        return -2; /* too much data */
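    /*
     * Worked example (illustration, not in the original): for len = 33,
     * ((33 + 15) >> 4) == 3 blocks are counted twice (once for the CBC-MAC
     * pass, once for the CTR pass) plus one block for encrypting the tag,
     * i.e. 3 * 2 + 1 = 7. The 2^61 bound presumably mirrors the AAD bound
     * used in the GCM code above.
     */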
    len &= ~(size_t)0xf;
        /* mac-then-encrypt */
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;
    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned.
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
        || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);
    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
        if (s390x_aes_ccm(cctx, in, out, len, enc))
        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
        OPENSSL_cleanse(out, len);
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
 * returned.
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec;
    if (iv == NULL && key == NULL)
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
        /* Store encoded m and l. */
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
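        /*
         * Worked example (illustration, not in the original): with the
         * defaults l = 8 and m = 12 set at EVP_CTRL_INIT below,
         *
         *     nonce.b[0] = ((8 - 1) & 0x7) | ((((12 - 2) >> 1) & 0x7) << 3)
         *                = 0x07 | 0x28 = 0x2f
         *
         * which is the CCM flags byte encoding of the L and M parameters
         * from RFC 3610.
         */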
2229 memset(cctx->aes.ccm.nonce.b + 1, 0,
2230 sizeof(cctx->aes.ccm.nonce.b));
2231 cctx->aes.ccm.blocks = 0;
2233 cctx->aes.ccm.key_set = 1;
2237 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2238 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
2240 cctx->aes.ccm.iv_set = 1;
/*
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * plaintext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    unsigned char *buf, *ivec;
    int rv = -1;

    if (!cctx->aes.ccm.key_set)
        return -1;

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*
     * Final(): Does not return any data. Recall that CCM is MAC-then-encrypt,
     * so integrity must be checked already at Update(), i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
            s390x_aes_ccm_setiv(cctx, ivec, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    /* Update(): Process message. */
    if (!cctx->aes.ccm.len_set) {
        /*
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        s390x_aes_ccm_setiv(cctx, ivec, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        if (rv == -1)
            OPENSSL_cleanse(out, len);

        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}

/*
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        memcpy(iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && ptr)
            return 0;

        if (ptr) {
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if (arg < cctx->aes.ccm.m)
            return 0;

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
        return 1;

    default:
        return -1;
    }
}
# define s390x_aes_ccm_cleanup aes_ccm_cleanup

# ifndef OPENSSL_NO_OCB
#  define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
#  define S390X_aes_128_ocb_CAPABLE 0
#  define S390X_aes_192_ocb_CAPABLE 0
#  define S390X_aes_256_ocb_CAPABLE 0

#  define s390x_aes_ocb_init_key aes_ocb_init_key
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                  const unsigned char *iv, int enc);
#  define s390x_aes_ocb_cipher aes_ocb_cipher
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
#  define s390x_aes_ocb_cleanup aes_ocb_cleanup
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
#  define s390x_aes_ocb_ctrl aes_ocb_ctrl
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# endif /* OPENSSL_NO_OCB */

# ifndef OPENSSL_NO_SIV
#  define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
#  define S390X_aes_128_siv_CAPABLE 0
#  define S390X_aes_192_siv_CAPABLE 0
#  define S390X_aes_256_siv_CAPABLE 0

#  define s390x_aes_siv_init_key aes_siv_init_key
#  define s390x_aes_siv_cipher aes_siv_cipher
#  define s390x_aes_siv_cleanup aes_siv_cleanup
#  define s390x_aes_siv_ctrl aes_siv_ctrl
# endif /* OPENSSL_NO_SIV */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,    \
                              MODE,flags)                               \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##nmode,blocksize,                                 \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    NULL,                                                               \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##nmode,                                           \
    blocksize,                                                          \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    aes_init_key,                                                       \
    aes_##mode##_cipher,                                                \
    NULL,                                                               \
    sizeof(EVP_AES_KEY),                                                \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##mode,                                            \
    blocksize,                                                          \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    s390x_aes_##mode##_cleanup,                                         \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    s390x_aes_##mode##_ctrl,                                            \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##mode,blocksize,                                  \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    aes_##mode##_init_key,                                              \
    aes_##mode##_cipher,                                                \
    aes_##mode##_cleanup,                                               \
    sizeof(EVP_AES_##MODE##_CTX),                                       \
    NULL,                                                               \
    NULL,                                                               \
    aes_##mode##_ctrl,                                                  \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}

#else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#endif
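/*
 * Illustration (informal, not part of the build): on s390x,
 * BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, ...) expands to the
 * two cipher tables s390x_aes_128_gcm and aes_128_gcm plus an accessor
 * EVP_aes_128_gcm() that returns the s390x table when
 * S390X_aes_128_gcm_CAPABLE is non-zero and the generic table otherwise.
 */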
#if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
#  if defined(BSAES_ASM)
#   define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  if defined(VPAES_ASM)
#   define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#  define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#  define HWAES_encrypt aes_v8_encrypt
#  define HWAES_decrypt aes_v8_decrypt
#  define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
#endif

#if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif

#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
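/*
 * Illustration (informal): each BLOCK_CIPHER_generic_pack() use below
 * expands to seven BLOCK_CIPHER_generic() instantiations, one each for
 * CBC, ECB, OFB, CFB128, CFB1, CFB8 and CTR, so a single line per key size
 * yields the whole accessor family such as EVP_aes_128_cbc() and
 * EVP_aes_128_ctr().
 */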
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx),
                            EVP_CIPHER_CTX_encrypting(ctx));
    else if (EVP_CIPHER_CTX_encrypting(ctx))
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);

    return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);
    size_t i;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (len < bl)
        return 1;

    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                          EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                            EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        len -= MAXBITCHUNK;
        in += MAXBITCHUNK;
        out += MAXBITCHUNK;
    }
    if (len) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
    }

    return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    EVP_CIPHER_CTX_iv_noconst(ctx),
                                    EVP_CIPHER_CTX_buf_noconst(ctx),
                                    &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx),
                              EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                              dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
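/*
 * Usage sketch (illustrative only, never compiled): driving the CTR
 * implementation above through the public EVP interface. The example_*
 * helper name is hypothetical, key/iv values are dummies and error
 * handling is elided; CTR is a stream mode, so identical calls both
 * encrypt and decrypt.
 */
#if 0
static int example_aes_ctr(void)
{
    unsigned char key[32] = { 0 }, iv[16] = { 0 };
    unsigned char msg[] = "counter mode keystream demo";
    unsigned char ct[sizeof(msg)], pt[sizeof(msg)];
    int outl, ok = 0;
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_256_ctr(), NULL, key, iv)
            && EVP_EncryptUpdate(c, ct, &outl, msg, sizeof(msg))
            /* re-init with the same key/iv resets the counter */
            && EVP_EncryptInit_ex(c, EVP_aes_256_ctr(), NULL, key, iv)
            && EVP_EncryptUpdate(c, pt, &outl, ct, sizeof(msg)))
        ok = (memcmp(pt, msg, sizeof(msg)) == 0);
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif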
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);

    if (gctx == NULL)
        return 0;
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
        OPENSSL_free(gctx->iv);
    return 1;
}

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GET_IV:
        if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
            return 0;
        if (gctx->ivlen != arg)
            return 0;
        memcpy(ptr, gctx->iv, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at
         * least 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
        {
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;
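    /*
     * Worked example of the length fixup above: a 40-byte (0x0028) TLS
     * record being decrypted carries an 8-byte explicit IV and a 16-byte
     * tag, so the length written back into the AAD is
     * 40 - 8 - 16 = 16 (0x0010).
     */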
    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);

            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        gctx->key_set = 1;

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;

    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;
                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk, out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;
                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out + len, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;
                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk, out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;
                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

#ifdef FIPS_MODE
/*
 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
 *
 * See also 8.2.2 RBG-based construction.
 * Random construction consists of a free field (which can be NULL) and a
 * random field which will use a DRBG that can return at least 96 bits of
 * entropy strength. (The DRBG must be seeded by the FIPS module).
 */
static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
{
    int sz = gctx->ivlen - offset;

    /* Must be at least 96 bits */
    if (sz <= 0 || gctx->ivlen < 12)
        return 0;

    /* Use DRBG to generate random iv */
    if (RAND_bytes(gctx->iv + offset, sz) <= 0)
        return 0;
    return 1;
}
#endif /* FIPS_MODE */
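/*
 * Worked example of the RBG-based construction above, assuming the default
 * 12-byte GCM IV and offset 0: there is no free field, so all 96 bits of
 * the IV are drawn from the DRBG, which is exactly the minimum entropy
 * strength SP 800-38D Section 8.2.2 calls for.
 */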
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

#ifdef FIPS_MODE
    /*
     * FIPS requires generation of AES-GCM IV's inside the FIPS module.
     * The IV can still be set externally (the security policy will state that
     * this is not FIPS compliant). There are some applications
     * where setting the IV externally is the only option available.
     */
    if (!gctx->iv_set) {
        if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
            return -1;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen_rand = 1;
    }
#else
    if (!gctx->iv_set)
        return -1;
#endif /* FIPS_MODE */

    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk, out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk, out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;
                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}

#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
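/*
 * Usage sketch (illustrative only, never compiled): AES-256-GCM through
 * the EVP interface, exercising the ctrl/cipher entry points defined
 * above. The example_* helper name is hypothetical, key/IV/AAD values are
 * dummies and error handling is elided.
 */
#if 0
static int example_aes_gcm(void)
{
    unsigned char key[32] = { 0 }, iv[12] = { 0 }, tag[16];
    unsigned char aad[13] = { 0 }, msg[] = "gcm demo", ct[sizeof(msg)];
    int outl;
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();

    EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
    /* Non-default IV lengths go through EVP_CTRL_AEAD_SET_IVLEN. */
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, sizeof(iv), NULL);
    EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
    EVP_EncryptUpdate(c, NULL, &outl, aad, sizeof(aad));    /* AAD pass */
    EVP_EncryptUpdate(c, ct, &outl, msg, sizeof(msg));
    EVP_EncryptFinal_ex(c, ct, &outl);                      /* no output */
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
    EVP_CIPHER_CTX_free(c);
    return 1;
}
#endif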
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);

    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);

        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}

static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses the vulnerability described in Rogaway's
         * September 2004 paper:
         *
         * "Efficient Instantiations of Tweakable Blockciphers and
         * Refinements to Modes OCB and PMAC".
         * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
         *
         * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
         * that:
         * "The check for Key_1 != Key_2 shall be done at any place
         * BEFORE using the keys in the XTS-AES algorithm to process
         * data with them."
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        do {
#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
# endif
                }

                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);
    }

    if (iv != NULL) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (xctx->xts.key1 == NULL
            || xctx->xts.key2 == NULL
            || out == NULL
            || in == NULL
            || len < AES_BLOCK_SIZE)
        return 0;

    /*
     * Impose a limit of 2^20 blocks per data unit as specified by
     * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
     * indicated that this was a SHOULD NOT rather than a MUST NOT.
     * NIST SP 800-38E mandates the same limit.
     */
    if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
        EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
        return 0;
    }

    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2,
                         EVP_CIPHER_CTX_iv_noconst(ctx));
    else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
                                   in, out, len,
                                   EVP_CIPHER_CTX_encrypting(ctx)))
        return 0;
    return 1;
}

#define aes_xts_cleanup NULL

#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
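/*
 * Usage sketch (illustrative only, never compiled): AES-128-XTS takes a
 * double-length key whose two 16-byte halves must differ, and a 16-byte
 * tweak in place of an IV; the input must be at least one block. The
 * example_* helper name is hypothetical and values are dummies.
 */
#if 0
static int example_aes_xts(void)
{
    unsigned char key[32], tweak[16] = { 0 };
    unsigned char sector[512] = { 0 }, enc[512];
    int outl;
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();

    memset(key, 0x01, 16);          /* key1 */
    memset(key + 16, 0x02, 16);     /* key2: must differ from key1 */
    EVP_EncryptInit_ex(c, EVP_aes_128_xts(), NULL, key, tweak);
    EVP_EncryptUpdate(c, enc, &outl, sector, sizeof(sector));
    EVP_CIPHER_CTX_free(c);
    return 1;
}
#endif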
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) HWAES_encrypt);
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) vpaes_encrypt);
    } else
#endif
    {
        AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                            &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) AES_encrypt);
    }
    cctx->str = NULL;
    cctx->key_set = 1;

    if (iv != NULL) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (out == NULL) {
        if (in == NULL) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

#define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
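/*
 * Usage sketch (illustrative only, never compiled): CCM needs the total
 * message length before any AAD is processed, passed as an Update() call
 * with both input and output NULL, and the tag length fixed up front via
 * EVP_CTRL_AEAD_SET_TAG. The example_* helper name is hypothetical,
 * values are dummies and error handling is elided.
 */
#if 0
static int example_aes_ccm(void)
{
    unsigned char key[16] = { 0 }, nonce[11] = { 0 }, tag[12];
    unsigned char aad[8] = { 0 }, msg[] = "ccm demo", ct[sizeof(msg)];
    int outl;
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();

    EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, sizeof(nonce), NULL);
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, sizeof(tag), NULL);
    EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
    EVP_EncryptUpdate(c, NULL, &outl, NULL, sizeof(msg)); /* total length */
    EVP_EncryptUpdate(c, NULL, &outl, aad, sizeof(aad));  /* AAD */
    EVP_EncryptUpdate(c, ct, &outl, msg, sizeof(msg));
    EVP_EncryptFinal_ex(c, ct, &outl);
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
    EVP_CIPHER_CTX_free(c);
    return 1;
}
#endif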
    /* Indicates if IV has been set */
    unsigned char *iv;

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;
    if (key != NULL) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        if (iv == NULL)
            wctx->iv = NULL;
    }
    if (iv != NULL) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
        wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    }
    return 1;
}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;

    /* No final operation so always return zero length */
    if (in == NULL)
        return 0;

    /* Input length must always be non-zero */
    if (!inlen)
        return -1;

    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;

    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;

    if (is_partially_overlapping(out, in, inlen)) {
        EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }

    if (out == NULL) {
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }

    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    return rv ? (int)rv : -1;
}

#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
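/*
 * Usage sketch (illustrative only, never compiled): RFC 3394 key wrap via
 * EVP_aes_256_wrap(). Wrapped output is input length plus an 8-byte
 * prefix; with no IV supplied the default integrity check value is used.
 * Wrap mode is disabled by default, so EVP_CIPHER_CTX_FLAG_WRAP_ALLOW must
 * be set first. The example_* helper name is hypothetical and values are
 * dummies.
 */
#if 0
static int example_aes_wrap(void)
{
    unsigned char kek[32] = { 0 };      /* key-encryption key */
    unsigned char cek[16] = { 0 };      /* key material to wrap */
    unsigned char wrapped[16 + 8];
    int outl;
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();

    EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);
    EVP_EncryptUpdate(c, wrapped, &outl, cek, sizeof(cek));
    EVP_CIPHER_CTX_free(c);
    return 1;
}
#endif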
#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;
        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (ptr == NULL) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;
            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;
    }
}

# ifdef HWAES_CAPABLE
#  ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#    define HWAES_ocb_encrypt ((ocb128_f)NULL)
#  endif
#  ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#    define HWAES_ocb_decrypt ((ocb128_f)NULL)
#  endif
# endif

static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        } while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that.
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *buf_len += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}

static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */
#ifndef OPENSSL_NO_SIV

typedef SIV128_CONTEXT EVP_AES_SIV_CTX;

#define aesni_siv_init_key aes_siv_init_key
static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    const EVP_CIPHER *ctr;
    const EVP_CIPHER *cbc;
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
    int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;

    if (key == NULL)
        return 1;

    switch (klen) {
    case 16:
        cbc = EVP_aes_128_cbc();
        ctr = EVP_aes_128_ctr();
        break;
    case 24:
        cbc = EVP_aes_192_cbc();
        ctr = EVP_aes_192_ctr();
        break;
    case 32:
        cbc = EVP_aes_256_cbc();
        ctr = EVP_aes_256_ctr();
        break;
    default:
        return 0;
    }

    /*
     * klen is the length of the underlying cipher, not the input key,
     * which should be twice as long
     */
    return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
}

#define aesni_siv_cipher aes_siv_cipher
static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);

    /* EncryptFinal or DecryptFinal */
    if (in == NULL)
        return CRYPTO_siv128_finish(sctx);

    /* Deal with associated data */
    if (out == NULL)
        return CRYPTO_siv128_aad(sctx, in, len);

    if (EVP_CIPHER_CTX_encrypting(ctx))
        return CRYPTO_siv128_encrypt(sctx, in, out, len);

    return CRYPTO_siv128_decrypt(sctx, in, out, len);
}

#define aesni_siv_cleanup aes_siv_cleanup
static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
{
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);

    return CRYPTO_siv128_cleanup(sctx);
}

#define aesni_siv_ctrl aes_siv_ctrl
static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
    SIV128_CONTEXT *sctx_out;

    switch (type) {
    case EVP_CTRL_INIT:
        return CRYPTO_siv128_cleanup(sctx);

    case EVP_CTRL_SET_SPEED:
        return CRYPTO_siv128_speed(sctx, arg);

    case EVP_CTRL_AEAD_SET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c))
            return CRYPTO_siv128_set_tag(sctx, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c))
            return 0;
        return CRYPTO_siv128_get_tag(sctx, ptr, arg);

    case EVP_CTRL_COPY:
        sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
        return CRYPTO_siv128_copy_ctx(sctx_out, sctx);

    default:
        return -1;
    }
}

#define SIV_FLAGS    (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
                      | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                      | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
                      | EVP_CIPH_CTRL_INIT)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)
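/*
 * Usage sketch (illustrative only, never compiled): AES-SIV (RFC 5297) is
 * deterministic, takes a double-length key and no IV, treats each
 * NULL-output Update() call as one associated-data item, and exposes the
 * 16-byte SIV tag via EVP_CTRL_AEAD_GET_TAG. The example_* helper name is
 * hypothetical, values are dummies and error handling is elided.
 */
#if 0
static int example_aes_siv(void)
{
    unsigned char key[32] = { 0 };      /* two 16-byte subkeys */
    unsigned char ad[12] = { 0 }, msg[] = "siv demo";
    unsigned char ct[sizeof(msg)], tag[16];
    int outl;
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();

    EVP_EncryptInit_ex(c, EVP_aes_128_siv(), NULL, key, NULL);
    EVP_EncryptUpdate(c, NULL, &outl, ad, sizeof(ad));   /* one AD item */
    EVP_EncryptUpdate(c, ct, &outl, msg, sizeof(msg));
    EVP_EncryptFinal_ex(c, ct, &outl);
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
    EVP_CIPHER_CTX_free(c);
    return 1;
}
#endif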