2 * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved.
4 * Licensed under the Apache License 2.0 (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
10 #include <openssl/opensslconf.h>
11 #include <openssl/crypto.h>
12 #include <openssl/evp.h>
13 #include <openssl/err.h>
16 #include <openssl/aes.h>
17 #include "internal/evp_int.h"
18 #include "modes_lcl.h"
19 #include <openssl/rand.h>
20 #include <openssl/cmac.h>
39 } ks; /* AES key schedule to use */
40 int key_set; /* Set if key initialised */
41 int iv_set; /* Set if an iv is set */
43 unsigned char *iv; /* Temporary IV store */
44 int ivlen; /* IV length */
46 int iv_gen; /* It is OK to generate IVs */
47 int tls_aad_len; /* TLS AAD length */
48 uint64_t tls_enc_records; /* Number of TLS records encrypted */
56 } ks1, ks2; /* AES key schedules to use */
58 void (*stream) (const unsigned char *in,
59 unsigned char *out, size_t length,
60 const AES_KEY *key1, const AES_KEY *key2,
61 const unsigned char iv[16]);
68 } ks; /* AES key schedule to use */
69 int key_set; /* Set if key initialised */
70 int iv_set; /* Set if an iv is set */
71 int tag_set; /* Set if tag is valid */
72 int len_set; /* Set if message length set */
73 int L, M; /* L and M parameters from RFC3610 */
74 int tls_aad_len; /* TLS AAD length */
79 #ifndef OPENSSL_NO_OCB
84 } ksenc; /* AES key schedule to use for encryption */
88 } ksdec; /* AES key schedule to use for decryption */
89 int key_set; /* Set if key initialised */
90 int iv_set; /* Set if an iv is set */
92 unsigned char *iv; /* Temporary IV store */
93 unsigned char tag[16];
94 unsigned char data_buf[16]; /* Store partial data blocks */
95 unsigned char aad_buf[16]; /* Store partial AAD blocks */
98 int ivlen; /* IV length */
103 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
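/*
 * Added note: on an LP64 platform sizeof(size_t) == 8, so MAXBITCHUNK is
 * (size_t)1 << 60; on a 32-bit platform it is 1 << 28. It caps how many
 * bytes the bit-oriented CFB1 path hands to the low-level routine per
 * call, so that the bit count (len * 8) can never overflow a size_t.
 */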
106 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
108 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
111 void vpaes_encrypt(const unsigned char *in, unsigned char *out,
113 void vpaes_decrypt(const unsigned char *in, unsigned char *out,
116 void vpaes_cbc_encrypt(const unsigned char *in,
119 const AES_KEY *key, unsigned char *ivec, int enc);
122 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
123 size_t length, const AES_KEY *key,
124 unsigned char ivec[16], int enc);
125 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
126 size_t len, const AES_KEY *key,
127 const unsigned char ivec[16]);
128 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
129 size_t len, const AES_KEY *key1,
130 const AES_KEY *key2, const unsigned char iv[16]);
131 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
132 size_t len, const AES_KEY *key1,
133 const AES_KEY *key2, const unsigned char iv[16]);
136 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
137 size_t blocks, const AES_KEY *key,
138 const unsigned char ivec[AES_BLOCK_SIZE]);
141 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
142 const AES_KEY *key1, const AES_KEY *key2,
143 const unsigned char iv[16]);
144 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
145 const AES_KEY *key1, const AES_KEY *key2,
146 const unsigned char iv[16]);
149 /* Increment counter (a big-endian 64-bit integer) by 1. */
150 static void ctr64_inc(unsigned char *counter)
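/*
 * Sketch of the elided body (an assumption consistent with the comment
 * above): the 8 bytes at counter[0..7] are treated as one big-endian
 * 64-bit integer and incremented by one.
 */
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];     /* load current byte */
        ++c;                /* add the carry */
        counter[n] = c;
        if (c != 0)         /* no wrap: carry stops here */
            return;
    } while (n > 0);        /* wrapped to zero: propagate carry leftwards */
}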
165 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
166 # include "ppc_arch.h"
168 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
170 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
171 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
172 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
173 # define HWAES_encrypt aes_p8_encrypt
174 # define HWAES_decrypt aes_p8_decrypt
175 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
176 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
177 # define HWAES_xts_encrypt aes_p8_xts_encrypt
178 # define HWAES_xts_decrypt aes_p8_xts_decrypt
181 #if defined(AES_ASM) && !defined(I386_ONLY) && ( \
182 ((defined(__i386) || defined(__i386__) || \
183 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
184 defined(__x86_64) || defined(__x86_64__) || \
185 defined(_M_AMD64) || defined(_M_X64) )
187 extern unsigned int OPENSSL_ia32cap_P[];
190 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
193 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
198 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
200 int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
202 int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
205 void aesni_encrypt(const unsigned char *in, unsigned char *out,
207 void aesni_decrypt(const unsigned char *in, unsigned char *out,
210 void aesni_ecb_encrypt(const unsigned char *in,
212 size_t length, const AES_KEY *key, int enc);
213 void aesni_cbc_encrypt(const unsigned char *in,
216 const AES_KEY *key, unsigned char *ivec, int enc);
218 void aesni_ctr32_encrypt_blocks(const unsigned char *in,
221 const void *key, const unsigned char *ivec);
223 void aesni_xts_encrypt(const unsigned char *in,
226 const AES_KEY *key1, const AES_KEY *key2,
227 const unsigned char iv[16]);
229 void aesni_xts_decrypt(const unsigned char *in,
232 const AES_KEY *key1, const AES_KEY *key2,
233 const unsigned char iv[16]);
235 void aesni_ccm64_encrypt_blocks(const unsigned char *in,
239 const unsigned char ivec[16],
240 unsigned char cmac[16]);
242 void aesni_ccm64_decrypt_blocks(const unsigned char *in,
246 const unsigned char ivec[16],
247 unsigned char cmac[16]);
249 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
250 size_t aesni_gcm_encrypt(const unsigned char *in,
253 const void *key, unsigned char ivec[16], u64 *Xi);
254 # define AES_gcm_encrypt aesni_gcm_encrypt
255 size_t aesni_gcm_decrypt(const unsigned char *in,
258 const void *key, unsigned char ivec[16], u64 *Xi);
259 # define AES_gcm_decrypt aesni_gcm_decrypt
260 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
262 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
263 gctx->gcm.ghash==gcm_ghash_avx)
264 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
265 gctx->gcm.ghash==gcm_ghash_avx)
266 # undef AES_GCM_ASM2 /* minor size optimization */
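/*
 * Added note: AES_GCM_ASM gates the stitched AES-NI + AVX GHASH fast
 * path; AES_GCM_ASM2 is an alternative predicate keyed on the block
 * function, but it is #undef'd right away (the "minor size optimization"
 * above), so it never takes effect.
 */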
269 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
270 const unsigned char *iv, int enc)
273 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
275 mode = EVP_CIPHER_CTX_mode(ctx);
276 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
278 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
280 dat->block = (block128_f) aesni_decrypt;
281 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
282 (cbc128_f) aesni_cbc_encrypt : NULL;
284 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
286 dat->block = (block128_f) aesni_encrypt;
287 if (mode == EVP_CIPH_CBC_MODE)
288 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
289 else if (mode == EVP_CIPH_CTR_MODE)
290 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
292 dat->stream.cbc = NULL;
296 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
303 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
304 const unsigned char *in, size_t len)
306 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
307 EVP_CIPHER_CTX_iv_noconst(ctx),
308 EVP_CIPHER_CTX_encrypting(ctx));
313 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
314 const unsigned char *in, size_t len)
316 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
321 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
322 EVP_CIPHER_CTX_encrypting(ctx));
327 # define aesni_ofb_cipher aes_ofb_cipher
328 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
329 const unsigned char *in, size_t len);
331 # define aesni_cfb_cipher aes_cfb_cipher
332 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
333 const unsigned char *in, size_t len);
335 # define aesni_cfb8_cipher aes_cfb8_cipher
336 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
337 const unsigned char *in, size_t len);
339 # define aesni_cfb1_cipher aes_cfb1_cipher
340 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
341 const unsigned char *in, size_t len);
343 # define aesni_ctr_cipher aes_ctr_cipher
344 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
345 const unsigned char *in, size_t len);
347 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
348 const unsigned char *iv, int enc)
350 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
354 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
356 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
357 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
359 * If we have an IV, we can set it directly; otherwise use the saved IV.
361 if (iv == NULL && gctx->iv_set)
364 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
369 /* If the key is set, use the IV now; otherwise save a copy for later. */
371 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
373 memcpy(gctx->iv, iv, gctx->ivlen);
380 # define aesni_gcm_cipher aes_gcm_cipher
381 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
382 const unsigned char *in, size_t len);
384 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
385 const unsigned char *iv, int enc)
387 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
392 /* key_len is two AES keys */
394 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
396 xctx->xts.block1 = (block128_f) aesni_encrypt;
397 xctx->stream = aesni_xts_encrypt;
399 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
401 xctx->xts.block1 = (block128_f) aesni_decrypt;
402 xctx->stream = aesni_xts_decrypt;
405 aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
406 EVP_CIPHER_CTX_key_length(ctx) * 4,
408 xctx->xts.block2 = (block128_f) aesni_encrypt;
410 xctx->xts.key1 = &xctx->ks1;
414 xctx->xts.key2 = &xctx->ks2;
415 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
421 # define aesni_xts_cipher aes_xts_cipher
422 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
423 const unsigned char *in, size_t len);
425 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
426 const unsigned char *iv, int enc)
428 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
432 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
434 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
435 &cctx->ks, (block128_f) aesni_encrypt);
436 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
437 (ccm128_f) aesni_ccm64_decrypt_blocks;
441 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
447 # define aesni_ccm_cipher aes_ccm_cipher
448 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
449 const unsigned char *in, size_t len);
451 # ifndef OPENSSL_NO_OCB
452 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
453 size_t blocks, const void *key,
454 size_t start_block_num,
455 unsigned char offset_i[16],
456 const unsigned char L_[][16],
457 unsigned char checksum[16]);
458 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
459 size_t blocks, const void *key,
460 size_t start_block_num,
461 unsigned char offset_i[16],
462 const unsigned char L_[][16],
463 unsigned char checksum[16]);
465 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
466 const unsigned char *iv, int enc)
468 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
474 * We set both the encrypt and decrypt keys here because decryption
475 * needs both. We could possibly optimise this by not setting the
476 * decrypt key for an encryption-only operation.
478 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
480 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
482 if (!CRYPTO_ocb128_init(&octx->ocb,
483 &octx->ksenc.ks, &octx->ksdec.ks,
484 (block128_f) aesni_encrypt,
485 (block128_f) aesni_decrypt,
486 enc ? aesni_ocb_encrypt
487 : aesni_ocb_decrypt))
493 * If we have an IV, we can set it directly; otherwise use the saved IV.
495 if (iv == NULL && octx->iv_set)
498 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
505 /* If the key is set, use the IV now; otherwise save a copy for later. */
507 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
509 memcpy(octx->iv, iv, octx->ivlen);
515 # define aesni_ocb_cipher aes_ocb_cipher
516 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
517 const unsigned char *in, size_t len);
518 # endif /* OPENSSL_NO_OCB */
520 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
521 static const EVP_CIPHER aesni_##keylen##_##mode = { \
522 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
523 flags|EVP_CIPH_##MODE##_MODE, \
525 aesni_##mode##_cipher, \
527 sizeof(EVP_AES_KEY), \
528 NULL,NULL,NULL,NULL }; \
529 static const EVP_CIPHER aes_##keylen##_##mode = { \
530 nid##_##keylen##_##nmode,blocksize, \
532 flags|EVP_CIPH_##MODE##_MODE, \
534 aes_##mode##_cipher, \
536 sizeof(EVP_AES_KEY), \
537 NULL,NULL,NULL,NULL }; \
538 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
539 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
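/*
 * Illustration (added): an instantiation such as
 *   BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, flags)
 * emits two static EVP_CIPHER tables, aesni_128_cbc and aes_128_cbc, and
 * the public accessor EVP_aes_128_cbc(), which selects the AES-NI table
 * at run time when AESNI_CAPABLE is true and the generic one otherwise.
 */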
541 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
542 static const EVP_CIPHER aesni_##keylen##_##mode = { \
543 nid##_##keylen##_##mode,blocksize, \
544 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
546 flags|EVP_CIPH_##MODE##_MODE, \
547 aesni_##mode##_init_key, \
548 aesni_##mode##_cipher, \
549 aes_##mode##_cleanup, \
550 sizeof(EVP_AES_##MODE##_CTX), \
551 NULL,NULL,aes_##mode##_ctrl,NULL }; \
552 static const EVP_CIPHER aes_##keylen##_##mode = { \
553 nid##_##keylen##_##mode,blocksize, \
554 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
556 flags|EVP_CIPH_##MODE##_MODE, \
557 aes_##mode##_init_key, \
558 aes_##mode##_cipher, \
559 aes_##mode##_cleanup, \
560 sizeof(EVP_AES_##MODE##_CTX), \
561 NULL,NULL,aes_##mode##_ctrl,NULL }; \
562 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
563 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
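/*
 * Added note: the key-length expression above doubles keylen/8 for XTS
 * and SIV because those modes carry two full AES keys; for example,
 * EVP_aes_128_xts() reports a 32-byte key, matching the key_length/2
 * split performed in aesni_xts_init_key().
 */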
565 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
567 # include "sparc_arch.h"
569 extern unsigned int OPENSSL_sparcv9cap_P[];
572 * Initial Fujitsu SPARC64 X support
574 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
575 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key
576 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key
577 # define HWAES_encrypt aes_fx_encrypt
578 # define HWAES_decrypt aes_fx_decrypt
579 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt
580 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
582 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
584 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
585 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
586 void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
588 void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
591 * Key-length-specific subroutines were chosen for the following reason.
592 * Each SPARC T4 core can execute up to 8 threads, which share the core's
593 * resources. Loading as much key material as possible into registers
594 * minimizes references to the shared memory interface, as well as the
595 * number of instructions in inner loops [much needed on T4]. But then,
596 * having non-key-length-specific routines would require conditional
597 * branches either in inner loops or on subroutine entry. The former is
598 * hardly acceptable, while the latter means growing the code to the size
599 * occupied by multiple key-length-specific subroutines, so why fight?
601 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
602 size_t len, const AES_KEY *key,
603 unsigned char *ivec);
604 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
605 size_t len, const AES_KEY *key,
606 unsigned char *ivec);
607 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
608 size_t len, const AES_KEY *key,
609 unsigned char *ivec);
610 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
611 size_t len, const AES_KEY *key,
612 unsigned char *ivec);
613 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
614 size_t len, const AES_KEY *key,
615 unsigned char *ivec);
616 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
617 size_t len, const AES_KEY *key,
618 unsigned char *ivec);
619 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
620 size_t blocks, const AES_KEY *key,
621 unsigned char *ivec);
622 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
623 size_t blocks, const AES_KEY *key,
624 unsigned char *ivec);
625 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
626 size_t blocks, const AES_KEY *key,
627 unsigned char *ivec);
628 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
629 size_t blocks, const AES_KEY *key1,
630 const AES_KEY *key2, const unsigned char *ivec);
631 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
632 size_t blocks, const AES_KEY *key1,
633 const AES_KEY *key2, const unsigned char *ivec);
634 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
635 size_t blocks, const AES_KEY *key1,
636 const AES_KEY *key2, const unsigned char *ivec);
637 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
638 size_t blocks, const AES_KEY *key1,
639 const AES_KEY *key2, const unsigned char *ivec);
641 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
642 const unsigned char *iv, int enc)
645 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
647 mode = EVP_CIPHER_CTX_mode(ctx);
648 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
649 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
652 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
653 dat->block = (block128_f) aes_t4_decrypt;
656 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
657 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
660 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
661 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
664 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
665 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
672 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
673 dat->block = (block128_f) aes_t4_encrypt;
676 if (mode == EVP_CIPH_CBC_MODE)
677 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
678 else if (mode == EVP_CIPH_CTR_MODE)
679 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
681 dat->stream.cbc = NULL;
684 if (mode == EVP_CIPH_CBC_MODE)
685 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
686 else if (mode == EVP_CIPH_CTR_MODE)
687 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
689 dat->stream.cbc = NULL;
692 if (mode == EVP_CIPH_CBC_MODE)
693 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
694 else if (mode == EVP_CIPH_CTR_MODE)
695 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
697 dat->stream.cbc = NULL;
705 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
712 # define aes_t4_cbc_cipher aes_cbc_cipher
713 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
714 const unsigned char *in, size_t len);
716 # define aes_t4_ecb_cipher aes_ecb_cipher
717 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
718 const unsigned char *in, size_t len);
720 # define aes_t4_ofb_cipher aes_ofb_cipher
721 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
722 const unsigned char *in, size_t len);
724 # define aes_t4_cfb_cipher aes_cfb_cipher
725 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
726 const unsigned char *in, size_t len);
728 # define aes_t4_cfb8_cipher aes_cfb8_cipher
729 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
730 const unsigned char *in, size_t len);
732 # define aes_t4_cfb1_cipher aes_cfb1_cipher
733 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
734 const unsigned char *in, size_t len);
736 # define aes_t4_ctr_cipher aes_ctr_cipher
737 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
738 const unsigned char *in, size_t len);
740 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
741 const unsigned char *iv, int enc)
743 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
747 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
748 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
749 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
750 (block128_f) aes_t4_encrypt);
753 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
756 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
759 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
765 * If we have an IV, we can set it directly; otherwise use the saved IV.
767 if (iv == NULL && gctx->iv_set)
770 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
775 /* If the key is set, use the IV now; otherwise save a copy for later. */
777 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
779 memcpy(gctx->iv, iv, gctx->ivlen);
786 # define aes_t4_gcm_cipher aes_gcm_cipher
787 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
788 const unsigned char *in, size_t len);
790 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
791 const unsigned char *iv, int enc)
793 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
798 int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
800 /* key_len is two AES keys */
802 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
803 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
806 xctx->stream = aes128_t4_xts_encrypt;
809 xctx->stream = aes256_t4_xts_encrypt;
815 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
817 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
820 xctx->stream = aes128_t4_xts_decrypt;
823 xctx->stream = aes256_t4_xts_decrypt;
830 aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
831 EVP_CIPHER_CTX_key_length(ctx) * 4,
833 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
835 xctx->xts.key1 = &xctx->ks1;
839 xctx->xts.key2 = &xctx->ks2;
840 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
846 # define aes_t4_xts_cipher aes_xts_cipher
847 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
848 const unsigned char *in, size_t len);
850 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
851 const unsigned char *iv, int enc)
853 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
857 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
858 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
859 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
860 &cctx->ks, (block128_f) aes_t4_encrypt);
865 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
871 # define aes_t4_ccm_cipher aes_ccm_cipher
872 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
873 const unsigned char *in, size_t len);
875 # ifndef OPENSSL_NO_OCB
876 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
877 const unsigned char *iv, int enc)
879 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
885 * We set both the encrypt and decrypt keys here because decryption
886 * needs both. We could possibly optimise this by not setting the
887 * decrypt key for an encryption-only operation.
889 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
891 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
893 if (!CRYPTO_ocb128_init(&octx->ocb,
894 &octx->ksenc.ks, &octx->ksdec.ks,
895 (block128_f) aes_t4_encrypt,
896 (block128_f) aes_t4_decrypt,
903 * If we have an IV, we can set it directly; otherwise use the saved IV.
905 if (iv == NULL && octx->iv_set)
908 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
915 /* If the key is set, use the IV now; otherwise save a copy for later. */
917 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
919 memcpy(octx->iv, iv, octx->ivlen);
925 # define aes_t4_ocb_cipher aes_ocb_cipher
926 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
927 const unsigned char *in, size_t len);
928 # endif /* OPENSSL_NO_OCB */
930 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
931 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
932 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
933 flags|EVP_CIPH_##MODE##_MODE, \
935 aes_t4_##mode##_cipher, \
937 sizeof(EVP_AES_KEY), \
938 NULL,NULL,NULL,NULL }; \
939 static const EVP_CIPHER aes_##keylen##_##mode = { \
940 nid##_##keylen##_##nmode,blocksize, \
942 flags|EVP_CIPH_##MODE##_MODE, \
944 aes_##mode##_cipher, \
946 sizeof(EVP_AES_KEY), \
947 NULL,NULL,NULL,NULL }; \
948 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
949 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
951 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
952 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
953 nid##_##keylen##_##mode,blocksize, \
954 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
956 flags|EVP_CIPH_##MODE##_MODE, \
957 aes_t4_##mode##_init_key, \
958 aes_t4_##mode##_cipher, \
959 aes_##mode##_cleanup, \
960 sizeof(EVP_AES_##MODE##_CTX), \
961 NULL,NULL,aes_##mode##_ctrl,NULL }; \
962 static const EVP_CIPHER aes_##keylen##_##mode = { \
963 nid##_##keylen##_##mode,blocksize, \
964 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
966 flags|EVP_CIPH_##MODE##_MODE, \
967 aes_##mode##_init_key, \
968 aes_##mode##_cipher, \
969 aes_##mode##_cleanup, \
970 sizeof(EVP_AES_##MODE##_CTX), \
971 NULL,NULL,aes_##mode##_ctrl,NULL }; \
972 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
973 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
975 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
979 # include "s390x_arch.h"
985 * KM-AES parameter block - begin
986 * (see z/Architecture Principles of Operation >= SA22-7832-06)
991 /* KM-AES parameter block - end */
1000 * KMO-AES parameter block - begin
1001 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1004 unsigned char cv[16];
1005 unsigned char k[32];
1007 /* KMO-AES parameter block - end */
1012 } S390X_AES_OFB_CTX;
1018 * KMF-AES parameter block - begin
1019 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1022 unsigned char cv[16];
1023 unsigned char k[32];
1025 /* KMF-AES parameter block - end */
1030 } S390X_AES_CFB_CTX;
1036 * KMA-GCM-AES parameter block - begin
1037 * (see z/Architecture Principles of Operation >= SA22-7832-11)
1040 unsigned char reserved[12];
1046 unsigned long long g[2];
1047 unsigned char b[16];
1049 unsigned char h[16];
1050 unsigned long long taadl;
1051 unsigned long long tpcl;
1053 unsigned long long g[2];
1056 unsigned char k[32];
1058 /* KMA-GCM-AES parameter block - end */
1070 unsigned char ares[16];
1071 unsigned char mres[16];
1072 unsigned char kres[16];
1078 uint64_t tls_enc_records; /* Number of TLS records encrypted */
1079 } S390X_AES_GCM_CTX;
1085 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
1086 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
1087 * rounds field is used to store the function code and that the key
1088 * schedule is not stored (if AES hardware support is detected).
1091 unsigned char pad[16];
1097 * KMAC-AES parameter block - begin
1098 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1102 unsigned long long g[2];
1103 unsigned char b[16];
1105 unsigned char k[32];
1107 /* KMAC-AES parameter block - end */
1110 unsigned long long g[2];
1111 unsigned char b[16];
1114 unsigned long long g[2];
1115 unsigned char b[16];
1118 unsigned long long blocks;
1127 unsigned char pad[140];
1131 } S390X_AES_CCM_CTX;
1133 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */
1134 # define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
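/*
 * Worked example (added): keylen 16 -> ((16<<3)-128)>>6 == 0,
 * keylen 24 -> 1, keylen 32 -> 2; added to S390X_AES_128 these yield the
 * function codes 18, 19 and 20 for AES-128/192/256 respectively.
 */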
1136 /* Most modes of operation need km for partial block processing. */
1137 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1138 S390X_CAPBIT(S390X_AES_128))
1139 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1140 S390X_CAPBIT(S390X_AES_192))
1141 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1142 S390X_CAPBIT(S390X_AES_256))
1144 # define s390x_aes_init_key aes_init_key
1145 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1146 const unsigned char *iv, int enc);
1148 # define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
1149 # define S390X_aes_192_cbc_CAPABLE 1
1150 # define S390X_aes_256_cbc_CAPABLE 1
1151 # define S390X_AES_CBC_CTX EVP_AES_KEY
1153 # define s390x_aes_cbc_init_key aes_init_key
1155 # define s390x_aes_cbc_cipher aes_cbc_cipher
1156 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1157 const unsigned char *in, size_t len);
1159 # define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
1160 # define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
1161 # define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
1163 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
1164 const unsigned char *key,
1165 const unsigned char *iv, int enc)
1167 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1168 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1170 cctx->fc = S390X_AES_FC(keylen);
1172 cctx->fc |= S390X_DECRYPT;
1174 memcpy(cctx->km.param.k, key, keylen);
1178 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1179 const unsigned char *in, size_t len)
1181 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1183 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
1187 # define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
1188 (OPENSSL_s390xcap_P.kmo[0] & \
1189 S390X_CAPBIT(S390X_AES_128)))
1190 # define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
1191 (OPENSSL_s390xcap_P.kmo[0] & \
1192 S390X_CAPBIT(S390X_AES_192)))
1193 # define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
1194 (OPENSSL_s390xcap_P.kmo[0] & \
1195 S390X_CAPBIT(S390X_AES_256)))
1197 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
1198 const unsigned char *key,
1199 const unsigned char *ivec, int enc)
1201 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1202 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1203 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1204 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1206 memcpy(cctx->kmo.param.cv, iv, ivlen);
1207 memcpy(cctx->kmo.param.k, key, keylen);
1208 cctx->fc = S390X_AES_FC(keylen);
1213 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1214 const unsigned char *in, size_t len)
1216 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1221 *out = *in ^ cctx->kmo.param.cv[n];
1230 len &= ~(size_t)0xf;
1232 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1239 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1243 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1252 # define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
1253 (OPENSSL_s390xcap_P.kmf[0] & \
1254 S390X_CAPBIT(S390X_AES_128)))
1255 # define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
1256 (OPENSSL_s390xcap_P.kmf[0] & \
1257 S390X_CAPBIT(S390X_AES_192)))
1258 # define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
1259 (OPENSSL_s390xcap_P.kmf[0] & \
1260 S390X_CAPBIT(S390X_AES_256)))
1262 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1263 const unsigned char *key,
1264 const unsigned char *ivec, int enc)
1266 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1267 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1268 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1269 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1271 cctx->fc = S390X_AES_FC(keylen);
1272 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1274 cctx->fc |= S390X_DECRYPT;
1277 memcpy(cctx->kmf.param.cv, iv, ivlen);
1278 memcpy(cctx->kmf.param.k, key, keylen);
1282 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1283 const unsigned char *in, size_t len)
1285 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1286 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1287 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1294 *out = cctx->kmf.param.cv[n] ^ tmp;
1295 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1304 len &= ~(size_t)0xf;
1306 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1313 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1314 S390X_AES_FC(keylen), cctx->kmf.param.k);
1318 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1319 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1328 # define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1329 S390X_CAPBIT(S390X_AES_128))
1330 # define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1331 S390X_CAPBIT(S390X_AES_192))
1332 # define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1333 S390X_CAPBIT(S390X_AES_256))
1335 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1336 const unsigned char *key,
1337 const unsigned char *ivec, int enc)
1339 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1340 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1341 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1342 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1344 cctx->fc = S390X_AES_FC(keylen);
1345 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1347 cctx->fc |= S390X_DECRYPT;
1349 memcpy(cctx->kmf.param.cv, iv, ivlen);
1350 memcpy(cctx->kmf.param.k, key, keylen);
1354 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1355 const unsigned char *in, size_t len)
1357 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1359 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1363 # define S390X_aes_128_cfb1_CAPABLE 0
1364 # define S390X_aes_192_cfb1_CAPABLE 0
1365 # define S390X_aes_256_cfb1_CAPABLE 0
1367 # define s390x_aes_cfb1_init_key aes_init_key
1369 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
1370 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1371 const unsigned char *in, size_t len);
1373 # define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
1374 # define S390X_aes_192_ctr_CAPABLE 1
1375 # define S390X_aes_256_ctr_CAPABLE 1
1376 # define S390X_AES_CTR_CTX EVP_AES_KEY
1378 # define s390x_aes_ctr_init_key aes_init_key
1380 # define s390x_aes_ctr_cipher aes_ctr_cipher
1381 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1382 const unsigned char *in, size_t len);
1384 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
1385 (OPENSSL_s390xcap_P.kma[0] & \
1386 S390X_CAPBIT(S390X_AES_128)))
1387 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
1388 (OPENSSL_s390xcap_P.kma[0] & \
1389 S390X_CAPBIT(S390X_AES_192)))
1390 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
1391 (OPENSSL_s390xcap_P.kma[0] & \
1392 S390X_CAPBIT(S390X_AES_256)))
1394 /* iv + padding length for IV lengths != 12 */
1395 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
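/*
 * Worked example (added): the macro rounds the IV length up to a 16-byte
 * multiple and appends one extra 16-byte block for the bit-length field,
 * e.g. S390X_gcm_ivpadlen(13) == 32 and S390X_gcm_ivpadlen(17) == 48.
 */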
1398 * Process additional authenticated data. Returns 0 on success. Code is big-endian.
1401 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1404 unsigned long long alen;
1407 if (ctx->kma.param.tpcl)
1410 alen = ctx->kma.param.taadl + len;
1411 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1413 ctx->kma.param.taadl = alen;
1418 ctx->ares[n] = *aad;
1423 /* ctx->ares contains a complete block if offset has wrapped around */
1425 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1426 ctx->fc |= S390X_KMA_HS;
1433 len &= ~(size_t)0xf;
1435 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1437 ctx->fc |= S390X_KMA_HS;
1445 ctx->ares[rem] = aad[rem];
1452 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1453 * success. Code is big-endian.
1455 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1456 unsigned char *out, size_t len)
1458 const unsigned char *inptr;
1459 unsigned long long mlen;
1462 unsigned char b[16];
1467 mlen = ctx->kma.param.tpcl + len;
1468 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1470 ctx->kma.param.tpcl = mlen;
1476 while (n && inlen) {
1477 ctx->mres[n] = *inptr;
1482 /* ctx->mres contains a complete block if offset has wrapped around */
1484 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1485 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1486 ctx->fc |= S390X_KMA_HS;
1489 /* The previous call already encrypted/decrypted its remainder;
1490 * see the comment below. */
1505 len &= ~(size_t)0xf;
1507 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1508 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1511 ctx->fc |= S390X_KMA_HS;
1516 * If there is a remainder, it has to be saved such that it can be
1517 * processed by kma later. However, we also have to do the for-now
1518 * unauthenticated encryption/decryption part here and now...
1521 if (!ctx->mreslen) {
1522 buf.w[0] = ctx->kma.param.j0.w[0];
1523 buf.w[1] = ctx->kma.param.j0.w[1];
1524 buf.w[2] = ctx->kma.param.j0.w[2];
1525 buf.w[3] = ctx->kma.param.cv.w + 1;
1526 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1530 for (i = 0; i < rem; i++) {
1531 ctx->mres[n + i] = in[i];
1532 out[i] = in[i] ^ ctx->kres[n + i];
1535 ctx->mreslen += rem;
1541 * Initialize context structure. Code is big-endian.
1543 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1544 const unsigned char *iv)
1546 ctx->kma.param.t.g[0] = 0;
1547 ctx->kma.param.t.g[1] = 0;
1548 ctx->kma.param.tpcl = 0;
1549 ctx->kma.param.taadl = 0;
1554 if (ctx->ivlen == 12) {
1555 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1556 ctx->kma.param.j0.w[3] = 1;
1557 ctx->kma.param.cv.w = 1;
1559 /* ctx->iv has the right size and is already padded. */
1560 memcpy(ctx->iv, iv, ctx->ivlen);
1561 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1562 ctx->fc, &ctx->kma.param);
1563 ctx->fc |= S390X_KMA_HS;
1565 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1566 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1567 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1568 ctx->kma.param.t.g[0] = 0;
1569 ctx->kma.param.t.g[1] = 0;
1574 * Performs various operations on the context structure depending on control
1575 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1576 * Code is big-endian.
1578 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1580 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1581 S390X_AES_GCM_CTX *gctx_out;
1582 EVP_CIPHER_CTX *out;
1583 unsigned char *buf, *iv;
1584 int ivlen, enc, len;
1588 ivlen = EVP_CIPHER_CTX_iv_length(c);
1589 iv = EVP_CIPHER_CTX_iv_noconst(c);
1592 gctx->ivlen = ivlen;
1596 gctx->tls_aad_len = -1;
1599 case EVP_CTRL_AEAD_SET_IVLEN:
1604 iv = EVP_CIPHER_CTX_iv_noconst(c);
1605 len = S390X_gcm_ivpadlen(arg);
1607 /* Allocate memory for iv if needed. */
1608 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1610 OPENSSL_free(gctx->iv);
1612 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1613 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1618 memset(gctx->iv + arg, 0, len - arg - 8);
1619 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1624 case EVP_CTRL_AEAD_SET_TAG:
1625 buf = EVP_CIPHER_CTX_buf_noconst(c);
1626 enc = EVP_CIPHER_CTX_encrypting(c);
1627 if (arg <= 0 || arg > 16 || enc)
1630 memcpy(buf, ptr, arg);
1634 case EVP_CTRL_AEAD_GET_TAG:
1635 enc = EVP_CIPHER_CTX_encrypting(c);
1636 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1639 memcpy(ptr, gctx->kma.param.t.b, arg);
1642 case EVP_CTRL_GCM_SET_IV_FIXED:
1643 /* Special case: -1 length restores the whole IV */
1645 memcpy(gctx->iv, ptr, gctx->ivlen);
1650 * Fixed field must be at least 4 bytes and the invocation field at least 8 bytes.
1653 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1657 memcpy(gctx->iv, ptr, arg);
1659 enc = EVP_CIPHER_CTX_encrypting(c);
1660 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1666 case EVP_CTRL_GCM_IV_GEN:
1667 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1670 s390x_aes_gcm_setiv(gctx, gctx->iv);
1672 if (arg <= 0 || arg > gctx->ivlen)
1675 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1677 * The invocation field will be at least 8 bytes in size, so there is no
1678 * need to check for wraparound or to increment more than the last 8 bytes.
1680 ctr64_inc(gctx->iv + gctx->ivlen - 8);
1684 case EVP_CTRL_GCM_SET_IV_INV:
1685 enc = EVP_CIPHER_CTX_encrypting(c);
1686 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1689 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1690 s390x_aes_gcm_setiv(gctx, gctx->iv);
1694 case EVP_CTRL_AEAD_TLS1_AAD:
1695 /* Save the aad for later use. */
1696 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1699 buf = EVP_CIPHER_CTX_buf_noconst(c);
1700 memcpy(buf, ptr, arg);
1701 gctx->tls_aad_len = arg;
1702 gctx->tls_enc_records = 0;
1704 len = buf[arg - 2] << 8 | buf[arg - 1];
1705 /* Correct length for explicit iv. */
1706 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1708 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1710 /* If decrypting, correct for the tag too. */
1711 enc = EVP_CIPHER_CTX_encrypting(c);
1713 if (len < EVP_GCM_TLS_TAG_LEN)
1715 len -= EVP_GCM_TLS_TAG_LEN;
1717 buf[arg - 2] = len >> 8;
1718 buf[arg - 1] = len & 0xff;
1719 /* Extra padding: tag appended to record. */
1720 return EVP_GCM_TLS_TAG_LEN;
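/*
 * Worked example (added): for a TLS record whose AAD length field reads
 * 40, encryption rewrites it to 40 - 8 == 32 (dropping the explicit IV);
 * decryption additionally subtracts the 16-byte tag, leaving 16, before
 * the AAD is used in the GCM computation.
 */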
1724 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1725 iv = EVP_CIPHER_CTX_iv_noconst(c);
1727 if (gctx->iv == iv) {
1728 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1730 len = S390X_gcm_ivpadlen(gctx->ivlen);
1732 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1733 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1737 memcpy(gctx_out->iv, gctx->iv, len);
1747 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1749 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1750 const unsigned char *key,
1751 const unsigned char *iv, int enc)
1753 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1756 if (iv == NULL && key == NULL)
1760 keylen = EVP_CIPHER_CTX_key_length(ctx);
1761 memcpy(&gctx->kma.param.k, key, keylen);
1763 gctx->fc = S390X_AES_FC(keylen);
1765 gctx->fc |= S390X_DECRYPT;
1767 if (iv == NULL && gctx->iv_set)
1771 s390x_aes_gcm_setiv(gctx, iv);
1777 s390x_aes_gcm_setiv(gctx, iv);
1779 memcpy(gctx->iv, iv, gctx->ivlen);
1788 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1789 * if successful. Otherwise -1 is returned. Code is big-endian.
1791 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1792 const unsigned char *in, size_t len)
1794 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1795 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1796 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1799 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1803 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1804 * Requirements from SP 800-38D". The requirement is for one party to the
1805 * communication to fail after 2^64 - 1 keys. We do this on the encrypting
1808 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
1809 EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
1813 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1814 : EVP_CTRL_GCM_SET_IV_INV,
1815 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1818 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1819 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1820 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1822 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1823 gctx->kma.param.tpcl = len << 3;
1824 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1825 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1828 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1829 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1831 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1832 EVP_GCM_TLS_TAG_LEN)) {
1833 OPENSSL_cleanse(out, len);
1840 gctx->tls_aad_len = -1;
1845 * Called from EVP layer to initialize context, process additional
1846 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1847 * ciphertext or process a TLS packet, depending on context. Returns bytes
1848 * written on success. Otherwise -1 is returned. Code is big-endian.
1850 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1851 const unsigned char *in, size_t len)
1853 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1854 unsigned char *buf, tmp[16];
1860 if (gctx->tls_aad_len >= 0)
1861 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1868 if (s390x_aes_gcm_aad(gctx, in, len))
1871 if (s390x_aes_gcm(gctx, in, out, len))
1876 gctx->kma.param.taadl <<= 3;
1877 gctx->kma.param.tpcl <<= 3;
1878 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1879 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1880 /* Recall that we already en-/decrypted gctx->mres
1881 * and returned it to the caller... */
1882 OPENSSL_cleanse(tmp, gctx->mreslen);
1885 enc = EVP_CIPHER_CTX_encrypting(ctx);
1889 if (gctx->taglen < 0)
1892 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1893 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1900 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1902 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1903 const unsigned char *iv;
1908 iv = EVP_CIPHER_CTX_iv(c);
1910 OPENSSL_free(gctx->iv);
1912 OPENSSL_cleanse(gctx, sizeof(*gctx));
1916 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1917 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
1918 # define S390X_aes_256_xts_CAPABLE 1
1920 # define s390x_aes_xts_init_key aes_xts_init_key
1921 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1922 const unsigned char *key,
1923 const unsigned char *iv, int enc);
1924 # define s390x_aes_xts_cipher aes_xts_cipher
1925 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1926 const unsigned char *in, size_t len);
1927 # define s390x_aes_xts_ctrl aes_xts_ctrl
1928 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1929 # define s390x_aes_xts_cleanup aes_xts_cleanup
1931 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
1932 (OPENSSL_s390xcap_P.kmac[0] & \
1933 S390X_CAPBIT(S390X_AES_128)))
1934 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
1935 (OPENSSL_s390xcap_P.kmac[0] & \
1936 S390X_CAPBIT(S390X_AES_192)))
1937 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
1938 (OPENSSL_s390xcap_P.kmac[0] & \
1939 S390X_CAPBIT(S390X_AES_256)))
1941 # define S390X_CCM_AAD_FLAG 0x40
1944 * Set nonce and length fields. Code is big-endian.
1946 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1947 const unsigned char *nonce,
1950 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1951 ctx->aes.ccm.nonce.g[1] = mlen;
1952 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1956 * Process additional authenticated data. Code is big-endian.
1958 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1967 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1969 /* Suppress 'type-punned pointer dereference' warning. */
1970 ptr = ctx->aes.ccm.buf.b;
1972 if (alen < ((1 << 16) - (1 << 8))) {
1973 *(uint16_t *)ptr = alen;
1975 } else if (sizeof(alen) == 8
1976 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1977 *(uint16_t *)ptr = 0xffff;
1978 *(uint64_t *)(ptr + 2) = alen;
1981 *(uint16_t *)ptr = 0xfffe;
1982 *(uint32_t *)(ptr + 2) = alen;
1986 while (i < 16 && alen) {
1987 ctx->aes.ccm.buf.b[i] = *aad;
1993 ctx->aes.ccm.buf.b[i] = 0;
1997 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1998 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1999 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
2000 &ctx->aes.ccm.kmac_param);
2001 ctx->aes.ccm.blocks += 2;
2004 alen &= ~(size_t)0xf;
2006 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2007 ctx->aes.ccm.blocks += alen >> 4;
2011 for (i = 0; i < rem; i++)
2012 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
2014 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2015 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2016 ctx->aes.ccm.kmac_param.k);
2017 ctx->aes.ccm.blocks++;
2022 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for success.
2025 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
2026 unsigned char *out, size_t len, int enc)
2029 unsigned int i, l, num;
2030 unsigned char flags;
2032 flags = ctx->aes.ccm.nonce.b[0];
2033 if (!(flags & S390X_CCM_AAD_FLAG)) {
2034 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
2035 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
2036 ctx->aes.ccm.blocks++;
2039 ctx->aes.ccm.nonce.b[0] = l;
2042 * Reconstruct the message length from the encoded length field
2043 * and initialize that field with the counter value.
2046 for (i = 15 - l; i < 15; i++) {
2047 n |= ctx->aes.ccm.nonce.b[i];
2048 ctx->aes.ccm.nonce.b[i] = 0;
2051 n |= ctx->aes.ccm.nonce.b[15];
2052 ctx->aes.ccm.nonce.b[15] = 1;
2055 return -1; /* length mismatch */
2058 /* Two operations per block plus one for tag encryption */
2059 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
2060 if (ctx->aes.ccm.blocks > (1ULL << 61))
2061 return -2; /* too much data */
2066 len &= ~(size_t)0xf;
2069 /* mac-then-encrypt */
2071 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2073 for (i = 0; i < rem; i++)
2074 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
2076 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2077 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2078 ctx->aes.ccm.kmac_param.k);
2081 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2082 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2083 &num, (ctr128_f)AES_ctr32_encrypt);
2085 /* decrypt-then-mac */
2086 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2087 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2088 &num, (ctr128_f)AES_ctr32_encrypt);
2091 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2093 for (i = 0; i < rem; i++)
2094 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
2096 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2097 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2098 ctx->aes.ccm.kmac_param.k);
2102 for (i = 15 - l; i < 16; i++)
2103 ctx->aes.ccm.nonce.b[i] = 0;
2105 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
2106 ctx->aes.ccm.kmac_param.k);
2107 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
2108 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
2110 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
2115 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
2116 * if successful. Otherwise -1 is returned.
2118 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2119 const unsigned char *in, size_t len)
2121 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2122 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2123 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2124 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2127 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
2131 /* Set explicit iv (sequence number). */
2132 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2135 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2137 * Get explicit iv (sequence number). We already have fixed iv
2138 * (server/client_write_iv) here.
2140 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2141 s390x_aes_ccm_setiv(cctx, ivec, len);
2143 /* Process aad (sequence number|type|version|length) */
2144 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
2146 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2147 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2150 if (s390x_aes_ccm(cctx, in, out, len, enc))
2153 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2154 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2156 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2157 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
2162 OPENSSL_cleanse(out, len);
2168 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is returned.
2171 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
2172 const unsigned char *key,
2173 const unsigned char *iv, int enc)
2175 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2176 unsigned char *ivec;
2179 if (iv == NULL && key == NULL)
2183 keylen = EVP_CIPHER_CTX_key_length(ctx);
2184 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
2185 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
2187 /* Store encoded m and l. */
2188 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
2189 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
2190 memset(cctx->aes.ccm.nonce.b + 1, 0,
2191 sizeof(cctx->aes.ccm.nonce.b));
2192 cctx->aes.ccm.blocks = 0;
2194 cctx->aes.ccm.key_set = 1;
2198 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2199 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
2201 cctx->aes.ccm.iv_set = 1;
2208 * Called from EVP layer to initialize context, process additional
2209 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2210 * plaintext or process a TLS packet, depending on context. Returns bytes
2211 * written on success. Otherwise -1 is returned.
2213 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2214 const unsigned char *in, size_t len)
2216 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2217 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2219 unsigned char *buf, *ivec;
2221 if (!cctx->aes.ccm.key_set)
2224 if (cctx->aes.ccm.tls_aad_len >= 0)
2225 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2228 * Final(): Does not return any data. Recall that CCM is mac-then-encrypt,
2229 * so integrity must already be checked at Update(), i.e., before
2230 * potentially corrupted data is output.
2232 if (in == NULL && out != NULL)
2235 if (!cctx->aes.ccm.iv_set)
2238 if (!enc && !cctx->aes.ccm.tag_set)
2242 /* Update(): Pass message length. */
2244 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2245 s390x_aes_ccm_setiv(cctx, ivec, len);
2247 cctx->aes.ccm.len_set = 1;
2251 /* Update(): Process aad. */
2252 if (!cctx->aes.ccm.len_set && len)
2255 s390x_aes_ccm_aad(cctx, in, len);
2259 /* Update(): Process message. */
2261 if (!cctx->aes.ccm.len_set) {
2263 * In case message length was not previously set explicitly via
2264 * Update(), set it now.
2266 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2267 s390x_aes_ccm_setiv(cctx, ivec, len);
2269 cctx->aes.ccm.len_set = 1;
2273 if (s390x_aes_ccm(cctx, in, out, len, enc))
2276 cctx->aes.ccm.tag_set = 1;
2281 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2282 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2283 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2289 OPENSSL_cleanse(out, len);
2291 cctx->aes.ccm.iv_set = 0;
2292 cctx->aes.ccm.tag_set = 0;
2293 cctx->aes.ccm.len_set = 0;
2299 * Performs various operations on the context structure depending on control
2300 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2301 * Code is big-endian.
2303 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2305 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2306 unsigned char *buf, *iv;
2311 cctx->aes.ccm.key_set = 0;
2312 cctx->aes.ccm.iv_set = 0;
2313 cctx->aes.ccm.l = 8;
2314 cctx->aes.ccm.m = 12;
2315 cctx->aes.ccm.tag_set = 0;
2316 cctx->aes.ccm.len_set = 0;
2317 cctx->aes.ccm.tls_aad_len = -1;
2320 case EVP_CTRL_AEAD_TLS1_AAD:
2321 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2324 /* Save the aad for later use. */
2325 buf = EVP_CIPHER_CTX_buf_noconst(c);
2326 memcpy(buf, ptr, arg);
2327 cctx->aes.ccm.tls_aad_len = arg;
2329 len = buf[arg - 2] << 8 | buf[arg - 1];
2330 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2333 /* Correct length for explicit iv. */
2334 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2336 enc = EVP_CIPHER_CTX_encrypting(c);
2338 if (len < cctx->aes.ccm.m)
2341 /* Correct length for tag. */
2342 len -= cctx->aes.ccm.m;
2345 buf[arg - 2] = len >> 8;
2346 buf[arg - 1] = len & 0xff;
2348 /* Extra padding: tag appended to record. */
2349 return cctx->aes.ccm.m;
2351 case EVP_CTRL_CCM_SET_IV_FIXED:
2352 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2355 /* Copy to first part of the iv. */
2356 iv = EVP_CIPHER_CTX_iv_noconst(c);
2357 memcpy(iv, ptr, arg);
2360 case EVP_CTRL_AEAD_SET_IVLEN:
2364 case EVP_CTRL_CCM_SET_L:
2365 if (arg < 2 || arg > 8)
2368 cctx->aes.ccm.l = arg;
2371 case EVP_CTRL_AEAD_SET_TAG:
2372 if ((arg & 1) || arg < 4 || arg > 16)
2375 enc = EVP_CIPHER_CTX_encrypting(c);
2380 cctx->aes.ccm.tag_set = 1;
2381 buf = EVP_CIPHER_CTX_buf_noconst(c);
2382 memcpy(buf, ptr, arg);
2385 cctx->aes.ccm.m = arg;
2388 case EVP_CTRL_AEAD_GET_TAG:
2389 enc = EVP_CIPHER_CTX_encrypting(c);
2390 if (!enc || !cctx->aes.ccm.tag_set)
2393 if (arg < cctx->aes.ccm.m)
2396 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2397 cctx->aes.ccm.tag_set = 0;
2398 cctx->aes.ccm.iv_set = 0;
2399 cctx->aes.ccm.len_set = 0;
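/*
 * Editorial sketch (compiled out): the length bookkeeping done by the
 * EVP_CTRL_AEAD_TLS1_AAD case above. aad is the 13-byte TLS 1.2 additional
 * data (sequence number[8] || type || version[2] || length[2]); m is the
 * tag length. The helper name and its standalone form are assumptions for
 * illustration only.
 */
#if 0
static int ccm_tls1_aad_fixup(unsigned char aad[EVP_AEAD_TLS1_AAD_LEN],
                              int decrypting, int m)
{
    unsigned int len = aad[11] << 8 | aad[12];

    if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
        return -1;
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;          /* strip explicit nonce */
    if (decrypting) {
        if (len < (unsigned int)m)
            return -1;
        len -= m;                                /* strip tag */
    }
    /* Patch the corrected plaintext length back into the AAD. */
    aad[11] = (unsigned char)(len >> 8);
    aad[12] = (unsigned char)(len & 0xff);
    return (int)len;
}
#endif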
2410 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2412 # ifndef OPENSSL_NO_OCB
2413 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2414 # define S390X_aes_128_ocb_CAPABLE 0
2415 # define S390X_aes_192_ocb_CAPABLE 0
2416 # define S390X_aes_256_ocb_CAPABLE 0
2418 # define s390x_aes_ocb_init_key aes_ocb_init_key
2419 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2420 const unsigned char *iv, int enc);
2421 # define s390x_aes_ocb_cipher aes_ocb_cipher
2422 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2423 const unsigned char *in, size_t len);
2424 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2425 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2426 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2427 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2430 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2432 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2433 nid##_##keylen##_##nmode,blocksize, \
2436 flags | EVP_CIPH_##MODE##_MODE, \
2437 s390x_aes_##mode##_init_key, \
2438 s390x_aes_##mode##_cipher, \
2440 sizeof(S390X_AES_##MODE##_CTX), \
2446 static const EVP_CIPHER aes_##keylen##_##mode = { \
2447 nid##_##keylen##_##nmode, \
2451 flags | EVP_CIPH_##MODE##_MODE, \
2453 aes_##mode##_cipher, \
2455 sizeof(EVP_AES_KEY), \
2461 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2463 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2464 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2467 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2468 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2469 nid##_##keylen##_##mode, \
2471 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2473 flags | EVP_CIPH_##MODE##_MODE, \
2474 s390x_aes_##mode##_init_key, \
2475 s390x_aes_##mode##_cipher, \
2476 s390x_aes_##mode##_cleanup, \
2477 sizeof(S390X_AES_##MODE##_CTX), \
2480 s390x_aes_##mode##_ctrl, \
2483 static const EVP_CIPHER aes_##keylen##_##mode = { \
2484 nid##_##keylen##_##mode,blocksize, \
2485 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2487 flags | EVP_CIPH_##MODE##_MODE, \
2488 aes_##mode##_init_key, \
2489 aes_##mode##_cipher, \
2490 aes_##mode##_cleanup, \
2491 sizeof(EVP_AES_##MODE##_CTX), \
2494 aes_##mode##_ctrl, \
2497 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2499 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2500 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2505 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2506 static const EVP_CIPHER aes_##keylen##_##mode = { \
2507 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2508 flags|EVP_CIPH_##MODE##_MODE, \
2510 aes_##mode##_cipher, \
2512 sizeof(EVP_AES_KEY), \
2513 NULL,NULL,NULL,NULL }; \
2514 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2515 { return &aes_##keylen##_##mode; }
2517 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2518 static const EVP_CIPHER aes_##keylen##_##mode = { \
2519 nid##_##keylen##_##mode,blocksize, \
2520 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2522 flags|EVP_CIPH_##MODE##_MODE, \
2523 aes_##mode##_init_key, \
2524 aes_##mode##_cipher, \
2525 aes_##mode##_cleanup, \
2526 sizeof(EVP_AES_##MODE##_CTX), \
2527 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2528 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2529 { return &aes_##keylen##_##mode; }
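/*
 * Editorial sketch (compiled out): approximately what the fallback
 * BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, flags) above expands
 * to. The EVP_CIPHER fields are: nid, block size, key length, IV length,
 * flags, init, do_cipher, cleanup, ctx size, ASN.1 hooks, ctrl, app data.
 * CUSTOM_FLAGS is defined further down in this file.
 */
#if 0
static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1, 16, 12,
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS | EVP_CIPH_GCM_MODE,
    aes_gcm_init_key,
    aes_gcm_cipher,
    aes_gcm_cleanup,
    sizeof(EVP_AES_GCM_CTX),
    NULL, NULL, aes_gcm_ctrl, NULL
};
const EVP_CIPHER *EVP_aes_128_gcm(void)
{ return &aes_128_gcm; }
#endif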
2533 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2534 # include "arm_arch.h"
2535 # if __ARM_MAX_ARCH__>=7
2536 # if defined(BSAES_ASM)
2537 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2539 # if defined(VPAES_ASM)
2540 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2542 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2543 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2544 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2545 # define HWAES_encrypt aes_v8_encrypt
2546 # define HWAES_decrypt aes_v8_decrypt
2547 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2548 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2552 #if defined(HWAES_CAPABLE)
2553 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2555 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2557 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2558 const AES_KEY *key);
2559 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2560 const AES_KEY *key);
2561 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2562 size_t length, const AES_KEY *key,
2563 unsigned char *ivec, const int enc);
2564 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2565 size_t len, const AES_KEY *key,
2566 const unsigned char ivec[16]);
2567 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2568 size_t len, const AES_KEY *key1,
2569 const AES_KEY *key2, const unsigned char iv[16]);
2570 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2571 size_t len, const AES_KEY *key1,
2572 const AES_KEY *key2, const unsigned char iv[16]);
2575 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2576 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2577 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2578 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2579 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2580 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2581 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2582 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
2584 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2585 const unsigned char *iv, int enc)
2588 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2590 mode = EVP_CIPHER_CTX_mode(ctx);
2591 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2593 #ifdef HWAES_CAPABLE
2594 if (HWAES_CAPABLE) {
2595 ret = HWAES_set_decrypt_key(key,
2596 EVP_CIPHER_CTX_key_length(ctx) * 8,
2598 dat->block = (block128_f) HWAES_decrypt;
2599 dat->stream.cbc = NULL;
2600 # ifdef HWAES_cbc_encrypt
2601 if (mode == EVP_CIPH_CBC_MODE)
2602 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2606 #ifdef BSAES_CAPABLE
2607 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2608 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2610 dat->block = (block128_f) AES_decrypt;
2611 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2614 #ifdef VPAES_CAPABLE
2615 if (VPAES_CAPABLE) {
2616 ret = vpaes_set_decrypt_key(key,
2617 EVP_CIPHER_CTX_key_length(ctx) * 8,
2619 dat->block = (block128_f) vpaes_decrypt;
2620 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2621 (cbc128_f) vpaes_cbc_encrypt : NULL;
2625 ret = AES_set_decrypt_key(key,
2626 EVP_CIPHER_CTX_key_length(ctx) * 8,
2628 dat->block = (block128_f) AES_decrypt;
2629 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2630 (cbc128_f) AES_cbc_encrypt : NULL;
2633 #ifdef HWAES_CAPABLE
2634 if (HWAES_CAPABLE) {
2635 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2637 dat->block = (block128_f) HWAES_encrypt;
2638 dat->stream.cbc = NULL;
2639 # ifdef HWAES_cbc_encrypt
2640 if (mode == EVP_CIPH_CBC_MODE)
2641 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2644 # ifdef HWAES_ctr32_encrypt_blocks
2645 if (mode == EVP_CIPH_CTR_MODE)
2646 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2649 (void)0; /* terminate potentially open 'else' */
2652 #ifdef BSAES_CAPABLE
2653 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2654 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2656 dat->block = (block128_f) AES_encrypt;
2657 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2660 #ifdef VPAES_CAPABLE
2661 if (VPAES_CAPABLE) {
2662 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2664 dat->block = (block128_f) vpaes_encrypt;
2665 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2666 (cbc128_f) vpaes_cbc_encrypt : NULL;
2670 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2672 dat->block = (block128_f) AES_encrypt;
2673 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2674 (cbc128_f) AES_cbc_encrypt : NULL;
2676 if (mode == EVP_CIPH_CTR_MODE)
2677 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2682 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2689 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2690 const unsigned char *in, size_t len)
2692 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2694 if (dat->stream.cbc)
2695 (*dat->stream.cbc) (in, out, len, &dat->ks,
2696 EVP_CIPHER_CTX_iv_noconst(ctx),
2697 EVP_CIPHER_CTX_encrypting(ctx));
2698 else if (EVP_CIPHER_CTX_encrypting(ctx))
2699 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2700 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2702 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2703 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2708 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2709 const unsigned char *in, size_t len)
2711 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2713 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2718 for (i = 0, len -= bl; i <= len; i += bl)
2719 (*dat->block) (in + i, out + i, &dat->ks);
2724 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2725 const unsigned char *in, size_t len)
2727 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2729 int num = EVP_CIPHER_CTX_num(ctx);
2730 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2731 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2732 EVP_CIPHER_CTX_set_num(ctx, num);
2736 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2737 const unsigned char *in, size_t len)
2739 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2741 int num = EVP_CIPHER_CTX_num(ctx);
2742 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2743 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2744 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2745 EVP_CIPHER_CTX_set_num(ctx, num);
2749 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2750 const unsigned char *in, size_t len)
2752 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2754 int num = EVP_CIPHER_CTX_num(ctx);
2755 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2756 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2757 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2758 EVP_CIPHER_CTX_set_num(ctx, num);
2762 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2763 const unsigned char *in, size_t len)
2765 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2767 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2768 int num = EVP_CIPHER_CTX_num(ctx);
2769 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2770 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2771 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2772 EVP_CIPHER_CTX_set_num(ctx, num);
2776 while (len >= MAXBITCHUNK) {
2777 int num = EVP_CIPHER_CTX_num(ctx);
2778 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2779 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2780 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2781 EVP_CIPHER_CTX_set_num(ctx, num);
2787 int num = EVP_CIPHER_CTX_num(ctx);
2788 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2789 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2790 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2791 EVP_CIPHER_CTX_set_num(ctx, num);
2797 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2798 const unsigned char *in, size_t len)
2800 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2801 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2803 if (dat->stream.ctr)
2804 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2805 EVP_CIPHER_CTX_iv_noconst(ctx),
2806 EVP_CIPHER_CTX_buf_noconst(ctx),
2807 &num, dat->stream.ctr);
2809 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2810 EVP_CIPHER_CTX_iv_noconst(ctx),
2811 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2813 EVP_CIPHER_CTX_set_num(ctx, num);
2817 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2818 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2819 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
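/*
 * Editorial sketch (compiled out): one-shot AES-128-CTR through the EVP
 * ciphers emitted by the packs above. CTR is a stream mode, so a single
 * EVP_EncryptUpdate handles any input length; buffer names are
 * illustrative assumptions.
 */
#if 0
static int ctr_sketch(const unsigned char key[16],
                      const unsigned char iv[16],
                      const unsigned char *in, int inlen,
                      unsigned char *out)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, ok;

    ok = c != NULL
         && EVP_EncryptInit_ex(c, EVP_aes_128_ctr(), NULL, key, iv)
         && EVP_EncryptUpdate(c, out, &outl, in, inlen);
    EVP_CIPHER_CTX_free(c);
    return ok ? outl : -1;
}
#endif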
2821 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2823 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2826 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2827 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2828 OPENSSL_free(gctx->iv);
2832 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2834 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2839 gctx->ivlen = c->cipher->iv_len;
2843 gctx->tls_aad_len = -1;
2846 case EVP_CTRL_AEAD_SET_IVLEN:
2849 /* Allocate memory for IV if needed */
2850 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2851 if (gctx->iv != c->iv)
2852 OPENSSL_free(gctx->iv);
2853 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2854 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2861 case EVP_CTRL_AEAD_SET_TAG:
2862 if (arg <= 0 || arg > 16 || c->encrypt)
2864 memcpy(c->buf, ptr, arg);
2868 case EVP_CTRL_AEAD_GET_TAG:
2869 if (arg <= 0 || arg > 16 || !c->encrypt
2870 || gctx->taglen < 0)
2872 memcpy(ptr, c->buf, arg);
2875 case EVP_CTRL_GET_IV:
2876 if (gctx->iv_gen != 1)
2878 if (gctx->ivlen != arg)
2880 memcpy(ptr, gctx->iv, arg);
2883 case EVP_CTRL_GCM_SET_IV_FIXED:
2884 /* Special case: -1 length restores whole IV */
2886 memcpy(gctx->iv, ptr, gctx->ivlen);
2891 * Fixed field must be at least 4 bytes and invocation field at least
2894 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2897 memcpy(gctx->iv, ptr, arg);
2898 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2903 case EVP_CTRL_GCM_IV_GEN:
2904 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2906 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2907 if (arg <= 0 || arg > gctx->ivlen)
2909 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2911 * Invocation field will be at least 8 bytes in size, so there is no need
2912 * to check for wraparound or to increment more than the last 8 bytes.
2914 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2918 case EVP_CTRL_GCM_SET_IV_INV:
2919 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2921 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2922 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2926 case EVP_CTRL_AEAD_TLS1_AAD:
2927 /* Save the AAD for later use */
2928 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2930 memcpy(c->buf, ptr, arg);
2931 gctx->tls_aad_len = arg;
2932 gctx->tls_enc_records = 0;
2934 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2935 /* Correct length for explicit IV */
2936 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2938 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2939 /* If decrypting correct for tag too */
2941 if (len < EVP_GCM_TLS_TAG_LEN)
2943 len -= EVP_GCM_TLS_TAG_LEN;
2945 c->buf[arg - 2] = len >> 8;
2946 c->buf[arg - 1] = len & 0xff;
2948 /* Extra padding: tag appended to record */
2949 return EVP_GCM_TLS_TAG_LEN;
2953 EVP_CIPHER_CTX *out = ptr;
2954 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2955 if (gctx->gcm.key) {
2956 if (gctx->gcm.key != &gctx->ks)
2958 gctx_out->gcm.key = &gctx_out->ks;
2960 if (gctx->iv == c->iv)
2961 gctx_out->iv = out->iv;
2963 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2964 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2967 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2978 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2979 const unsigned char *iv, int enc)
2981 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2986 #ifdef HWAES_CAPABLE
2987 if (HWAES_CAPABLE) {
2988 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2989 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2990 (block128_f) HWAES_encrypt);
2991 # ifdef HWAES_ctr32_encrypt_blocks
2992 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2999 #ifdef BSAES_CAPABLE
3000 if (BSAES_CAPABLE) {
3001 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3002 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3003 (block128_f) AES_encrypt);
3004 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
3008 #ifdef VPAES_CAPABLE
3009 if (VPAES_CAPABLE) {
3010 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3011 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3012 (block128_f) vpaes_encrypt);
3017 (void)0; /* terminate potentially open 'else' */
3019 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3020 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3021 (block128_f) AES_encrypt);
3023 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
3030 * If we have an iv we can set it directly, otherwise use saved IV.
3032 if (iv == NULL && gctx->iv_set)
3035 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3040 /* If key set use IV, otherwise copy */
3042 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3044 memcpy(gctx->iv, iv, gctx->ivlen);
3052 * Handle TLS GCM packet format. This consists of the last portion of the IV
3053 * followed by the payload and finally the tag. On encrypt generate IV,
3054 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
3058 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3059 const unsigned char *in, size_t len)
3061 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3063 /* Encrypt/decrypt must be performed in place */
3065 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3069 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
3070 * Requirements from SP 800-38D". The requirement is for one party to the
3071 * communication to fail after 2^64 - 1 keys. We do this on the encrypting
3074 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
3075 EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
3080 * Set IV from start of buffer or generate IV and write to start of
3083 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3084 : EVP_CTRL_GCM_SET_IV_INV,
3085 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3088 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3090 /* Fix buffer and length to point to payload */
3091 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3092 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3093 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3095 /* Encrypt payload */
3098 #if defined(AES_GCM_ASM)
3099 if (len >= 32 && AES_GCM_ASM(gctx)) {
3100 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3103 bulk = AES_gcm_encrypt(in, out, len,
3105 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3106 gctx->gcm.len.u[1] += bulk;
3109 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3112 len - bulk, gctx->ctr))
3116 #if defined(AES_GCM_ASM2)
3117 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3118 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3121 bulk = AES_gcm_encrypt(in, out, len,
3123 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3124 gctx->gcm.len.u[1] += bulk;
3127 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3128 in + bulk, out + bulk, len - bulk))
3132 /* Finally write tag */
3133 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3134 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3139 #if defined(AES_GCM_ASM)
3140 if (len >= 16 && AES_GCM_ASM(gctx)) {
3141 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3144 bulk = AES_gcm_decrypt(in, out, len,
3146 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3147 gctx->gcm.len.u[1] += bulk;
3150 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3153 len - bulk, gctx->ctr))
3157 #if defined(AES_GCM_ASM2)
3158 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3159 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3162 bulk = AES_gcm_decrypt(in, out, len,
3164 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3165 gctx->gcm.len.u[1] += bulk;
3168 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3169 in + bulk, out + bulk, len - bulk))
3173 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3174 /* If tag mismatch wipe buffer */
3175 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3176 OPENSSL_cleanse(out, len);
3184 gctx->tls_aad_len = -1;
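/*
 * Editorial sketch (compiled out): the TLS record layout that
 * aes_gcm_tls_cipher operates on, restating its pointer/length fixups.
 * The function name and out-parameters are assumptions for illustration.
 */
#if 0
static void gcm_tls_record_layout(const unsigned char *rec, size_t reclen,
                                  const unsigned char **explicit_iv,
                                  const unsigned char **payload,
                                  size_t *payload_len,
                                  const unsigned char **tag)
{
    *explicit_iv = rec;                               /* 8-byte explicit IV */
    *payload = rec + EVP_GCM_TLS_EXPLICIT_IV_LEN;     /* en/decrypted in place */
    *payload_len = reclen - EVP_GCM_TLS_EXPLICIT_IV_LEN - EVP_GCM_TLS_TAG_LEN;
    *tag = rec + reclen - EVP_GCM_TLS_TAG_LEN;        /* 16-byte tag */
}
#endif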
3188 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3189 const unsigned char *in, size_t len)
3191 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3192 /* If not set up, return error */
3196 if (gctx->tls_aad_len >= 0)
3197 return aes_gcm_tls_cipher(ctx, out, in, len);
3203 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3205 } else if (ctx->encrypt) {
3208 #if defined(AES_GCM_ASM)
3209 if (len >= 32 && AES_GCM_ASM(gctx)) {
3210 size_t res = (16 - gctx->gcm.mres) % 16;
3212 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3215 bulk = AES_gcm_encrypt(in + res,
3216 out + res, len - res,
3217 gctx->gcm.key, gctx->gcm.Yi.c,
3219 gctx->gcm.len.u[1] += bulk;
3223 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3226 len - bulk, gctx->ctr))
3230 #if defined(AES_GCM_ASM2)
3231 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3232 size_t res = (16 - gctx->gcm.mres) % 16;
3234 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3237 bulk = AES_gcm_encrypt(in + res,
3238 out + res, len - res,
3239 gctx->gcm.key, gctx->gcm.Yi.c,
3241 gctx->gcm.len.u[1] += bulk;
3245 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3246 in + bulk, out + bulk, len - bulk))
3252 #if defined(AES_GCM_ASM)
3253 if (len >= 16 && AES_GCM_ASM(gctx)) {
3254 size_t res = (16 - gctx->gcm.mres) % 16;
3256 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3259 bulk = AES_gcm_decrypt(in + res,
3260 out + res, len - res,
3262 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3263 gctx->gcm.len.u[1] += bulk;
3267 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3270 len - bulk, gctx->ctr))
3274 #if defined(AES_GCM_ASM2)
3275 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3276 size_t res = (16 - gctx->gcm.mres) % 16;
3278 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3281 bulk = AES_gcm_decrypt(in + res,
3282 out + res, len - res,
3284 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3285 gctx->gcm.len.u[1] += bulk;
3289 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3290 in + bulk, out + bulk, len - bulk))
3296 if (!ctx->encrypt) {
3297 if (gctx->taglen < 0)
3299 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3304 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3306 /* Don't reuse the IV */
3313 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3314 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3315 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3316 | EVP_CIPH_CUSTOM_COPY)
3318 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3319 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3320 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3321 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3322 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3323 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
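/*
 * Editorial sketch (compiled out): AES-256-GCM encryption through the EVP
 * ciphers declared above. The nonce length is set before the key
 * (EVP_CTRL_AEAD_SET_IVLEN), AAD is passed with a NULL output buffer, and
 * the tag is fetched after Final. All buffer names/sizes are assumptions
 * for illustration.
 */
#if 0
static int gcm_encrypt_sketch(const unsigned char key[32],
                              const unsigned char nonce[12],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *msg, int msglen,
                              unsigned char *out, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok;

    ok = c != NULL
         && EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL)
         && EVP_EncryptInit_ex(c, NULL, NULL, key, nonce)
         && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)
         && EVP_EncryptUpdate(c, out, &outl, msg, msglen)
         && EVP_EncryptFinal_ex(c, out + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok ? outl + tmplen : -1;
}
#endif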
3325 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3327 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3328 if (type == EVP_CTRL_COPY) {
3329 EVP_CIPHER_CTX *out = ptr;
3330 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3331 if (xctx->xts.key1) {
3332 if (xctx->xts.key1 != &xctx->ks1)
3334 xctx_out->xts.key1 = &xctx_out->ks1;
3336 if (xctx->xts.key2) {
3337 if (xctx->xts.key2 != &xctx->ks2)
3339 xctx_out->xts.key2 = &xctx_out->ks2;
3342 } else if (type != EVP_CTRL_INIT)
3344 /* key1 and key2 are used as an indicator that both key and IV are set */
3345 xctx->xts.key1 = NULL;
3346 xctx->xts.key2 = NULL;
3350 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3351 const unsigned char *iv, int enc)
3353 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3360 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3362 xctx->stream = NULL;
3364 /* key_len is two AES keys */
3365 #ifdef HWAES_CAPABLE
3366 if (HWAES_CAPABLE) {
3368 HWAES_set_encrypt_key(key,
3369 EVP_CIPHER_CTX_key_length(ctx) * 4,
3371 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3372 # ifdef HWAES_xts_encrypt
3373 xctx->stream = HWAES_xts_encrypt;
3376 HWAES_set_decrypt_key(key,
3377 EVP_CIPHER_CTX_key_length(ctx) * 4,
3379 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3380 # ifdef HWAES_xts_decrypt
3381 xctx->stream = HWAES_xts_decrypt;
3385 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3386 EVP_CIPHER_CTX_key_length(ctx) * 4,
3388 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3390 xctx->xts.key1 = &xctx->ks1;
3394 #ifdef BSAES_CAPABLE
3396 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3399 #ifdef VPAES_CAPABLE
3400 if (VPAES_CAPABLE) {
3402 vpaes_set_encrypt_key(key,
3403 EVP_CIPHER_CTX_key_length(ctx) * 4,
3405 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3407 vpaes_set_decrypt_key(key,
3408 EVP_CIPHER_CTX_key_length(ctx) * 4,
3410 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3413 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3414 EVP_CIPHER_CTX_key_length(ctx) * 4,
3416 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3418 xctx->xts.key1 = &xctx->ks1;
3422 (void)0; /* terminate potentially open 'else' */
3425 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3427 xctx->xts.block1 = (block128_f) AES_encrypt;
3429 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3431 xctx->xts.block1 = (block128_f) AES_decrypt;
3434 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3435 EVP_CIPHER_CTX_key_length(ctx) * 4,
3437 xctx->xts.block2 = (block128_f) AES_encrypt;
3439 xctx->xts.key1 = &xctx->ks1;
3443 xctx->xts.key2 = &xctx->ks2;
3444 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3450 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3451 const unsigned char *in, size_t len)
3453 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3455 if (xctx->xts.key1 == NULL
3456 || xctx->xts.key2 == NULL
3459 || len < AES_BLOCK_SIZE)
3463 * Verify that the two keys are different.
3465 * This addresses the vulnerability described in Rogaway's September 2004
3466 * paper (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf):
3467 * "Efficient Instantiations of Tweakable Blockciphers and Refinements
3468 * to Modes OCB and PMAC".
3470 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states that:
3471 * "The check for Key_1 != Key_2 shall be done at any place BEFORE
3472 * using the keys in the XTS-AES algorithm to process data with them."
3474 if (CRYPTO_memcmp(xctx->xts.key1, xctx->xts.key2,
3475 EVP_CIPHER_CTX_key_length(ctx) / 2) == 0)
3479 (*xctx->stream) (in, out, len,
3480 xctx->xts.key1, xctx->xts.key2,
3481 EVP_CIPHER_CTX_iv_noconst(ctx));
3482 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3484 EVP_CIPHER_CTX_encrypting(ctx)))
3489 #define aes_xts_cleanup NULL
3491 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3492 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3493 | EVP_CIPH_CUSTOM_COPY)
3495 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3496 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
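/*
 * Editorial sketch (compiled out): AES-256-XTS via the ciphers declared
 * above. The key is two concatenated AES-256 keys (64 bytes, and the halves
 * must differ, per the check in aes_xts_cipher); the IV is the 16-byte
 * tweak (e.g. a sector number); input must be at least one block. Names
 * and sizes are illustrative assumptions.
 */
#if 0
static int xts_encrypt_sketch(const unsigned char key[64],
                              const unsigned char tweak[16],
                              const unsigned char *in, int inlen,
                              unsigned char *out)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, ok;

    ok = c != NULL
         && inlen >= AES_BLOCK_SIZE
         && EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak)
         && EVP_EncryptUpdate(c, out, &outl, in, inlen);
    EVP_CIPHER_CTX_free(c);
    return ok ? outl : -1;
}
#endif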
3498 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3500 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3509 cctx->tls_aad_len = -1;
3512 case EVP_CTRL_AEAD_TLS1_AAD:
3513 /* Save the AAD for later use */
3514 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3516 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3517 cctx->tls_aad_len = arg;
3520 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3521 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3522 /* Correct length for explicit IV */
3523 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3525 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3526 /* If decrypting correct for tag too */
3527 if (!EVP_CIPHER_CTX_encrypting(c)) {
3532 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3533 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3535 /* Extra padding: tag appended to record */
3538 case EVP_CTRL_CCM_SET_IV_FIXED:
3539 /* Sanity check length */
3540 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3542 /* Just copy to first part of IV */
3543 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3546 case EVP_CTRL_AEAD_SET_IVLEN:
3549 case EVP_CTRL_CCM_SET_L:
3550 if (arg < 2 || arg > 8)
3555 case EVP_CTRL_AEAD_SET_TAG:
3556 if ((arg & 1) || arg < 4 || arg > 16)
3558 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3562 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3567 case EVP_CTRL_AEAD_GET_TAG:
3568 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3570 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3579 EVP_CIPHER_CTX *out = ptr;
3580 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3581 if (cctx->ccm.key) {
3582 if (cctx->ccm.key != &cctx->ks)
3584 cctx_out->ccm.key = &cctx_out->ks;
3595 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3596 const unsigned char *iv, int enc)
3598 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3603 #ifdef HWAES_CAPABLE
3604 if (HWAES_CAPABLE) {
3605 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3608 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3609 &cctx->ks, (block128_f) HWAES_encrypt);
3615 #ifdef VPAES_CAPABLE
3616 if (VPAES_CAPABLE) {
3617 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3619 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3620 &cctx->ks, (block128_f) vpaes_encrypt);
3626 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3628 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3629 &cctx->ks, (block128_f) AES_encrypt);
3634 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3640 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3641 const unsigned char *in, size_t len)
3643 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3644 CCM128_CONTEXT *ccm = &cctx->ccm;
3645 /* Encrypt/decrypt must be performed in place */
3646 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3648 /* If encrypting set explicit IV from sequence number (start of AAD) */
3649 if (EVP_CIPHER_CTX_encrypting(ctx))
3650 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3651 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3652 /* Get rest of IV from explicit IV */
3653 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3654 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3655 /* Correct length value */
3656 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3657 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3661 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3662 /* Fix buffer to point to payload */
3663 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3664 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3665 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3666 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3668 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3670 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3672 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3674 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3676 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3677 unsigned char tag[16];
3678 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3679 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3683 OPENSSL_cleanse(out, len);
3688 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3689 const unsigned char *in, size_t len)
3691 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3692 CCM128_CONTEXT *ccm = &cctx->ccm;
3693 /* If not set up, return error */
3697 if (cctx->tls_aad_len >= 0)
3698 return aes_ccm_tls_cipher(ctx, out, in, len);
3700 /* EVP_*Final() doesn't return any data */
3701 if (in == NULL && out != NULL)
3707 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3711 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3717 /* If have AAD need message length */
3718 if (!cctx->len_set && len)
3720 CRYPTO_ccm128_aad(ccm, in, len);
3723 /* If not set length yet do it */
3724 if (!cctx->len_set) {
3725 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3730 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3731 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3733 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3739 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3741 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3742 unsigned char tag[16];
3743 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3744 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3750 OPENSSL_cleanse(out, len);
3758 #define aes_ccm_cleanup NULL
3760 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3761 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3762 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3763 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3764 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3765 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3772 /* Indicates if IV has been set */
3776 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3777 const unsigned char *iv, int enc)
3779 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3783 if (EVP_CIPHER_CTX_encrypting(ctx))
3784 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3787 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3793 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3794 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3799 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3800 const unsigned char *in, size_t inlen)
3802 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3804 /* AES wrap with padding has IV length of 4, without padding 8 */
3805 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3806 /* No final operation so always return zero length */
3809 /* Input length must always be non-zero */
3812 /* If decrypting need at least 16 bytes and multiple of 8 */
3813 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3815 /* If not padding input must be multiple of 8 */
3816 if (!pad && inlen & 0x7)
3818 if (is_partially_overlapping(out, in, inlen)) {
3819 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3823 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3824 /* If padding round up to multiple of 8 */
3826 inlen = (inlen + 7) / 8 * 8;
3831 * If not padding, the output will be exactly 8 bytes smaller than
3832 * the input. If padding, it will be at least 8 bytes smaller, but we
3833 * don't know exactly how much.
3839 if (EVP_CIPHER_CTX_encrypting(ctx))
3840 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3842 (block128_f) AES_encrypt);
3844 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3846 (block128_f) AES_decrypt);
3848 if (EVP_CIPHER_CTX_encrypting(ctx))
3849 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3850 out, in, inlen, (block128_f) AES_encrypt);
3852 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3853 out, in, inlen, (block128_f) AES_decrypt);
3855 return rv ? (int)rv : -1;
3858 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3859 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3860 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
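/*
 * Editorial sketch (compiled out): wrapping a 16-byte key with AES-128-WRAP
 * (RFC 3394) via the ciphers defined below. Wrap-mode ciphers are disabled
 * by default at the EVP layer, so EVP_CIPHER_CTX_FLAG_WRAP_ALLOW must be
 * set; with a NULL IV the default ICV is used. Output is input length plus
 * 8; buffer names are illustrative assumptions.
 */
#if 0
static int wrap_sketch(const unsigned char kek[16],
                       const unsigned char keydata[16],
                       unsigned char out[24])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, ok;

    if (c == NULL)
        return -1;
    EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    ok = EVP_EncryptInit_ex(c, EVP_aes_128_wrap(), NULL, kek, NULL)
         && EVP_EncryptUpdate(c, out, &outl, keydata, 16);
    EVP_CIPHER_CTX_free(c);
    return ok ? outl : -1;
}
#endif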
3862 static const EVP_CIPHER aes_128_wrap = {
3864 8, 16, 8, WRAP_FLAGS,
3865 aes_wrap_init_key, aes_wrap_cipher,
3867 sizeof(EVP_AES_WRAP_CTX),
3868 NULL, NULL, NULL, NULL
3871 const EVP_CIPHER *EVP_aes_128_wrap(void)
3873 return &aes_128_wrap;
3876 static const EVP_CIPHER aes_192_wrap = {
3878 8, 24, 8, WRAP_FLAGS,
3879 aes_wrap_init_key, aes_wrap_cipher,
3881 sizeof(EVP_AES_WRAP_CTX),
3882 NULL, NULL, NULL, NULL
3885 const EVP_CIPHER *EVP_aes_192_wrap(void)
3887 return &aes_192_wrap;
3890 static const EVP_CIPHER aes_256_wrap = {
3892 8, 32, 8, WRAP_FLAGS,
3893 aes_wrap_init_key, aes_wrap_cipher,
3895 sizeof(EVP_AES_WRAP_CTX),
3896 NULL, NULL, NULL, NULL
3899 const EVP_CIPHER *EVP_aes_256_wrap(void)
3901 return &aes_256_wrap;
3904 static const EVP_CIPHER aes_128_wrap_pad = {
3905 NID_id_aes128_wrap_pad,
3906 8, 16, 4, WRAP_FLAGS,
3907 aes_wrap_init_key, aes_wrap_cipher,
3909 sizeof(EVP_AES_WRAP_CTX),
3910 NULL, NULL, NULL, NULL
3913 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3915 return &aes_128_wrap_pad;
3918 static const EVP_CIPHER aes_192_wrap_pad = {
3919 NID_id_aes192_wrap_pad,
3920 8, 24, 4, WRAP_FLAGS,
3921 aes_wrap_init_key, aes_wrap_cipher,
3923 sizeof(EVP_AES_WRAP_CTX),
3924 NULL, NULL, NULL, NULL
3927 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3929 return &aes_192_wrap_pad;
3932 static const EVP_CIPHER aes_256_wrap_pad = {
3933 NID_id_aes256_wrap_pad,
3934 8, 32, 4, WRAP_FLAGS,
3935 aes_wrap_init_key, aes_wrap_cipher,
3937 sizeof(EVP_AES_WRAP_CTX),
3938 NULL, NULL, NULL, NULL
3941 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3943 return &aes_256_wrap_pad;
3946 #ifndef OPENSSL_NO_OCB
3947 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3949 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3950 EVP_CIPHER_CTX *newc;
3951 EVP_AES_OCB_CTX *new_octx;
3957 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
3958 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3960 octx->data_buf_len = 0;
3961 octx->aad_buf_len = 0;
3964 case EVP_CTRL_AEAD_SET_IVLEN:
3965 /* IV len must be 1 to 15 */
3966 if (arg <= 0 || arg > 15)
3972 case EVP_CTRL_AEAD_SET_TAG:
3974 /* Tag len must be 0 to 16 */
3975 if (arg < 0 || arg > 16)
3981 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
3983 memcpy(octx->tag, ptr, arg);
3986 case EVP_CTRL_AEAD_GET_TAG:
3987 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
3990 memcpy(ptr, octx->tag, arg);
3994 newc = (EVP_CIPHER_CTX *)ptr;
3995 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
3996 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3997 &new_octx->ksenc.ks,
3998 &new_octx->ksdec.ks);
4006 # ifdef HWAES_CAPABLE
4007 # ifdef HWAES_ocb_encrypt
4008 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
4009 size_t blocks, const void *key,
4010 size_t start_block_num,
4011 unsigned char offset_i[16],
4012 const unsigned char L_[][16],
4013 unsigned char checksum[16]);
4015 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
4017 # ifdef HWAES_ocb_decrypt
4018 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
4019 size_t blocks, const void *key,
4020 size_t start_block_num,
4021 unsigned char offset_i[16],
4022 const unsigned char L_[][16],
4023 unsigned char checksum[16]);
4025 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
4029 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4030 const unsigned char *iv, int enc)
4032 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4038 * We set both the encrypt and decrypt key here because decrypt
4039 * needs both. We could possibly optimise to remove setting the
4040 * decrypt for an encryption operation.
4042 # ifdef HWAES_CAPABLE
4043 if (HWAES_CAPABLE) {
4044 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4046 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4048 if (!CRYPTO_ocb128_init(&octx->ocb,
4049 &octx->ksenc.ks, &octx->ksdec.ks,
4050 (block128_f) HWAES_encrypt,
4051 (block128_f) HWAES_decrypt,
4052 enc ? HWAES_ocb_encrypt
4053 : HWAES_ocb_decrypt))
4058 # ifdef VPAES_CAPABLE
4059 if (VPAES_CAPABLE) {
4060 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4062 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4064 if (!CRYPTO_ocb128_init(&octx->ocb,
4065 &octx->ksenc.ks, &octx->ksdec.ks,
4066 (block128_f) vpaes_encrypt,
4067 (block128_f) vpaes_decrypt,
4073 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4075 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4077 if (!CRYPTO_ocb128_init(&octx->ocb,
4078 &octx->ksenc.ks, &octx->ksdec.ks,
4079 (block128_f) AES_encrypt,
4080 (block128_f) AES_decrypt,
4087 * If we have an iv we can set it directly, otherwise use saved IV.
4089 if (iv == NULL && octx->iv_set)
4092 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
4099 /* If key set use IV, otherwise copy */
4101 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
4103 memcpy(octx->iv, iv, octx->ivlen);
4109 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4110 const unsigned char *in, size_t len)
4114 int written_len = 0;
4115 size_t trailing_len;
4116 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4118 /* If IV or Key not set then return error */
4127 * Need to ensure we are only passing full blocks to the low-level OCB
4128 * routines. We do it here rather than in EVP_EncryptUpdate/
4129 * EVP_DecryptUpdate because we need to pass full blocks of AAD too,
4130 * and those routines don't support that.
4133 /* Are we dealing with AAD or normal data here? */
4135 buf = octx->aad_buf;
4136 buf_len = &(octx->aad_buf_len);
4138 buf = octx->data_buf;
4139 buf_len = &(octx->data_buf_len);
4141 if (is_partially_overlapping(out + *buf_len, in, len)) {
4142 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
4148 * If we've got a partially filled buffer from a previous call then
4149 * use that data first
4152 unsigned int remaining;
4154 remaining = AES_BLOCK_SIZE - (*buf_len);
4155 if (remaining > len) {
4156 memcpy(buf + (*buf_len), in, len);
4160 memcpy(buf + (*buf_len), in, remaining);
4163 * If we get here we've filled the buffer, so process it
4168 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4170 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4171 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4175 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4179 written_len = AES_BLOCK_SIZE;
4182 out += AES_BLOCK_SIZE;
4185 /* Do we have a partial block to handle at the end? */
4186 trailing_len = len % AES_BLOCK_SIZE;
4189 * If we've got some full blocks to handle, then process these first
4191 if (len != trailing_len) {
4193 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4195 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4196 if (!CRYPTO_ocb128_encrypt
4197 (&octx->ocb, in, out, len - trailing_len))
4200 if (!CRYPTO_ocb128_decrypt
4201 (&octx->ocb, in, out, len - trailing_len))
4204 written_len += len - trailing_len;
4205 in += len - trailing_len;
4208 /* Handle any trailing partial block */
4209 if (trailing_len > 0) {
4210 memcpy(buf, in, trailing_len);
4211 *buf_len = trailing_len;
4217 * First of all empty the buffer of any partial block that we might
4218 * have been provided - both for data and AAD
4220 if (octx->data_buf_len > 0) {
4221 if (EVP_CIPHER_CTX_encrypting(ctx)) {
4222 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4223 octx->data_buf_len))
4226 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4227 octx->data_buf_len))
4230 written_len = octx->data_buf_len;
4231 octx->data_buf_len = 0;
4233 if (octx->aad_buf_len > 0) {
4234 if (!CRYPTO_ocb128_aad
4235 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4237 octx->aad_buf_len = 0;
4239 /* If decrypting then verify */
4240 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4241 if (octx->taglen < 0)
4243 if (CRYPTO_ocb128_finish(&octx->ocb,
4244 octx->tag, octx->taglen) != 0)
4249 /* If encrypting then just get the tag */
4250 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4252 /* Don't reuse the IV */
4258 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4260 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4261 CRYPTO_ocb128_cleanup(&octx->ocb);
4265 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4266 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4267 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4268 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4269 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4270 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
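/*
 * Editorial sketch (compiled out): AES-128-OCB through the EVP ciphers
 * declared above, assuming the default 12-byte nonce and 16-byte tag. As
 * with GCM, AAD uses a NULL output buffer and the tag is read after Final;
 * buffer names are illustrative assumptions.
 */
#if 0
static int ocb_encrypt_sketch(const unsigned char key[16],
                              const unsigned char nonce[12],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *msg, int msglen,
                              unsigned char *out, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok;

    ok = c != NULL
         && EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, nonce)
         && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)
         && EVP_EncryptUpdate(c, out, &outl, msg, msglen)
         && EVP_EncryptFinal_ex(c, out + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok ? outl + tmplen : -1;
}
#endif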
4271 #endif /* OPENSSL_NO_OCB */
4274 #ifndef OPENSSL_NO_SIV
4276 typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4278 #define aesni_siv_init_key aes_siv_init_key
4279 static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4280 const unsigned char *iv, int enc)
4282 const EVP_CIPHER *ctr;
4283 const EVP_CIPHER *cbc;
4284 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4285 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
4292 cbc = EVP_aes_128_cbc();
4293 ctr = EVP_aes_128_ctr();
4296 cbc = EVP_aes_192_cbc();
4297 ctr = EVP_aes_192_ctr();
4300 cbc = EVP_aes_256_cbc();
4301 ctr = EVP_aes_256_ctr();
4307 /* klen is the key length of the underlying cipher, not of the input
4308 key, which must be twice as long */
4309 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
4312 #define aesni_siv_cipher aes_siv_cipher
4313 static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4314 const unsigned char *in, size_t len)
4316 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4318 /* EncryptFinal or DecryptFinal */
4320 return CRYPTO_siv128_finish(sctx);
4322 /* Deal with associated data */
4324 return CRYPTO_siv128_aad(sctx, in, len);
4326 if (EVP_CIPHER_CTX_encrypting(ctx))
4327 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4329 return CRYPTO_siv128_decrypt(sctx, in, out, len);
4332 #define aesni_siv_cleanup aes_siv_cleanup
4333 static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4335 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4337 return CRYPTO_siv128_cleanup(sctx);
4341 #define aesni_siv_ctrl aes_siv_ctrl
4342 static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4344 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4345 SIV128_CONTEXT *sctx_out;
4349 return CRYPTO_siv128_cleanup(sctx);
4351 case EVP_CTRL_SET_SPEED:
4352 return CRYPTO_siv128_speed(sctx, arg);
4354 case EVP_CTRL_AEAD_SET_TAG:
4355 if (!EVP_CIPHER_CTX_encrypting(c))
4356 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4359 case EVP_CTRL_AEAD_GET_TAG:
4360 if (!EVP_CIPHER_CTX_encrypting(c))
4362 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4365 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4366 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4374 #define SIV_FLAGS (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
4375 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
4376 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
4377 | EVP_CIPH_CTRL_INIT)
4379 BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
4380 BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
4381 BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)
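/*
 * Editorial sketch (compiled out): deterministic AES-SIV (RFC 5297) via the
 * ciphers declared above. The input key is double length (two AES-128
 * subkeys here); there is no nonce, AAD is optional, and the 16-byte SIV
 * tag is fetched after Final. Names and sizes are assumptions for
 * illustration.
 */
#if 0
static int siv_encrypt_sketch(const unsigned char key[32],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *msg, int msglen,
                              unsigned char *out, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok;

    ok = c != NULL
         && EVP_EncryptInit_ex(c, EVP_aes_128_siv(), NULL, key, NULL)
         && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)
         && EVP_EncryptUpdate(c, out, &outl, msg, msglen)
         && EVP_EncryptFinal_ex(c, out + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok ? outl + tmplen : -1;
}
#endif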