/*
 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>

#include <openssl/aes.h>
#include "internal/evp_int.h"
#include "modes_lcl.h"
#include <openssl/rand.h>
#include <openssl/cmac.h>
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC 3610 */
    int tls_aad_len;            /* TLS AAD length */
#ifndef OPENSSL_NO_OCB
    } ksenc;                    /* AES key schedule to use for encryption */
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int ivlen;                  /* IV length */
#define MAXBITCHUNK     ((size_t)1 << (sizeof(size_t) * 8 - 4))
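/*
 * Worked example: with an 8-byte size_t, MAXBITCHUNK == (size_t)1 << 60,
 * so a chunk of that many bytes can still be expressed as a bit count
 * (len << 3) without overflowing size_t.
 */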
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);

void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
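{
    /*
     * The body is elided in this excerpt; this is a minimal sketch
     * consistent with the callers below, which pass a pointer to the final
     * 8 bytes of the IV: add one to an 8-byte big-endian counter,
     * propagating the carry.
     */
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n > 0);
}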
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"

# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)

# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#if defined(AES_ASM) && !defined(I386_ONLY) && (  \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )

extern unsigned int OPENSSL_ia32cap_P[];

# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))

# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))

# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (57 - 32)))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in, unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t blocks, const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t blocks, const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in, unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in, unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
# define AES_GCM_ASM(gctx)  (gctx->ctr == aesni_ctr32_encrypt_blocks && \
                             gctx->gcm.ghash == gcm_ghash_avx)
# define AES_GCM_ASM2(gctx) (gctx->gcm.block == (block128_f)aesni_encrypt && \
                             gctx->gcm.ghash == gcm_ghash_avx)
# undef AES_GCM_ASM2            /* minor size optimization */
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));
    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));
    return 1;
}
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);

    aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                          &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
    gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
    /*
     * If we have an IV, we can set it directly; otherwise use the saved IV.
     */
    if (iv == NULL && gctx->iv_set)
        iv = gctx->iv;
    if (iv)
        CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);

    /* If key set, use IV; otherwise copy it. */
    if (gctx->key_set)
        CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    else
        memcpy(gctx->iv, iv, gctx->ivlen);
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);

    /* key_len is two AES keys */
    if (enc) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks1.ks);
        xctx->xts.block1 = (block128_f) aesni_encrypt;
        xctx->stream = aesni_xts_encrypt;
    } else {
        aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks1.ks);
        xctx->xts.block1 = (block128_f) aesni_decrypt;
        xctx->stream = aesni_xts_decrypt;
    }

    aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                          &xctx->ks2.ks);
    xctx->xts.block2 = (block128_f) aesni_encrypt;

    xctx->xts.key1 = &xctx->ks1;

    xctx->xts.key2 = &xctx->ks2;
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);

    aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                          &cctx->ks.ks);
    CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                       &cctx->ks, (block128_f) aesni_encrypt);
    cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
        (ccm128_f) aesni_ccm64_decrypt_blocks;

    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);

    /*
     * We set both the encrypt and decrypt key here because decrypt
     * needs both. We could possibly optimise to remove setting the
     * decrypt key for an encryption operation.
     */
    aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                          &octx->ksenc.ks);
    aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                          &octx->ksdec.ks);
    if (!CRYPTO_ocb128_init(&octx->ocb,
                            &octx->ksenc.ks, &octx->ksdec.ks,
                            (block128_f) aesni_encrypt,
                            (block128_f) aesni_decrypt,
                            enc ? aesni_ocb_encrypt
                                : aesni_ocb_decrypt))
        return 0;

    /*
     * If we have an IV, we can set it directly; otherwise use the saved IV.
     */
    if (iv == NULL && octx->iv_set)
        iv = octx->iv;
    if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        != 1)
        return 0;

    /* If key set, use IV; otherwise copy it. */
    if (octx->key_set)
        CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
    else
        memcpy(octx->iv, iv, octx->ivlen);
# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen,                 \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
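/*
 * Illustrative expansion (hypothetical invocation, for orientation only):
 *     BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, flags)
 * defines the static EVP_CIPHER tables aesni_128_cbc and aes_128_cbc plus
 * the selector
 *     const EVP_CIPHER *EVP_aes_128_cbc(void)
 *     { return AESNI_CAPABLE ? &aesni_128_cbc : &aes_128_cbc; }
 * so a runtime capability check transparently picks the AES-NI
 * implementation when the CPU supports it.
 */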
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter means the code size grows to that occupied
 * by multiple key-length specific subroutines, so why fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
    int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;

    aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                       (block128_f) aes_t4_encrypt);
    switch (bits) {
    case 128:
        gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
        break;
    case 192:
        gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
        break;
    case 256:
        gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
        break;
    }
    /*
     * If we have an IV, we can set it directly; otherwise use the saved IV.
     */
    if (iv == NULL && gctx->iv_set)
        iv = gctx->iv;
    if (iv)
        CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);

    /* If key set, use IV; otherwise copy it. */
    if (gctx->key_set)
        CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    else
        memcpy(gctx->iv, iv, gctx->ivlen);
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
    int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;

    /* key_len is two AES keys */
    if (enc) {
        aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
        xctx->xts.block1 = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            xctx->stream = aes128_t4_xts_encrypt;
            break;
        case 256:
            xctx->stream = aes256_t4_xts_encrypt;
            break;
        }
    } else {
        aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                               &xctx->ks1.ks);
        xctx->xts.block1 = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            xctx->stream = aes128_t4_xts_decrypt;
            break;
        case 256:
            xctx->stream = aes256_t4_xts_decrypt;
            break;
        }
    }

    aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                           EVP_CIPHER_CTX_key_length(ctx) * 4,
                           &xctx->ks2.ks);
    xctx->xts.block2 = (block128_f) aes_t4_encrypt;
    xctx->xts.key1 = &xctx->ks1;
    xctx->xts.key2 = &xctx->ks2;
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
    int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;

    aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
    CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                       &cctx->ks, (block128_f) aes_t4_encrypt);

    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);

    /*
     * We set both the encrypt and decrypt key here because decrypt
     * needs both. We could possibly optimise to remove setting the
     * decrypt key for an encryption operation.
     */
    aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                           &octx->ksenc.ks);
    aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                           &octx->ksdec.ks);
    if (!CRYPTO_ocb128_init(&octx->ocb,
                            &octx->ksenc.ks, &octx->ksdec.ks,
                            (block128_f) aes_t4_encrypt,
                            (block128_f) aes_t4_decrypt,
                            NULL))
        return 0;

    /*
     * If we have an IV, we can set it directly; otherwise use the saved IV.
     */
    if (iv == NULL && octx->iv_set)
        iv = octx->iv;
    if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        != 1)
        return 0;

    /* If key set, use IV; otherwise copy it. */
    if (octx->key_set)
        CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
    else
        memcpy(octx->iv, iv, octx->ivlen);
# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */
# ifndef OPENSSL_NO_SIV
#  define aes_t4_siv_init_key aes_siv_init_key
#  define aes_t4_siv_cipher aes_siv_cipher
# endif                         /* OPENSSL_NO_SIV */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen,                 \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)

# include "s390x_arch.h"

        /*
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        /* KM-AES parameter block - end */
} S390X_AES_ECB_CTX;
        /*
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
} S390X_AES_OFB_CTX;
        /*
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
} S390X_AES_CFB_CTX;
        /*
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];

    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;
        /*
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if AES hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

            /*
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            unsigned char pad[140];
} S390X_AES_CCM_CTX;
/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen)  (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
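/*
 * Worked example: keylen == 24 gives (((24 << 3) - 128) >> 6) == 1, i.e.
 * S390X_AES_128 + 1, the function code for AES-192, matching the
 * [16,24,32] -> [18,19,20] mapping noted above.
 */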
/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_256))
# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 1
# define S390X_aes_256_cbc_CAPABLE 1
# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE

static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);
    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}
# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);
    return 1;
}
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);

    /* First consume any keystream bytes left over from a previous call. */
        *out = *in ^ cctx->kmo.param.cv[n];

    len &= ~(size_t)0xf;
    if (len)
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

    /* For a trailing partial block, generate one more keystream block. */
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);
        out[n] = in[n] ^ cctx->kmo.param.cv[n];
# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;       /* 16 bytes cipher feedback */
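    /*
     * e.g. for AES-128 CFB-128 this yields fc == 18 | (16 << 24): the KMF
     * function code in the low byte and the cipher-feedback length in
     * bits 24-31 of the value passed to s390x_kmf.
     */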
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

    /* First consume any feedback bytes left over from a previous call. */
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;

    len &= ~(size_t)0xf;
    if (len)
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

    /* For a trailing partial block, encrypt the feedback value once more. */
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        out[n] = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256))

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;        /* 1 byte cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);

    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    return 1;
}
# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 1
# define S390X_aes_256_ctr_CAPABLE 1
# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
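/*
 * Worked example: a 13-byte IV pads to (((13 + 15) >> 4) << 4) + 16 == 32
 * bytes: the IV rounded up to a whole 16-byte block, plus one block whose
 * last 8 bytes hold the IV bit length (filled in by s390x_aes_gcm_ctrl
 * below).
 */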
/*
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    /* First top up a partially filled AAD block from a previous call. */
        ctx->ares[n] = *aad;

    /* ctx->ares contains a complete block if the offset has wrapped around */
    if (!n) {
        s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
    }

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
    }

    /* Save a trailing partial AAD block for the next call. */
        ctx->ares[rem] = aad[rem];
/*
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    /* First top up a partially filled data block from a previous call. */
    n = ctx->mreslen;
    inptr = in;
    inlen = len;
    while (n && inlen) {
        ctx->mres[n] = *inptr;
        n = (n + 1) & 0xf;
        ++inptr;
        --inlen;
    }

    /* ctx->mres contains a complete block if the offset has wrapped around */
    if (!n) {
        s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
        /*
         * The previous call already encrypted/decrypted its remainder,
         * see the comment below.
         */
    }

    rem = len & 0xf;
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
    }

    /*
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
/*
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
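        /*
         * For a 96-bit IV this is exactly GCM's J0 construction from
         * SP 800-38D: J0 = IV || 0^31 || 1. cv mirrors J0's 32-bit counter
         * word and is advanced as data blocks are processed.
         */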
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
/*
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = EVP_CIPHER_CTX_iv_length(c);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->iv = iv;
        gctx->ivlen = ivlen;
        gctx->taglen = -1;
        gctx->tls_aad_len = -1;
        return 1;
    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        iv = EVP_CIPHER_CTX_iv_noconst(c);
        len = S390X_gcm_ivpadlen(arg);

        /* Allocate memory for iv if needed. */
        if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
            if (gctx->iv != iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        /* Zero-pad the iv and store its bit length in the last 8 bytes. */
        memset(gctx->iv + arg, 0, len - arg - 8);
        *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;
    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;
    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;
    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting, correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;
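        /*
         * Worked example for the length rewrite above: a received TLS record
         * of 40 ciphertext bytes carries an 8-byte explicit IV and a 16-byte
         * tag, so when decrypting, the length field in the AAD is rewritten
         * to 40 - 8 - 16 = 16, the true plaintext length.
         */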
    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}
/*
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL)
            s390x_aes_gcm_setiv(gctx, iv);
        gctx->key_set = 1;
    } else if (gctx->key_set) {
        s390x_aes_gcm_setiv(gctx, iv);
    } else {
        memcpy(gctx->iv, iv, gctx->ivlen);
    }
    return 1;
}
/*
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 records. We do this on the
     * encrypting side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
        return -1;
    }

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->tls_aad_len = -1;
    return rv;
}
/*
 * Called from the EVP layer to initialize the context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext, or process a TLS packet, depending on context. Returns the
 * number of bytes written on success. Otherwise -1 is returned. Code is
 * big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (in != NULL) {
        /* By EVP convention, AAD is passed with out == NULL. */
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else if (s390x_aes_gcm(gctx, in, out, len)) {
            return -1;
        }
        return len;
    }

    gctx->kma.param.taadl <<= 3;
    gctx->kma.param.tpcl <<= 3;
    s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
    /*
     * Recall that we already en-/decrypted gctx->mres and returned it to
     * the caller, so only cleanse the temporary buffer.
     */
    OPENSSL_cleanse(tmp, gctx->mreslen);

    enc = EVP_CIPHER_CTX_encrypting(ctx);
    if (!enc) {
        if (gctx->taglen < 0)
            return -1;

        buf = EVP_CIPHER_CTX_buf_noconst(ctx);
        if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
            return -1;
    }
    return 0;
}
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    iv = EVP_CIPHER_CTX_iv(c);
    if (iv != gctx->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}
# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE 1    /* checked by callee */
# define S390X_aes_256_xts_CAPABLE 1

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup
# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmac[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmac[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmac[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

# define S390X_CCM_AAD_FLAG 0x40
/*
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}
/*
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    /* Encode the AAD length as specified by RFC 3610. */
    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    rem = alen & 0xf;
    alen &= ~(size_t)0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}
/*
 * En/de-crypt plain/cipher-text. Compute the tag from the plaintext. Returns
 * 0 for success.
 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
{
    size_t n, rem;
    unsigned int i, l, num;
    unsigned char flags;

    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
    l = flags & 0x7;
    ctx->aes.ccm.nonce.b[0] = l;

    /*
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    n = 0;
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
        n <<= 8;
    }
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

    if (n != len)
        return -1;              /* length mismatch */

    /* Two operations per block plus one for tag encryption */
    ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
    if (ctx->aes.ccm.blocks > (1ULL << 61))
        return -2;              /* too much data */
    num = 0;
    rem = len & 0xf;
    len &= ~(size_t)0xf;

    if (enc) {
        /* mac-then-encrypt */
        s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }

        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    } else {
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }
    }

    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
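    /*
     * This is the RFC 3610 tag construction: zeroing the counter bytes above
     * forms the initial counter block A_0, km computes S_0 = E(K, A_0), and
     * the two XORs fold S_0 into the CBC-MAC, giving T = CBC-MAC XOR S_0.
     */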
    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
    return 0;
}
/*
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

    if (out != in
        || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        return -1;

    if (enc) {
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    }

    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    /*
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    } else {
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
                               cctx->aes.ccm.m))
                return len;
        }

        OPENSSL_cleanse(out, len);
        return -1;
    }
}
/*
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
 * returned.
 */
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec;
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);

        /* Store encoded m and l. */
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
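        /*
         * Worked example: with the defaults l = 8 and m = 12 set in
         * s390x_aes_ccm_ctrl's EVP_CTRL_INIT, this stores
         * ((8 - 1) & 0x7) | (((12 - 2) >> 1) & 0x7) << 3 == 7 | 40 == 0x2f,
         * the RFC 3610 B_0 flags octet (before the AAD bit is OR'd in by
         * s390x_aes_ccm_aad).
         */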
        memset(cctx->aes.ccm.nonce.b + 1, 0,
               sizeof(cctx->aes.ccm.nonce.b) - 1);
        cctx->aes.ccm.blocks = 0;
        cctx->aes.ccm.key_set = 1;
    }

    if (iv != NULL) {
        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
        cctx->aes.ccm.iv_set = 1;
    }

    return 1;
}
/*
 * Called from the EVP layer to initialize the context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * plaintext, or process a TLS packet, depending on context. Returns the
 * number of bytes written on success. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    unsigned char *buf, *ivec;
    int rv;

    if (!cctx->aes.ccm.key_set)
        return -1;

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt,
     * so integrity must already be checked in Update(), i.e. before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
            s390x_aes_ccm_setiv(cctx, ivec, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* Update(): Process message. */
    if (!cctx->aes.ccm.len_set) {
        /*
         * In case the message length was not previously set explicitly via
         * Update(), set it now.
         */
        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        s390x_aes_ccm_setiv(cctx, ivec, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;

        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        if (rv == -1)
            OPENSSL_cleanse(out, len);

        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}
/*
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;
2326 case EVP_CTRL_AEAD_TLS1_AAD:
2327 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2330 /* Save the aad for later use. */
2331 buf = EVP_CIPHER_CTX_buf_noconst(c);
2332 memcpy(buf, ptr, arg);
2333 cctx->aes.ccm.tls_aad_len = arg;
2335 len = buf[arg - 2] << 8 | buf[arg - 1];
2336 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2339 /* Correct length for explicit iv. */
2340 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2342 enc = EVP_CIPHER_CTX_encrypting(c);
2344 if (len < cctx->aes.ccm.m)
2347 /* Correct length for tag. */
2348 len -= cctx->aes.ccm.m;
2351 buf[arg - 2] = len >> 8;
2352 buf[arg - 1] = len & 0xff;
2354 /* Extra padding: tag appended to record. */
2355 return cctx->aes.ccm.m;
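        /*
         * Worked example (illustrative): with EVP_AEAD_TLS1_AAD_LEN == 13,
         * the last two AAD bytes carry the TLS record length. When
         * decrypting a 40-byte record with an 8-byte explicit nonce and
         * m == 16, the stored length becomes 40 - 8 - 16 == 16, i.e. the
         * plaintext length that is actually MACed.
         */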
2357 case EVP_CTRL_CCM_SET_IV_FIXED:
2358 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2361 /* Copy to first part of the IV. */
2362 iv = EVP_CIPHER_CTX_iv_noconst(c);
2363 memcpy(iv, ptr, arg);
2366 case EVP_CTRL_AEAD_SET_IVLEN:
2370 case EVP_CTRL_CCM_SET_L:
2371 if (arg < 2 || arg > 8)
2374 cctx->aes.ccm.l = arg;
2377 case EVP_CTRL_AEAD_SET_TAG:
2378 if ((arg & 1) || arg < 4 || arg > 16)
2381 enc = EVP_CIPHER_CTX_encrypting(c);
2386 cctx->aes.ccm.tag_set = 1;
2387 buf = EVP_CIPHER_CTX_buf_noconst(c);
2388 memcpy(buf, ptr, arg);
2391 cctx->aes.ccm.m = arg;
2394 case EVP_CTRL_AEAD_GET_TAG:
2395 enc = EVP_CIPHER_CTX_encrypting(c);
2396 if (!enc || !cctx->aes.ccm.tag_set)
2399 if (arg < cctx->aes.ccm.m)
2402 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2403 cctx->aes.ccm.tag_set = 0;
2404 cctx->aes.ccm.iv_set = 0;
2405 cctx->aes.ccm.len_set = 0;
2416 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2418 # ifndef OPENSSL_NO_OCB
2419 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2420 # define S390X_aes_128_ocb_CAPABLE 0
2421 # define S390X_aes_192_ocb_CAPABLE 0
2422 # define S390X_aes_256_ocb_CAPABLE 0
2424 # define s390x_aes_ocb_init_key aes_ocb_init_key
2425 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2426 const unsigned char *iv, int enc);
2427 # define s390x_aes_ocb_cipher aes_ocb_cipher
2428 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2429 const unsigned char *in, size_t len);
2430 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2431 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2432 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2433 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2436 # ifndef OPENSSL_NO_SIV
2437 # define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
2438 # define S390X_aes_128_siv_CAPABLE 0
2439 # define S390X_aes_192_siv_CAPABLE 0
2440 # define S390X_aes_256_siv_CAPABLE 0
2442 # define s390x_aes_siv_init_key aes_siv_init_key
2443 # define s390x_aes_siv_cipher aes_siv_cipher
2444 # define s390x_aes_siv_cleanup aes_siv_cleanup
2445 # define s390x_aes_siv_ctrl aes_siv_ctrl
2448 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2450 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2451 nid##_##keylen##_##nmode,blocksize, \
2454 flags | EVP_CIPH_##MODE##_MODE, \
2455 s390x_aes_##mode##_init_key, \
2456 s390x_aes_##mode##_cipher, \
2458 sizeof(S390X_AES_##MODE##_CTX), \
2464 static const EVP_CIPHER aes_##keylen##_##mode = { \
2465 nid##_##keylen##_##nmode, \
2469 flags | EVP_CIPH_##MODE##_MODE, \
2471 aes_##mode##_cipher, \
2473 sizeof(EVP_AES_KEY), \
2479 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2481 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2482 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2485 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2486 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2487 nid##_##keylen##_##mode, \
2489 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2491 flags | EVP_CIPH_##MODE##_MODE, \
2492 s390x_aes_##mode##_init_key, \
2493 s390x_aes_##mode##_cipher, \
2494 s390x_aes_##mode##_cleanup, \
2495 sizeof(S390X_AES_##MODE##_CTX), \
2498 s390x_aes_##mode##_ctrl, \
2501 static const EVP_CIPHER aes_##keylen##_##mode = { \
2502 nid##_##keylen##_##mode,blocksize, \
2503 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2505 flags | EVP_CIPH_##MODE##_MODE, \
2506 aes_##mode##_init_key, \
2507 aes_##mode##_cipher, \
2508 aes_##mode##_cleanup, \
2509 sizeof(EVP_AES_##MODE##_CTX), \
2512 aes_##mode##_ctrl, \
2515 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2517 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2518 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2523 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2524 static const EVP_CIPHER aes_##keylen##_##mode = { \
2525 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2526 flags|EVP_CIPH_##MODE##_MODE, \
2528 aes_##mode##_cipher, \
2530 sizeof(EVP_AES_KEY), \
2531 NULL,NULL,NULL,NULL }; \
2532 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2533 { return &aes_##keylen##_##mode; }
2535 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2536 static const EVP_CIPHER aes_##keylen##_##mode = { \
2537 nid##_##keylen##_##mode,blocksize, \
2538 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2540 flags|EVP_CIPH_##MODE##_MODE, \
2541 aes_##mode##_init_key, \
2542 aes_##mode##_cipher, \
2543 aes_##mode##_cleanup, \
2544 sizeof(EVP_AES_##MODE##_CTX), \
2545 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2546 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2547 { return &aes_##keylen##_##mode; }
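/*
 * Expansion sketch (illustrative): BLOCK_CIPHER_custom(NID_aes, 128, 1, 12,
 * gcm, GCM, ...) defines a static EVP_CIPHER aes_128_gcm with
 * nid == NID_aes_128_gcm, block size 1, a 16-byte key (32 bytes for XTS and
 * SIV, which carry two keys back to back) and a 12-byte IV, plus the public
 * accessor:
 *
 *     const EVP_CIPHER *EVP_aes_128_gcm(void) { return &aes_128_gcm; }
 *
 * In the s390x variant above, the accessor instead selects between the
 * hardware-accelerated and the generic table at run time.
 */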
2551 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2552 # include "arm_arch.h"
2553 # if __ARM_MAX_ARCH__>=7
2554 # if defined(BSAES_ASM)
2555 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2557 # if defined(VPAES_ASM)
2558 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2560 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2561 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2562 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2563 # define HWAES_encrypt aes_v8_encrypt
2564 # define HWAES_decrypt aes_v8_decrypt
2565 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2566 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2570 #if defined(HWAES_CAPABLE)
2571 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2573 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2575 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2576 const AES_KEY *key);
2577 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2578 const AES_KEY *key);
2579 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2580 size_t length, const AES_KEY *key,
2581 unsigned char *ivec, const int enc);
2582 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2583 size_t len, const AES_KEY *key,
2584 const unsigned char ivec[16]);
2585 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2586 size_t len, const AES_KEY *key1,
2587 const AES_KEY *key2, const unsigned char iv[16]);
2588 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2589 size_t len, const AES_KEY *key1,
2590 const AES_KEY *key2, const unsigned char iv[16]);
2593 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2594 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2595 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2596 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2597 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2598 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2599 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2600 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
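/*
 * Note: each invocation of BLOCK_CIPHER_generic_pack() therefore emits seven
 * EVP_CIPHER definitions and accessors for one key length: cbc, ecb, ofb128,
 * cfb128, cfb1, cfb8 and ctr.
 */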
2602 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2603 const unsigned char *iv, int enc)
2606 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2608 mode = EVP_CIPHER_CTX_mode(ctx);
2609 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2611 #ifdef HWAES_CAPABLE
2612 if (HWAES_CAPABLE) {
2613 ret = HWAES_set_decrypt_key(key,
2614 EVP_CIPHER_CTX_key_length(ctx) * 8,
2616 dat->block = (block128_f) HWAES_decrypt;
2617 dat->stream.cbc = NULL;
2618 # ifdef HWAES_cbc_encrypt
2619 if (mode == EVP_CIPH_CBC_MODE)
2620 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2624 #ifdef BSAES_CAPABLE
2625 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2626 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2628 dat->block = (block128_f) AES_decrypt;
2629 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2632 #ifdef VPAES_CAPABLE
2633 if (VPAES_CAPABLE) {
2634 ret = vpaes_set_decrypt_key(key,
2635 EVP_CIPHER_CTX_key_length(ctx) * 8,
2637 dat->block = (block128_f) vpaes_decrypt;
2638 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2639 (cbc128_f) vpaes_cbc_encrypt : NULL;
2643 ret = AES_set_decrypt_key(key,
2644 EVP_CIPHER_CTX_key_length(ctx) * 8,
2646 dat->block = (block128_f) AES_decrypt;
2647 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2648 (cbc128_f) AES_cbc_encrypt : NULL;
2651 #ifdef HWAES_CAPABLE
2652 if (HWAES_CAPABLE) {
2653 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2655 dat->block = (block128_f) HWAES_encrypt;
2656 dat->stream.cbc = NULL;
2657 # ifdef HWAES_cbc_encrypt
2658 if (mode == EVP_CIPH_CBC_MODE)
2659 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2662 # ifdef HWAES_ctr32_encrypt_blocks
2663 if (mode == EVP_CIPH_CTR_MODE)
2664 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2667 (void)0; /* terminate potentially open 'else' */
2670 #ifdef BSAES_CAPABLE
2671 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2672 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2674 dat->block = (block128_f) AES_encrypt;
2675 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2678 #ifdef VPAES_CAPABLE
2679 if (VPAES_CAPABLE) {
2680 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2682 dat->block = (block128_f) vpaes_encrypt;
2683 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2684 (cbc128_f) vpaes_cbc_encrypt : NULL;
2688 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2690 dat->block = (block128_f) AES_encrypt;
2691 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2692 (cbc128_f) AES_cbc_encrypt : NULL;
2694 if (mode == EVP_CIPH_CTR_MODE)
2695 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2700 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2707 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2708 const unsigned char *in, size_t len)
2710 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2712 if (dat->stream.cbc)
2713 (*dat->stream.cbc) (in, out, len, &dat->ks,
2714 EVP_CIPHER_CTX_iv_noconst(ctx),
2715 EVP_CIPHER_CTX_encrypting(ctx));
2716 else if (EVP_CIPHER_CTX_encrypting(ctx))
2717 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2718 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2720 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2721 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2726 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2727 const unsigned char *in, size_t len)
2729 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2731 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2736 for (i = 0, len -= bl; i <= len; i += bl)
2737 (*dat->block) (in + i, out + i, &dat->ks);
2742 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2743 const unsigned char *in, size_t len)
2745 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2747 int num = EVP_CIPHER_CTX_num(ctx);
2748 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2749 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2750 EVP_CIPHER_CTX_set_num(ctx, num);
2754 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2755 const unsigned char *in, size_t len)
2757 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2759 int num = EVP_CIPHER_CTX_num(ctx);
2760 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2761 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2762 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2763 EVP_CIPHER_CTX_set_num(ctx, num);
2767 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2768 const unsigned char *in, size_t len)
2770 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2772 int num = EVP_CIPHER_CTX_num(ctx);
2773 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2774 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2775 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2776 EVP_CIPHER_CTX_set_num(ctx, num);
2780 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2781 const unsigned char *in, size_t len)
2783 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2785 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2786 int num = EVP_CIPHER_CTX_num(ctx);
2787 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2788 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2789 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2790 EVP_CIPHER_CTX_set_num(ctx, num);
2794 while (len >= MAXBITCHUNK) {
2795 int num = EVP_CIPHER_CTX_num(ctx);
2796 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2797 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2798 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2799 EVP_CIPHER_CTX_set_num(ctx, num);
2805 int num = EVP_CIPHER_CTX_num(ctx);
2806 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2807 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2808 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2809 EVP_CIPHER_CTX_set_num(ctx, num);
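    /*
     * Note on the chunking above: CRYPTO_cfb128_1_encrypt() takes its length
     * argument in bits, so data is fed in at most MAXBITCHUNK bytes at a
     * time to keep the len * 8 computation from overflowing a size_t.
     */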
2815 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2816 const unsigned char *in, size_t len)
2818 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2819 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2821 if (dat->stream.ctr)
2822 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2823 EVP_CIPHER_CTX_iv_noconst(ctx),
2824 EVP_CIPHER_CTX_buf_noconst(ctx),
2825 &num, dat->stream.ctr);
2827 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2828 EVP_CIPHER_CTX_iv_noconst(ctx),
2829 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2831 EVP_CIPHER_CTX_set_num(ctx, num);
2835 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2836 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2837 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
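/*
 * Usage sketch (illustrative, not part of this file): the ciphers emitted by
 * the packs above are consumed through the public EVP envelope interface.
 * The helper name and buffer sizing are hypothetical; error handling is
 * reduced to a single status flag.
 */
#if 0
static int example_aes_cbc_encrypt(const unsigned char key[16],
                                   const unsigned char iv[16],
                                   const unsigned char *pt, int ptlen,
                                   unsigned char *ct, int *ctlen)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok;

    ok = c != NULL
        && EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv)
        && EVP_EncryptUpdate(c, ct, &len, pt, ptlen);
    if (ok) {
        *ctlen = len;
        ok = EVP_EncryptFinal_ex(c, ct + len, &len); /* final padded block */
        *ctlen += len;
    }
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif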
2839 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2841 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2844 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2845 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2846 OPENSSL_free(gctx->iv);
2850 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2852 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2857 gctx->ivlen = c->cipher->iv_len;
2861 gctx->tls_aad_len = -1;
2864 case EVP_CTRL_AEAD_SET_IVLEN:
2867 /* Allocate memory for IV if needed */
2868 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2869 if (gctx->iv != c->iv)
2870 OPENSSL_free(gctx->iv);
2871 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2872 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2879 case EVP_CTRL_AEAD_SET_TAG:
2880 if (arg <= 0 || arg > 16 || c->encrypt)
2882 memcpy(c->buf, ptr, arg);
2886 case EVP_CTRL_AEAD_GET_TAG:
2887 if (arg <= 0 || arg > 16 || !c->encrypt
2888 || gctx->taglen < 0)
2890 memcpy(ptr, c->buf, arg);
2893 case EVP_CTRL_GET_IV:
2894 if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
2896 if (gctx->ivlen != arg)
2898 memcpy(ptr, gctx->iv, arg);
2901 case EVP_CTRL_GCM_SET_IV_FIXED:
2902 /* Special case: -1 length restores whole IV */
2904 memcpy(gctx->iv, ptr, gctx->ivlen);
2909 * Fixed field must be at least 4 bytes and invocation field at least 8.
2912 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2915 memcpy(gctx->iv, ptr, arg);
2916 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2921 case EVP_CTRL_GCM_IV_GEN:
2922 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2924 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2925 if (arg <= 0 || arg > gctx->ivlen)
2927 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2929 * The invocation field will be at least 8 bytes in size, so there is no
2930 * need to check for wraparound or to increment more than the last 8 bytes.
2932 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2936 case EVP_CTRL_GCM_SET_IV_INV:
2937 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2939 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2940 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2944 case EVP_CTRL_AEAD_TLS1_AAD:
2945 /* Save the AAD for later use */
2946 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2948 memcpy(c->buf, ptr, arg);
2949 gctx->tls_aad_len = arg;
2950 gctx->tls_enc_records = 0;
2952 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2953 /* Correct length for explicit IV */
2954 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2956 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2957 /* If decrypting correct for tag too */
2959 if (len < EVP_GCM_TLS_TAG_LEN)
2961 len -= EVP_GCM_TLS_TAG_LEN;
2963 c->buf[arg - 2] = len >> 8;
2964 c->buf[arg - 1] = len & 0xff;
2966 /* Extra padding: tag appended to record */
2967 return EVP_GCM_TLS_TAG_LEN;
2971 EVP_CIPHER_CTX *out = ptr;
2972 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2973 if (gctx->gcm.key) {
2974 if (gctx->gcm.key != &gctx->ks)
2976 gctx_out->gcm.key = &gctx_out->ks;
2978 if (gctx->iv == c->iv)
2979 gctx_out->iv = out->iv;
2981 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2982 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2985 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2996 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2997 const unsigned char *iv, int enc)
2999 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3004 #ifdef HWAES_CAPABLE
3005 if (HWAES_CAPABLE) {
3006 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3007 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3008 (block128_f) HWAES_encrypt);
3009 # ifdef HWAES_ctr32_encrypt_blocks
3010 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
3017 #ifdef BSAES_CAPABLE
3018 if (BSAES_CAPABLE) {
3019 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3020 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3021 (block128_f) AES_encrypt);
3022 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
3026 #ifdef VPAES_CAPABLE
3027 if (VPAES_CAPABLE) {
3028 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3029 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3030 (block128_f) vpaes_encrypt);
3035 (void)0; /* terminate potentially open 'else' */
3037 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3038 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3039 (block128_f) AES_encrypt);
3041 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
3048 * If we have an IV we can set it directly, otherwise use the saved IV.
3050 if (iv == NULL && gctx->iv_set)
3053 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3058 /* If key set use IV, otherwise copy */
3060 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3062 memcpy(gctx->iv, iv, gctx->ivlen);
3070 * Handle TLS GCM packet format. This consists of the last portion of the IV
3071 * followed by the payload and finally the tag. On encrypt generate IV,
3072 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload and verify the tag.
3076 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3077 const unsigned char *in, size_t len)
3079 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3081 /* Encrypt/decrypt must be performed in place */
3083 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3087 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
3088 * Requirements from SP 800-38D". The requirement is for one party to the
3089 * communication to fail after 2^64 - 1 records. We do this on the encrypting
3092 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
3093 EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
3098 * Set IV from start of buffer or generate IV and write to start of the buffer.
3101 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3102 : EVP_CTRL_GCM_SET_IV_INV,
3103 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3106 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3108 /* Fix buffer and length to point to payload */
3109 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3110 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3111 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3113 /* Encrypt payload */
3116 #if defined(AES_GCM_ASM)
3117 if (len >= 32 && AES_GCM_ASM(gctx)) {
3118 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3121 bulk = AES_gcm_encrypt(in, out, len,
3123 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3124 gctx->gcm.len.u[1] += bulk;
3127 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3130 len - bulk, gctx->ctr))
3134 #if defined(AES_GCM_ASM2)
3135 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3136 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3139 bulk = AES_gcm_encrypt(in, out, len,
3141 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3142 gctx->gcm.len.u[1] += bulk;
3145 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3146 in + bulk, out + bulk, len - bulk))
3150 /* Finally write tag */
3151 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3152 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3157 #if defined(AES_GCM_ASM)
3158 if (len >= 16 && AES_GCM_ASM(gctx)) {
3159 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3162 bulk = AES_gcm_decrypt(in, out, len,
3164 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3165 gctx->gcm.len.u[1] += bulk;
3168 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3171 len - bulk, gctx->ctr))
3175 #if defined(AES_GCM_ASM2)
3176 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3177 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3180 bulk = AES_gcm_decrypt(in, out, len,
3182 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3183 gctx->gcm.len.u[1] += bulk;
3186 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3187 in + bulk, out + bulk, len - bulk))
3191 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3192 /* If tag mismatch wipe buffer */
3193 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3194 OPENSSL_cleanse(out, len);
3202 gctx->tls_aad_len = -1;
3208 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVs and keys"
3210 * See also 8.2.2 RBG-based construction.
3211 * Random construction consists of a free field (which can be NULL) and a
3212 * random field which will use a DRBG that can return at least 96 bits of
3213 * entropy strength. (The DRBG must be seeded by the FIPS module).
3215 static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
3217 int sz = gctx->ivlen - offset;
3219 /* Must be at least 96 bits */
3220 if (sz <= 0 || gctx->ivlen < 12)
3223 /* Use DRBG to generate random iv */
3224 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
3228 #endif /* FIPS_MODE */
3230 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3231 const unsigned char *in, size_t len)
3233 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3235 /* If not set up, return error */
3239 if (gctx->tls_aad_len >= 0)
3240 return aes_gcm_tls_cipher(ctx, out, in, len);
3244 * FIPS requires generation of AES-GCM IVs inside the FIPS module.
3245 * The IV can still be set externally (the security policy will state that
3246 * this is not FIPS compliant). There are some applications
3247 * where setting the IV externally is the only option available.
3249 if (!gctx->iv_set) {
3250 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
3252 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
3254 gctx->iv_gen_rand = 1;
3259 #endif /* FIPS_MODE */
3263 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3265 } else if (ctx->encrypt) {
3268 #if defined(AES_GCM_ASM)
3269 if (len >= 32 && AES_GCM_ASM(gctx)) {
3270 size_t res = (16 - gctx->gcm.mres) % 16;
3272 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3275 bulk = AES_gcm_encrypt(in + res,
3276 out + res, len - res,
3277 gctx->gcm.key, gctx->gcm.Yi.c,
3279 gctx->gcm.len.u[1] += bulk;
3283 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3286 len - bulk, gctx->ctr))
3290 #if defined(AES_GCM_ASM2)
3291 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3292 size_t res = (16 - gctx->gcm.mres) % 16;
3294 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3297 bulk = AES_gcm_encrypt(in + res,
3298 out + res, len - res,
3299 gctx->gcm.key, gctx->gcm.Yi.c,
3301 gctx->gcm.len.u[1] += bulk;
3305 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3306 in + bulk, out + bulk, len - bulk))
3312 #if defined(AES_GCM_ASM)
3313 if (len >= 16 && AES_GCM_ASM(gctx)) {
3314 size_t res = (16 - gctx->gcm.mres) % 16;
3316 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3319 bulk = AES_gcm_decrypt(in + res,
3320 out + res, len - res,
3322 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3323 gctx->gcm.len.u[1] += bulk;
3327 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3330 len - bulk, gctx->ctr))
3334 #if defined(AES_GCM_ASM2)
3335 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3336 size_t res = (16 - gctx->gcm.mres) % 16;
3338 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3341 bulk = AES_gcm_decrypt(in + res,
3342 out + res, len - res,
3344 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3345 gctx->gcm.len.u[1] += bulk;
3349 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3350 in + bulk, out + bulk, len - bulk))
3356 if (!ctx->encrypt) {
3357 if (gctx->taglen < 0)
3359 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3364 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3366 /* Don't reuse the IV */
3373 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3374 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3375 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3376 | EVP_CIPH_CUSTOM_COPY)
3378 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3379 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3380 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3381 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3382 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3383 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
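/*
 * Usage sketch (illustrative, not part of this file): sealing with AES-GCM
 * through the EVP interface. The SET_IVLEN call is shown for completeness;
 * 12 bytes is already the default declared above. The helper name is
 * hypothetical and error handling is reduced to a status flag.
 */
#if 0
static int example_aes_gcm_seal(const unsigned char key[32],
                                const unsigned char iv[12],
                                const unsigned char *aad, int aadlen,
                                const unsigned char *pt, int ptlen,
                                unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok;

    ok = c != NULL
        && EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL)
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL)
        && EVP_EncryptInit_ex(c, NULL, NULL, key, iv)
        && EVP_EncryptUpdate(c, NULL, &len, aad, aadlen) /* AAD pass */
        && EVP_EncryptUpdate(c, ct, &len, pt, ptlen)
        && EVP_EncryptFinal_ex(c, ct + len, &len)
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif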
3385 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3387 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3388 if (type == EVP_CTRL_COPY) {
3389 EVP_CIPHER_CTX *out = ptr;
3390 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3391 if (xctx->xts.key1) {
3392 if (xctx->xts.key1 != &xctx->ks1)
3394 xctx_out->xts.key1 = &xctx_out->ks1;
3396 if (xctx->xts.key2) {
3397 if (xctx->xts.key2 != &xctx->ks2)
3399 xctx_out->xts.key2 = &xctx_out->ks2;
3402 } else if (type != EVP_CTRL_INIT)
3404 /* key1 and key2 are used as an indicator that both key and IV are set */
3405 xctx->xts.key1 = NULL;
3406 xctx->xts.key2 = NULL;
3410 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3411 const unsigned char *iv, int enc)
3413 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3420 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3422 xctx->stream = NULL;
3424 /* key_len is two AES keys */
3425 #ifdef HWAES_CAPABLE
3426 if (HWAES_CAPABLE) {
3428 HWAES_set_encrypt_key(key,
3429 EVP_CIPHER_CTX_key_length(ctx) * 4,
3431 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3432 # ifdef HWAES_xts_encrypt
3433 xctx->stream = HWAES_xts_encrypt;
3436 HWAES_set_decrypt_key(key,
3437 EVP_CIPHER_CTX_key_length(ctx) * 4,
3439 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3440 # ifdef HWAES_xts_decrypt
3441 xctx->stream = HWAES_xts_decrypt;
3445 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3446 EVP_CIPHER_CTX_key_length(ctx) * 4,
3448 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3450 xctx->xts.key1 = &xctx->ks1;
3454 #ifdef BSAES_CAPABLE
3456 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3459 #ifdef VPAES_CAPABLE
3460 if (VPAES_CAPABLE) {
3462 vpaes_set_encrypt_key(key,
3463 EVP_CIPHER_CTX_key_length(ctx) * 4,
3465 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3467 vpaes_set_decrypt_key(key,
3468 EVP_CIPHER_CTX_key_length(ctx) * 4,
3470 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3473 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3474 EVP_CIPHER_CTX_key_length(ctx) * 4,
3476 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3478 xctx->xts.key1 = &xctx->ks1;
3482 (void)0; /* terminate potentially open 'else' */
3485 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3487 xctx->xts.block1 = (block128_f) AES_encrypt;
3489 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3491 xctx->xts.block1 = (block128_f) AES_decrypt;
3494 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3495 EVP_CIPHER_CTX_key_length(ctx) * 4,
3497 xctx->xts.block2 = (block128_f) AES_encrypt;
3499 xctx->xts.key1 = &xctx->ks1;
3503 xctx->xts.key2 = &xctx->ks2;
3504 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3510 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3511 const unsigned char *in, size_t len)
3513 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3515 if (xctx->xts.key1 == NULL
3516 || xctx->xts.key2 == NULL
3519 || len < AES_BLOCK_SIZE)
3523 * Impose a limit of 2^20 blocks per data unit as specified by
3524 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3525 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3526 * NIST SP 800-38E mandates the same limit.
3528 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3529 EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3534 * Verify that the two keys are different.
3536 * This addresses the vulnerability described in Rogaway's September 2004
3537 * paper (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf):
3538 * "Efficient Instantiations of Tweakable Blockciphers and Refinements
3539 * to Modes OCB and PMAC".
3541 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states that:
3542 * "The check for Key_1 != Key_2 shall be done at any place BEFORE
3543 * using the keys in the XTS-AES algorithm to process data with them."
3545 if (CRYPTO_memcmp(xctx->xts.key1, xctx->xts.key2,
3546 EVP_CIPHER_CTX_key_length(ctx) / 2) == 0)
3550 (*xctx->stream) (in, out, len,
3551 xctx->xts.key1, xctx->xts.key2,
3552 EVP_CIPHER_CTX_iv_noconst(ctx));
3553 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3555 EVP_CIPHER_CTX_encrypting(ctx)))
3560 #define aes_xts_cleanup NULL
3562 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3563 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3564 | EVP_CIPH_CUSTOM_COPY)
3566 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3567 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
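/*
 * Usage sketch (illustrative, not part of this file): XTS is keyed with a
 * double-length key (key1 || key2) and takes the 16-byte tweak in the IV
 * slot; inputs shorter than one block are rejected and the two key halves
 * must differ, as enforced in aes_xts_cipher() above. The helper name is
 * hypothetical.
 */
#if 0
static int example_aes_xts_encrypt(const unsigned char key[64],
                                   const unsigned char tweak[16],
                                   const unsigned char *in, int inlen,
                                   unsigned char *out)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok;

    ok = c != NULL
        && EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak)
        && EVP_EncryptUpdate(c, out, &len, in, inlen)
        && EVP_EncryptFinal_ex(c, out + len, &len);
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif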
3569 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3571 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3580 cctx->tls_aad_len = -1;
3583 case EVP_CTRL_AEAD_TLS1_AAD:
3584 /* Save the AAD for later use */
3585 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3587 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3588 cctx->tls_aad_len = arg;
3591 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3592 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3593 /* Correct length for explicit IV */
3594 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3596 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3597 /* If decrypting correct for tag too */
3598 if (!EVP_CIPHER_CTX_encrypting(c)) {
3603 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3604 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3606 /* Extra padding: tag appended to record */
3609 case EVP_CTRL_CCM_SET_IV_FIXED:
3610 /* Sanity check length */
3611 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3613 /* Just copy to first part of IV */
3614 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3617 case EVP_CTRL_AEAD_SET_IVLEN:
3620 case EVP_CTRL_CCM_SET_L:
3621 if (arg < 2 || arg > 8)
3626 case EVP_CTRL_AEAD_SET_TAG:
3627 if ((arg & 1) || arg < 4 || arg > 16)
3629 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3633 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3638 case EVP_CTRL_AEAD_GET_TAG:
3639 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3641 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3650 EVP_CIPHER_CTX *out = ptr;
3651 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3652 if (cctx->ccm.key) {
3653 if (cctx->ccm.key != &cctx->ks)
3655 cctx_out->ccm.key = &cctx_out->ks;
3666 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3667 const unsigned char *iv, int enc)
3669 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3674 #ifdef HWAES_CAPABLE
3675 if (HWAES_CAPABLE) {
3676 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3679 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3680 &cctx->ks, (block128_f) HWAES_encrypt);
3686 #ifdef VPAES_CAPABLE
3687 if (VPAES_CAPABLE) {
3688 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3690 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3691 &cctx->ks, (block128_f) vpaes_encrypt);
3697 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3699 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3700 &cctx->ks, (block128_f) AES_encrypt);
3705 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3711 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3712 const unsigned char *in, size_t len)
3714 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3715 CCM128_CONTEXT *ccm = &cctx->ccm;
3716 /* Encrypt/decrypt must be performed in place */
3717 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3719 /* If encrypting set explicit IV from sequence number (start of AAD) */
3720 if (EVP_CIPHER_CTX_encrypting(ctx))
3721 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3722 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3723 /* Get rest of IV from explicit IV */
3724 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3725 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3726 /* Correct length value */
3727 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3728 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3732 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3733 /* Fix buffer to point to payload */
3734 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3735 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3736 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3737 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3739 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3741 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3743 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3745 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3747 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3748 unsigned char tag[16];
3749 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3750 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3754 OPENSSL_cleanse(out, len);
3759 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3760 const unsigned char *in, size_t len)
3762 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3763 CCM128_CONTEXT *ccm = &cctx->ccm;
3764 /* If not set up, return error */
3768 if (cctx->tls_aad_len >= 0)
3769 return aes_ccm_tls_cipher(ctx, out, in, len);
3771 /* EVP_*Final() doesn't return any data */
3772 if (in == NULL && out != NULL)
3778 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3782 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3788 /* If we have AAD we need the message length */
3789 if (!cctx->len_set && len)
3791 CRYPTO_ccm128_aad(ccm, in, len);
3794 /* If the length is not set yet, set it now */
3795 if (!cctx->len_set) {
3796 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3801 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3802 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3804 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3810 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3812 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3813 unsigned char tag[16];
3814 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3815 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3821 OPENSSL_cleanse(out, len);
3829 #define aes_ccm_cleanup NULL
3831 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3832 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3833 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3834 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3835 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3836 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
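/*
 * Usage sketch (illustrative, not part of this file): CCM needs the tag
 * length and the total plaintext length up front, so the first
 * EVP_EncryptUpdate() call passes only the length (NULL in/out buffers),
 * mirroring the len_set handling in aes_ccm_cipher() above. The helper name
 * is hypothetical.
 */
#if 0
static int example_aes_ccm_seal(const unsigned char key[16],
                                const unsigned char *nonce, int noncelen,
                                const unsigned char *aad, int aadlen,
                                const unsigned char *pt, int ptlen,
                                unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok;

    ok = c != NULL
        && EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL)
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, noncelen, NULL)
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, NULL)
        && EVP_EncryptInit_ex(c, NULL, NULL, key, nonce)
        && EVP_EncryptUpdate(c, NULL, &len, NULL, ptlen) /* message length */
        && EVP_EncryptUpdate(c, NULL, &len, aad, aadlen) /* AAD */
        && EVP_EncryptUpdate(c, ct, &len, pt, ptlen)
        && EVP_EncryptFinal_ex(c, ct + len, &len)
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif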
3843 /* Indicates if IV has been set */
3847 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3848 const unsigned char *iv, int enc)
3850 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3854 if (EVP_CIPHER_CTX_encrypting(ctx))
3855 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3858 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3864 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3865 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3870 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3871 const unsigned char *in, size_t inlen)
3873 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3875 /* AES wrap with padding has IV length of 4, without padding 8 */
3876 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3877 /* No final operation so always return zero length */
3880 /* Input length must always be non-zero */
3883 /* If decrypting need at least 16 bytes and multiple of 8 */
3884 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3886 /* If not padding input must be multiple of 8 */
3887 if (!pad && inlen & 0x7)
3889 if (is_partially_overlapping(out, in, inlen)) {
3890 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3894 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3895 /* If padding round up to multiple of 8 */
3897 inlen = (inlen + 7) / 8 * 8;
3902 * If not padding output will be exactly 8 bytes smaller than
3903 * input. If padding it will be at least 8 bytes smaller but we
3904 * don't know how much.
3910 if (EVP_CIPHER_CTX_encrypting(ctx))
3911 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3913 (block128_f) AES_encrypt);
3915 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3917 (block128_f) AES_decrypt);
3919 if (EVP_CIPHER_CTX_encrypting(ctx))
3920 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3921 out, in, inlen, (block128_f) AES_encrypt);
3923 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3924 out, in, inlen, (block128_f) AES_decrypt);
3926 return rv ? (int)rv : -1;
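/*
 * Usage sketch (illustrative, not part of this file): wrap-mode ciphers
 * refuse to run unless the caller opts in via EVP_CIPHER_CTX_FLAG_WRAP_ALLOW;
 * the RFC 3394 output is exactly 8 bytes longer than the input. The helper
 * name is hypothetical.
 */
#if 0
static int example_aes_key_wrap(const unsigned char kek[16],
                                const unsigned char *key_in, int inlen,
                                unsigned char *wrapped, int *outlen)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok;

    if (c == NULL)
        return 0;
    EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    ok = EVP_EncryptInit_ex(c, EVP_aes_128_wrap(), NULL, kek, NULL)
        && EVP_EncryptUpdate(c, wrapped, &len, key_in, inlen);
    if (ok) {
        *outlen = len; /* inlen + 8 */
        ok = EVP_EncryptFinal_ex(c, wrapped + len, &len); /* writes nothing */
    }
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif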
3929 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3930 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3931 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3933 static const EVP_CIPHER aes_128_wrap = {
3935 8, 16, 8, WRAP_FLAGS,
3936 aes_wrap_init_key, aes_wrap_cipher,
3938 sizeof(EVP_AES_WRAP_CTX),
3939 NULL, NULL, NULL, NULL
3942 const EVP_CIPHER *EVP_aes_128_wrap(void)
3944 return &aes_128_wrap;
3947 static const EVP_CIPHER aes_192_wrap = {
3949 8, 24, 8, WRAP_FLAGS,
3950 aes_wrap_init_key, aes_wrap_cipher,
3952 sizeof(EVP_AES_WRAP_CTX),
3953 NULL, NULL, NULL, NULL
3956 const EVP_CIPHER *EVP_aes_192_wrap(void)
3958 return &aes_192_wrap;
3961 static const EVP_CIPHER aes_256_wrap = {
3963 8, 32, 8, WRAP_FLAGS,
3964 aes_wrap_init_key, aes_wrap_cipher,
3966 sizeof(EVP_AES_WRAP_CTX),
3967 NULL, NULL, NULL, NULL
3970 const EVP_CIPHER *EVP_aes_256_wrap(void)
3972 return &aes_256_wrap;
3975 static const EVP_CIPHER aes_128_wrap_pad = {
3976 NID_id_aes128_wrap_pad,
3977 8, 16, 4, WRAP_FLAGS,
3978 aes_wrap_init_key, aes_wrap_cipher,
3980 sizeof(EVP_AES_WRAP_CTX),
3981 NULL, NULL, NULL, NULL
3984 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3986 return &aes_128_wrap_pad;
3989 static const EVP_CIPHER aes_192_wrap_pad = {
3990 NID_id_aes192_wrap_pad,
3991 8, 24, 4, WRAP_FLAGS,
3992 aes_wrap_init_key, aes_wrap_cipher,
3994 sizeof(EVP_AES_WRAP_CTX),
3995 NULL, NULL, NULL, NULL
3998 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
4000 return &aes_192_wrap_pad;
4003 static const EVP_CIPHER aes_256_wrap_pad = {
4004 NID_id_aes256_wrap_pad,
4005 8, 32, 4, WRAP_FLAGS,
4006 aes_wrap_init_key, aes_wrap_cipher,
4008 sizeof(EVP_AES_WRAP_CTX),
4009 NULL, NULL, NULL, NULL
4012 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
4014 return &aes_256_wrap_pad;
4017 #ifndef OPENSSL_NO_OCB
4018 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4020 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4021 EVP_CIPHER_CTX *newc;
4022 EVP_AES_OCB_CTX *new_octx;
4028 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
4029 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
4031 octx->data_buf_len = 0;
4032 octx->aad_buf_len = 0;
4035 case EVP_CTRL_AEAD_SET_IVLEN:
4036 /* IV len must be 1 to 15 */
4037 if (arg <= 0 || arg > 15)
4043 case EVP_CTRL_AEAD_SET_TAG:
4045 /* Tag len must be 0 to 16 */
4046 if (arg < 0 || arg > 16)
4052 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
4054 memcpy(octx->tag, ptr, arg);
4057 case EVP_CTRL_AEAD_GET_TAG:
4058 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
4061 memcpy(ptr, octx->tag, arg);
4065 newc = (EVP_CIPHER_CTX *)ptr;
4066 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
4067 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
4068 &new_octx->ksenc.ks,
4069 &new_octx->ksdec.ks);
4077 # ifdef HWAES_CAPABLE
4078 # ifdef HWAES_ocb_encrypt
4079 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
4080 size_t blocks, const void *key,
4081 size_t start_block_num,
4082 unsigned char offset_i[16],
4083 const unsigned char L_[][16],
4084 unsigned char checksum[16]);
4086 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
4088 # ifdef HWAES_ocb_decrypt
4089 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
4090 size_t blocks, const void *key,
4091 size_t start_block_num,
4092 unsigned char offset_i[16],
4093 const unsigned char L_[][16],
4094 unsigned char checksum[16]);
4096 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
4100 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4101 const unsigned char *iv, int enc)
4103 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4109 * We set both the encrypt and decrypt key here because decrypt
4110 * needs both. We could possibly optimise by not setting the
4111 * decrypt key for an encryption-only operation.
4113 # ifdef HWAES_CAPABLE
4114 if (HWAES_CAPABLE) {
4115 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4117 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4119 if (!CRYPTO_ocb128_init(&octx->ocb,
4120 &octx->ksenc.ks, &octx->ksdec.ks,
4121 (block128_f) HWAES_encrypt,
4122 (block128_f) HWAES_decrypt,
4123 enc ? HWAES_ocb_encrypt
4124 : HWAES_ocb_decrypt))
4129 # ifdef VPAES_CAPABLE
4130 if (VPAES_CAPABLE) {
4131 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4133 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4135 if (!CRYPTO_ocb128_init(&octx->ocb,
4136 &octx->ksenc.ks, &octx->ksdec.ks,
4137 (block128_f) vpaes_encrypt,
4138 (block128_f) vpaes_decrypt,
4144 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4146 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4148 if (!CRYPTO_ocb128_init(&octx->ocb,
4149 &octx->ksenc.ks, &octx->ksdec.ks,
4150 (block128_f) AES_encrypt,
4151 (block128_f) AES_decrypt,
4158 * If we have an IV we can set it directly, otherwise use the saved IV.
4160 if (iv == NULL && octx->iv_set)
4163 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
4170 /* If key set use IV, otherwise copy */
4172 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
4174 memcpy(octx->iv, iv, octx->ivlen);
4180 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4181 const unsigned char *in, size_t len)
4185 int written_len = 0;
4186 size_t trailing_len;
4187 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4189 /* If IV or Key not set then return error */
4198 * Need to ensure we are only passing full blocks to low level OCB
4199 * routines. We do it here rather than in EVP_EncryptUpdate/
4200 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
4201 * and those routines don't support that.
4204 /* Are we dealing with AAD or normal data here? */
4206 buf = octx->aad_buf;
4207 buf_len = &(octx->aad_buf_len);
4209 buf = octx->data_buf;
4210 buf_len = &(octx->data_buf_len);
4212 if (is_partially_overlapping(out + *buf_len, in, len)) {
4213 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
4219 * If we've got a partially filled buffer from a previous call then
4220 * use that data first
4223 unsigned int remaining;
4225 remaining = AES_BLOCK_SIZE - (*buf_len);
4226 if (remaining > len) {
4227 memcpy(buf + (*buf_len), in, len);
4231 memcpy(buf + (*buf_len), in, remaining);
4234 * If we get here we've filled the buffer, so process it
4239 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4241 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4242 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4246 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4250 written_len = AES_BLOCK_SIZE;
4253 out += AES_BLOCK_SIZE;
4256 /* Do we have a partial block to handle at the end? */
4257 trailing_len = len % AES_BLOCK_SIZE;
4260 * If we've got some full blocks to handle, then process these first
4262 if (len != trailing_len) {
4264 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4266 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4267 if (!CRYPTO_ocb128_encrypt
4268 (&octx->ocb, in, out, len - trailing_len))
4271 if (!CRYPTO_ocb128_decrypt
4272 (&octx->ocb, in, out, len - trailing_len))
4275 written_len += len - trailing_len;
4276 in += len - trailing_len;
4279 /* Handle any trailing partial block */
4280 if (trailing_len > 0) {
4281 memcpy(buf, in, trailing_len);
4282 *buf_len = trailing_len;
4288 * First of all empty the buffer of any partial block that we might
4289 * have been provided - both for data and AAD
4291 if (octx->data_buf_len > 0) {
4292 if (EVP_CIPHER_CTX_encrypting(ctx)) {
4293 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4294 octx->data_buf_len))
4297 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4298 octx->data_buf_len))
4301 written_len = octx->data_buf_len;
4302 octx->data_buf_len = 0;
4304 if (octx->aad_buf_len > 0) {
4305 if (!CRYPTO_ocb128_aad
4306 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4308 octx->aad_buf_len = 0;
4310 /* If decrypting then verify */
4311 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4312 if (octx->taglen < 0)
4314 if (CRYPTO_ocb128_finish(&octx->ocb,
4315 octx->tag, octx->taglen) != 0)
4320 /* If encrypting then just get the tag */
4321 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4323 /* Don't reuse the IV */
4329 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4331 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4332 CRYPTO_ocb128_cleanup(&octx->ocb);
4336 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4337 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4338 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4339 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4340 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4341 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
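/*
 * Usage sketch (illustrative, not part of this file): opening an AES-OCB
 * sealed message; on decrypt the expected tag must be supplied via
 * EVP_CTRL_AEAD_SET_TAG before EVP_DecryptFinal_ex(), which performs the
 * verification (see aes_ocb_cipher() above). The helper name is hypothetical
 * and the default 16-byte tag length is assumed.
 */
#if 0
static int example_aes_ocb_open(const unsigned char key[16],
                                const unsigned char iv[12],
                                const unsigned char *ct, int ctlen,
                                const unsigned char tag[16],
                                unsigned char *pt)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok;

    ok = c != NULL
        && EVP_DecryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, iv)
        && EVP_DecryptUpdate(c, pt, &len, ct, ctlen)
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, (void *)tag)
        && EVP_DecryptFinal_ex(c, pt + len, &len); /* flush + verify tag */
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif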
4342 #endif /* OPENSSL_NO_OCB */
4345 #ifndef OPENSSL_NO_SIV
4347 typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4349 #define aesni_siv_init_key aes_siv_init_key
4350 static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4351 const unsigned char *iv, int enc)
4353 const EVP_CIPHER *ctr;
4354 const EVP_CIPHER *cbc;
4355 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4356 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
4363 cbc = EVP_aes_128_cbc();
4364 ctr = EVP_aes_128_ctr();
4367 cbc = EVP_aes_192_cbc();
4368 ctr = EVP_aes_192_ctr();
4371 cbc = EVP_aes_256_cbc();
4372 ctr = EVP_aes_256_ctr();
4378 /* klen is the key length of each underlying cipher, not of the input
4379 key, which is twice as long */
4380 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
4383 #define aesni_siv_cipher aes_siv_cipher
4384 static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4385 const unsigned char *in, size_t len)
4387 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4389 /* EncryptFinal or DecryptFinal */
4391 return CRYPTO_siv128_finish(sctx);
4393 /* Deal with associated data */
4395 return CRYPTO_siv128_aad(sctx, in, len);
4397 if (EVP_CIPHER_CTX_encrypting(ctx))
4398 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4400 return CRYPTO_siv128_decrypt(sctx, in, out, len);
4403 #define aesni_siv_cleanup aes_siv_cleanup
4404 static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4406 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4408 return CRYPTO_siv128_cleanup(sctx);
4412 #define aesni_siv_ctrl aes_siv_ctrl
4413 static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4415 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4416 SIV128_CONTEXT *sctx_out;
4420 return CRYPTO_siv128_cleanup(sctx);
4422 case EVP_CTRL_SET_SPEED:
4423 return CRYPTO_siv128_speed(sctx, arg);
4425 case EVP_CTRL_AEAD_SET_TAG:
4426 if (!EVP_CIPHER_CTX_encrypting(c))
4427 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4430 case EVP_CTRL_AEAD_GET_TAG:
4431 if (!EVP_CIPHER_CTX_encrypting(c))
4433 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4436 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4437 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4445 #define SIV_FLAGS (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
4446 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
4447 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
4448 | EVP_CIPH_CTRL_INIT)
4450 BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
4451 BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
4452 BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)
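/*
 * Note (sketch): AES-SIV (RFC 5297) is keyed with double-length key
 * material, the first half keying the S2V (CMAC) step and the second half
 * the CTR step, which is why the declarations above carry a double-length
 * key. Each EVP_*Update() call with out == NULL and in != NULL contributes
 * one additional S2V associated-data component.
 */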