2 * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved.
4 * Licensed under the OpenSSL license (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
10 #include <openssl/opensslconf.h>
11 #include <openssl/crypto.h>
12 #include <openssl/evp.h>
13 #include <openssl/err.h>
16 #include <openssl/aes.h>
17 #include "internal/evp_int.h"
18 #include "modes_lcl.h"
19 #include <openssl/rand.h>
38 } ks; /* AES key schedule to use */
39 int key_set; /* Set if key initialised */
40 int iv_set; /* Set if an iv is set */
42 unsigned char *iv; /* Temporary IV store */
43 int ivlen; /* IV length */
45 int iv_gen; /* It is OK to generate IVs */
46 int tls_aad_len; /* TLS AAD length */
54 } ks1, ks2; /* AES key schedules to use */
56 void (*stream) (const unsigned char *in,
57 unsigned char *out, size_t length,
58 const AES_KEY *key1, const AES_KEY *key2,
59 const unsigned char iv[16]);
66 } ks; /* AES key schedule to use */
67 int key_set; /* Set if key initialised */
68 int iv_set; /* Set if an iv is set */
69 int tag_set; /* Set if tag is valid */
70 int len_set; /* Set if message length set */
71 int L, M; /* L and M parameters from RFC3610 */
72 int tls_aad_len; /* TLS AAD length */
77 #ifndef OPENSSL_NO_OCB
82 } ksenc; /* AES key schedule to use for encryption */
86 } ksdec; /* AES key schedule to use for decryption */
87 int key_set; /* Set if key initialised */
88 int iv_set; /* Set if an iv is set */
90 unsigned char *iv; /* Temporary IV store */
91 unsigned char tag[16];
92 unsigned char data_buf[16]; /* Store partial data blocks */
93 unsigned char aad_buf[16]; /* Store partial AAD blocks */
96 int ivlen; /* IV length */
101 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
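/*
 * Illustrative note (not in the original source): MAXBITCHUNK works out to
 * 1 << 60 with a 64-bit size_t and 1 << 28 with a 32-bit one.  Presumably the
 * cap exists so that a later bytes-to-bits conversion (len * 8) cannot
 * overflow a size_t.
 */
#if 0                           /* sketch only, never compiled */
# include <assert.h>
# include <stddef.h>
static void maxbitchunk_example(void)
{
    if (sizeof(size_t) == 8)
        assert(MAXBITCHUNK == ((size_t)1 << 60));
    else if (sizeof(size_t) == 4)
        assert(MAXBITCHUNK == ((size_t)1 << 28));
}
#endif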
104 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
106 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
109 void vpaes_encrypt(const unsigned char *in, unsigned char *out,
111 void vpaes_decrypt(const unsigned char *in, unsigned char *out,
114 void vpaes_cbc_encrypt(const unsigned char *in,
117 const AES_KEY *key, unsigned char *ivec, int enc);
120 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
121 size_t length, const AES_KEY *key,
122 unsigned char ivec[16], int enc);
123 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
124 size_t len, const AES_KEY *key,
125 const unsigned char ivec[16]);
126 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
127 size_t len, const AES_KEY *key1,
128 const AES_KEY *key2, const unsigned char iv[16]);
129 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
130 size_t len, const AES_KEY *key1,
131 const AES_KEY *key2, const unsigned char iv[16]);
134 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
135 size_t blocks, const AES_KEY *key,
136 const unsigned char ivec[AES_BLOCK_SIZE]);
139 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
140 const AES_KEY *key1, const AES_KEY *key2,
141 const unsigned char iv[16]);
142 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
143 const AES_KEY *key1, const AES_KEY *key2,
144 const unsigned char iv[16]);
147 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
148 # include "ppc_arch.h"
150 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
152 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
153 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
154 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
155 # define HWAES_encrypt aes_p8_encrypt
156 # define HWAES_decrypt aes_p8_decrypt
157 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
158 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
159 # define HWAES_xts_encrypt aes_p8_xts_encrypt
160 # define HWAES_xts_decrypt aes_p8_xts_decrypt
163 #if defined(AES_ASM) && !defined(I386_ONLY) && ( \
164 ((defined(__i386) || defined(__i386__) || \
165 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
166 defined(__x86_64) || defined(__x86_64__) || \
167 defined(_M_AMD64) || defined(_M_X64) )
169 extern unsigned int OPENSSL_ia32cap_P[];
172 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
175 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
180 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
182 int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
184 int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
187 void aesni_encrypt(const unsigned char *in, unsigned char *out,
189 void aesni_decrypt(const unsigned char *in, unsigned char *out,
192 void aesni_ecb_encrypt(const unsigned char *in,
194 size_t length, const AES_KEY *key, int enc);
195 void aesni_cbc_encrypt(const unsigned char *in,
198 const AES_KEY *key, unsigned char *ivec, int enc);
200 void aesni_ctr32_encrypt_blocks(const unsigned char *in,
203 const void *key, const unsigned char *ivec);
205 void aesni_xts_encrypt(const unsigned char *in,
208 const AES_KEY *key1, const AES_KEY *key2,
209 const unsigned char iv[16]);
211 void aesni_xts_decrypt(const unsigned char *in,
214 const AES_KEY *key1, const AES_KEY *key2,
215 const unsigned char iv[16]);
217 void aesni_ccm64_encrypt_blocks(const unsigned char *in,
221 const unsigned char ivec[16],
222 unsigned char cmac[16]);
224 void aesni_ccm64_decrypt_blocks(const unsigned char *in,
228 const unsigned char ivec[16],
229 unsigned char cmac[16]);
231 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
232 size_t aesni_gcm_encrypt(const unsigned char *in,
235 const void *key, unsigned char ivec[16], u64 *Xi);
236 # define AES_gcm_encrypt aesni_gcm_encrypt
237 size_t aesni_gcm_decrypt(const unsigned char *in,
240 const void *key, unsigned char ivec[16], u64 *Xi);
241 # define AES_gcm_decrypt aesni_gcm_decrypt
242 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
244 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
245 gctx->gcm.ghash==gcm_ghash_avx)
246 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
247 gctx->gcm.ghash==gcm_ghash_avx)
248 # undef AES_GCM_ASM2 /* minor size optimization */
251 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
252 const unsigned char *iv, int enc)
255 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
257 mode = EVP_CIPHER_CTX_mode(ctx);
258 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
260 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
262 dat->block = (block128_f) aesni_decrypt;
263 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
264 (cbc128_f) aesni_cbc_encrypt : NULL;
266 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
268 dat->block = (block128_f) aesni_encrypt;
269 if (mode == EVP_CIPH_CBC_MODE)
270 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
271 else if (mode == EVP_CIPH_CTR_MODE)
272 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
274 dat->stream.cbc = NULL;
278 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
285 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
286 const unsigned char *in, size_t len)
288 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
289 EVP_CIPHER_CTX_iv_noconst(ctx),
290 EVP_CIPHER_CTX_encrypting(ctx));
295 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
296 const unsigned char *in, size_t len)
298 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
303 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
304 EVP_CIPHER_CTX_encrypting(ctx));
309 # define aesni_ofb_cipher aes_ofb_cipher
310 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
311 const unsigned char *in, size_t len);
313 # define aesni_cfb_cipher aes_cfb_cipher
314 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
315 const unsigned char *in, size_t len);
317 # define aesni_cfb8_cipher aes_cfb8_cipher
318 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
319 const unsigned char *in, size_t len);
321 # define aesni_cfb1_cipher aes_cfb1_cipher
322 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
323 const unsigned char *in, size_t len);
325 # define aesni_ctr_cipher aes_ctr_cipher
326 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
327 const unsigned char *in, size_t len);
329 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
330 const unsigned char *iv, int enc)
332 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
336 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
338 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
339 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
341 * If we have an iv we can set it directly, otherwise use saved IV.
343 if (iv == NULL && gctx->iv_set)
346 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
351 /* If key set use IV, otherwise copy */
353 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
355 memcpy(gctx->iv, iv, gctx->ivlen);
362 # define aesni_gcm_cipher aes_gcm_cipher
363 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
364 const unsigned char *in, size_t len);
366 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
367 const unsigned char *iv, int enc)
369 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
374 /* key_len is two AES keys */
376 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
378 xctx->xts.block1 = (block128_f) aesni_encrypt;
379 xctx->stream = aesni_xts_encrypt;
381 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
383 xctx->xts.block1 = (block128_f) aesni_decrypt;
384 xctx->stream = aesni_xts_decrypt;
387 aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
388 EVP_CIPHER_CTX_key_length(ctx) * 4,
390 xctx->xts.block2 = (block128_f) aesni_encrypt;
392 xctx->xts.key1 = &xctx->ks1;
396 xctx->xts.key2 = &xctx->ks2;
397 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
403 # define aesni_xts_cipher aes_xts_cipher
404 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
405 const unsigned char *in, size_t len);
407 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
408 const unsigned char *iv, int enc)
410 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
414 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
416 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
417 &cctx->ks, (block128_f) aesni_encrypt);
418 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
419 (ccm128_f) aesni_ccm64_decrypt_blocks;
423 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
429 # define aesni_ccm_cipher aes_ccm_cipher
430 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
431 const unsigned char *in, size_t len);
433 # ifndef OPENSSL_NO_OCB
434 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
435 size_t blocks, const void *key,
436 size_t start_block_num,
437 unsigned char offset_i[16],
438 const unsigned char L_[][16],
439 unsigned char checksum[16]);
440 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
441 size_t blocks, const void *key,
442 size_t start_block_num,
443 unsigned char offset_i[16],
444 const unsigned char L_[][16],
445 unsigned char checksum[16]);
447 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
448 const unsigned char *iv, int enc)
450 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
456 * We set both the encrypt and decrypt key here because decrypt
457 * needs both. We could possibly optimise to remove setting the
458 * decrypt for an encryption operation.
460 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
462 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
464 if (!CRYPTO_ocb128_init(&octx->ocb,
465 &octx->ksenc.ks, &octx->ksdec.ks,
466 (block128_f) aesni_encrypt,
467 (block128_f) aesni_decrypt,
468 enc ? aesni_ocb_encrypt
469 : aesni_ocb_decrypt))
475 * If we have an iv we can set it directly, otherwise use saved IV.
477 if (iv == NULL && octx->iv_set)
480 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
487 /* If key set use IV, otherwise copy */
489 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
491 memcpy(octx->iv, iv, octx->ivlen);
497 # define aesni_ocb_cipher aes_ocb_cipher
498 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
499 const unsigned char *in, size_t len);
500 # endif /* OPENSSL_NO_OCB */
502 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
503 static const EVP_CIPHER aesni_##keylen##_##mode = { \
504 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
505 flags|EVP_CIPH_##MODE##_MODE, \
507 aesni_##mode##_cipher, \
509 sizeof(EVP_AES_KEY), \
510 NULL,NULL,NULL,NULL }; \
511 static const EVP_CIPHER aes_##keylen##_##mode = { \
512 nid##_##keylen##_##nmode,blocksize, \
514 flags|EVP_CIPH_##MODE##_MODE, \
516 aes_##mode##_cipher, \
518 sizeof(EVP_AES_KEY), \
519 NULL,NULL,NULL,NULL }; \
520 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
521 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
523 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
524 static const EVP_CIPHER aesni_##keylen##_##mode = { \
525 nid##_##keylen##_##mode,blocksize, \
526 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
527 flags|EVP_CIPH_##MODE##_MODE, \
528 aesni_##mode##_init_key, \
529 aesni_##mode##_cipher, \
530 aes_##mode##_cleanup, \
531 sizeof(EVP_AES_##MODE##_CTX), \
532 NULL,NULL,aes_##mode##_ctrl,NULL }; \
533 static const EVP_CIPHER aes_##keylen##_##mode = { \
534 nid##_##keylen##_##mode,blocksize, \
535 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
536 flags|EVP_CIPH_##MODE##_MODE, \
537 aes_##mode##_init_key, \
538 aes_##mode##_cipher, \
539 aes_##mode##_cleanup, \
540 sizeof(EVP_AES_##MODE##_CTX), \
541 NULL,NULL,aes_##mode##_ctrl,NULL }; \
542 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
543 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
545 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
547 # include "sparc_arch.h"
549 extern unsigned int OPENSSL_sparcv9cap_P[];
552 * Initial Fujitsu SPARC64 X support
554 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
555 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key
556 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key
557 # define HWAES_encrypt aes_fx_encrypt
558 # define HWAES_decrypt aes_fx_decrypt
559 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt
560 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
562 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
564 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
565 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
566 void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
568 void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
571 * Key-length-specific subroutines were chosen for the following reason.
572 * Each SPARC T4 core can execute up to 8 threads which share the core's
573 * resources. Loading as much key material as possible into registers
574 * minimizes references to the shared memory interface, as well as the
575 * number of instructions in inner loops [much needed on T4]. But
576 * non-key-length-specific routines would require conditional branches
577 * either in inner loops or on subroutine entry. The former is hardly
578 * acceptable, while the latter would grow the code to roughly the size
579 * occupied by the multiple key-length-specific subroutines, so why fight?
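/*
 * A minimal sketch (not part of the original code) of the dispatch that
 * aes_t4_init_key() below performs: the key length fixed at EVP_CipherInit
 * time selects one of the key-length-specific T4 routines once, so the
 * inner loops themselves stay branch-free.
 */
#if 0                           /* sketch only, never compiled */
static cbc128_f t4_cbc_encrypt_for_bits(int bits)
{
    switch (bits) {
    case 128:
        return (cbc128_f) aes128_t4_cbc_encrypt;
    case 192:
        return (cbc128_f) aes192_t4_cbc_encrypt;
    case 256:
        return (cbc128_f) aes256_t4_cbc_encrypt;
    default:
        return NULL;
    }
}
#endif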
581 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
582 size_t len, const AES_KEY *key,
583 unsigned char *ivec);
584 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
585 size_t len, const AES_KEY *key,
586 unsigned char *ivec);
587 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
588 size_t len, const AES_KEY *key,
589 unsigned char *ivec);
590 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
591 size_t len, const AES_KEY *key,
592 unsigned char *ivec);
593 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
594 size_t len, const AES_KEY *key,
595 unsigned char *ivec);
596 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
597 size_t len, const AES_KEY *key,
598 unsigned char *ivec);
599 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
600 size_t blocks, const AES_KEY *key,
601 unsigned char *ivec);
602 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
603 size_t blocks, const AES_KEY *key,
604 unsigned char *ivec);
605 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
606 size_t blocks, const AES_KEY *key,
607 unsigned char *ivec);
608 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
609 size_t blocks, const AES_KEY *key1,
610 const AES_KEY *key2, const unsigned char *ivec);
611 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
612 size_t blocks, const AES_KEY *key1,
613 const AES_KEY *key2, const unsigned char *ivec);
614 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
615 size_t blocks, const AES_KEY *key1,
616 const AES_KEY *key2, const unsigned char *ivec);
617 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
618 size_t blocks, const AES_KEY *key1,
619 const AES_KEY *key2, const unsigned char *ivec);
621 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
622 const unsigned char *iv, int enc)
625 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
627 mode = EVP_CIPHER_CTX_mode(ctx);
628 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
629 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
632 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
633 dat->block = (block128_f) aes_t4_decrypt;
636 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
637 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
640 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
641 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
644 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
645 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
652 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
653 dat->block = (block128_f) aes_t4_encrypt;
656 if (mode == EVP_CIPH_CBC_MODE)
657 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
658 else if (mode == EVP_CIPH_CTR_MODE)
659 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
661 dat->stream.cbc = NULL;
664 if (mode == EVP_CIPH_CBC_MODE)
665 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
666 else if (mode == EVP_CIPH_CTR_MODE)
667 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
669 dat->stream.cbc = NULL;
672 if (mode == EVP_CIPH_CBC_MODE)
673 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
674 else if (mode == EVP_CIPH_CTR_MODE)
675 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
677 dat->stream.cbc = NULL;
685 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
692 # define aes_t4_cbc_cipher aes_cbc_cipher
693 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
694 const unsigned char *in, size_t len);
696 # define aes_t4_ecb_cipher aes_ecb_cipher
697 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
698 const unsigned char *in, size_t len);
700 # define aes_t4_ofb_cipher aes_ofb_cipher
701 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
702 const unsigned char *in, size_t len);
704 # define aes_t4_cfb_cipher aes_cfb_cipher
705 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
706 const unsigned char *in, size_t len);
708 # define aes_t4_cfb8_cipher aes_cfb8_cipher
709 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
710 const unsigned char *in, size_t len);
712 # define aes_t4_cfb1_cipher aes_cfb1_cipher
713 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
714 const unsigned char *in, size_t len);
716 # define aes_t4_ctr_cipher aes_ctr_cipher
717 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
718 const unsigned char *in, size_t len);
720 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
721 const unsigned char *iv, int enc)
723 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
727 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
728 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
729 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
730 (block128_f) aes_t4_encrypt);
733 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
736 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
739 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
745 * If we have an iv we can set it directly, otherwise use saved IV.
747 if (iv == NULL && gctx->iv_set)
750 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
755 /* If key set use IV, otherwise copy */
757 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
759 memcpy(gctx->iv, iv, gctx->ivlen);
766 # define aes_t4_gcm_cipher aes_gcm_cipher
767 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
768 const unsigned char *in, size_t len);
770 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
771 const unsigned char *iv, int enc)
773 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
778 int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
780 /* key_len is two AES keys */
782 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
783 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
786 xctx->stream = aes128_t4_xts_encrypt;
789 xctx->stream = aes256_t4_xts_encrypt;
795 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
797 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
800 xctx->stream = aes128_t4_xts_decrypt;
803 xctx->stream = aes256_t4_xts_decrypt;
810 aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
811 EVP_CIPHER_CTX_key_length(ctx) * 4,
813 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
815 xctx->xts.key1 = &xctx->ks1;
819 xctx->xts.key2 = &xctx->ks2;
820 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
826 # define aes_t4_xts_cipher aes_xts_cipher
827 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
828 const unsigned char *in, size_t len);
830 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
831 const unsigned char *iv, int enc)
833 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
837 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
838 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
839 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
840 &cctx->ks, (block128_f) aes_t4_encrypt);
845 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
851 # define aes_t4_ccm_cipher aes_ccm_cipher
852 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
853 const unsigned char *in, size_t len);
855 # ifndef OPENSSL_NO_OCB
856 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
857 const unsigned char *iv, int enc)
859 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
865 * We set both the encrypt and decrypt key here because decrypt
866 * needs both. We could possibly optimise to remove setting the
867 * decrypt for an encryption operation.
869 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
871 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
873 if (!CRYPTO_ocb128_init(&octx->ocb,
874 &octx->ksenc.ks, &octx->ksdec.ks,
875 (block128_f) aes_t4_encrypt,
876 (block128_f) aes_t4_decrypt,
883 * If we have an iv we can set it directly, otherwise use saved IV.
885 if (iv == NULL && octx->iv_set)
888 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
895 /* If key set use IV, otherwise copy */
897 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
899 memcpy(octx->iv, iv, octx->ivlen);
905 # define aes_t4_ocb_cipher aes_ocb_cipher
906 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
907 const unsigned char *in, size_t len);
908 # endif /* OPENSSL_NO_OCB */
910 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
911 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
912 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
913 flags|EVP_CIPH_##MODE##_MODE, \
915 aes_t4_##mode##_cipher, \
917 sizeof(EVP_AES_KEY), \
918 NULL,NULL,NULL,NULL }; \
919 static const EVP_CIPHER aes_##keylen##_##mode = { \
920 nid##_##keylen##_##nmode,blocksize, \
922 flags|EVP_CIPH_##MODE##_MODE, \
924 aes_##mode##_cipher, \
926 sizeof(EVP_AES_KEY), \
927 NULL,NULL,NULL,NULL }; \
928 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
929 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
931 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
932 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
933 nid##_##keylen##_##mode,blocksize, \
934 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
935 flags|EVP_CIPH_##MODE##_MODE, \
936 aes_t4_##mode##_init_key, \
937 aes_t4_##mode##_cipher, \
938 aes_##mode##_cleanup, \
939 sizeof(EVP_AES_##MODE##_CTX), \
940 NULL,NULL,aes_##mode##_ctrl,NULL }; \
941 static const EVP_CIPHER aes_##keylen##_##mode = { \
942 nid##_##keylen##_##mode,blocksize, \
943 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
944 flags|EVP_CIPH_##MODE##_MODE, \
945 aes_##mode##_init_key, \
946 aes_##mode##_cipher, \
947 aes_##mode##_cleanup, \
948 sizeof(EVP_AES_##MODE##_CTX), \
949 NULL,NULL,aes_##mode##_ctrl,NULL }; \
950 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
951 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
953 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
957 # include "s390x_arch.h"
963 * KM-AES parameter block - begin
964 * (see z/Architecture Principles of Operation >= SA22-7832-06)
969 /* KM-AES parameter block - end */
978 * KMO-AES parameter block - begin
979 * (see z/Architecture Principles of Operation >= SA22-7832-08)
982 unsigned char cv[16];
985 /* KMO-AES parameter block - end */
996 * KMF-AES parameter block - begin
997 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1000 unsigned char cv[16];
1001 unsigned char k[32];
1003 /* KMF-AES parameter block - end */
1008 } S390X_AES_CFB_CTX;
1014 * KMA-GCM-AES parameter block - begin
1015 * (see z/Architecture Principles of Operation >= SA22-7832-11)
1018 unsigned char reserved[12];
1024 unsigned long long g[2];
1025 unsigned char b[16];
1027 unsigned char h[16];
1028 unsigned long long taadl;
1029 unsigned long long tpcl;
1031 unsigned long long g[2];
1034 unsigned char k[32];
1036 /* KMA-GCM-AES parameter block - end */
1048 unsigned char ares[16];
1049 unsigned char mres[16];
1050 unsigned char kres[16];
1056 } S390X_AES_GCM_CTX;
1062 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
1063 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
1064 * rounds field is used to store the function code and that the key
1065 * schedule is not stored (if aes hardware support is detected).
1068 unsigned char pad[16];
1074 * KMAC-AES parameter block - begin
1075 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1079 unsigned long long g[2];
1080 unsigned char b[16];
1082 unsigned char k[32];
1084 /* KMAC-AES parameter block - end */
1087 unsigned long long g[2];
1088 unsigned char b[16];
1091 unsigned long long g[2];
1092 unsigned char b[16];
1095 unsigned long long blocks;
1104 unsigned char pad[140];
1108 } S390X_AES_CCM_CTX;
1110 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */
1111 # define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
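/*
 * Worked example (not in the original source), assuming the function codes
 * quoted in the comment above (S390X_AES_128 == 18):
 */
#if 0                           /* sketch only, never compiled */
# include <assert.h>
static void s390x_aes_fc_example(void)
{
    assert(S390X_AES_FC(16) == 18);     /* AES-128 */
    assert(S390X_AES_FC(24) == 19);     /* AES-192 */
    assert(S390X_AES_FC(32) == 20);     /* AES-256 */
}
#endif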
1113 /* Most modes of operation need km for partial block processing. */
1114 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1115 S390X_CAPBIT(S390X_AES_128))
1116 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1117 S390X_CAPBIT(S390X_AES_192))
1118 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1119 S390X_CAPBIT(S390X_AES_256))
1121 # define s390x_aes_init_key aes_init_key
1122 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1123 const unsigned char *iv, int enc);
1125 # define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
1126 # define S390X_aes_192_cbc_CAPABLE 1
1127 # define S390X_aes_256_cbc_CAPABLE 1
1128 # define S390X_AES_CBC_CTX EVP_AES_KEY
1130 # define s390x_aes_cbc_init_key aes_init_key
1132 # define s390x_aes_cbc_cipher aes_cbc_cipher
1133 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1134 const unsigned char *in, size_t len);
1136 # define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
1137 # define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
1138 # define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
1140 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
1141 const unsigned char *key,
1142 const unsigned char *iv, int enc)
1144 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1145 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1147 cctx->fc = S390X_AES_FC(keylen);
1149 cctx->fc |= S390X_DECRYPT;
1151 memcpy(cctx->km.param.k, key, keylen);
1155 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1156 const unsigned char *in, size_t len)
1158 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1160 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
1164 # define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
1165 (OPENSSL_s390xcap_P.kmo[0] & \
1166 S390X_CAPBIT(S390X_AES_128)))
1167 # define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
1168 (OPENSSL_s390xcap_P.kmo[0] & \
1169 S390X_CAPBIT(S390X_AES_192)))
1170 # define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
1171 (OPENSSL_s390xcap_P.kmo[0] & \
1172 S390X_CAPBIT(S390X_AES_256)))
1174 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
1175 const unsigned char *key,
1176 const unsigned char *ivec, int enc)
1178 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1179 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1180 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1181 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1183 memcpy(cctx->kmo.param.cv, iv, ivlen);
1184 memcpy(cctx->kmo.param.k, key, keylen);
1185 cctx->fc = S390X_AES_FC(keylen);
1190 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1191 const unsigned char *in, size_t len)
1193 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1198 *out = *in ^ cctx->kmo.param.cv[n];
1207 len &= ~(size_t)0xf;
1209 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1216 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1220 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1229 # define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
1230 (OPENSSL_s390xcap_P.kmf[0] & \
1231 S390X_CAPBIT(S390X_AES_128)))
1232 # define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
1233 (OPENSSL_s390xcap_P.kmf[0] & \
1234 S390X_CAPBIT(S390X_AES_192)))
1235 # define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
1236 (OPENSSL_s390xcap_P.kmf[0] & \
1237 S390X_CAPBIT(S390X_AES_256)))
1239 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1240 const unsigned char *key,
1241 const unsigned char *ivec, int enc)
1243 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1244 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1245 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1246 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1248 cctx->fc = S390X_AES_FC(keylen);
1249 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1251 cctx->fc |= S390X_DECRYPT;
1254 memcpy(cctx->kmf.param.cv, iv, ivlen);
1255 memcpy(cctx->kmf.param.k, key, keylen);
1259 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1260 const unsigned char *in, size_t len)
1262 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1263 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1264 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1271 *out = cctx->kmf.param.cv[n] ^ tmp;
1272 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1281 len &= ~(size_t)0xf;
1283 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1290 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1291 S390X_AES_FC(keylen), cctx->kmf.param.k);
1295 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1296 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1305 # define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1306 S390X_CAPBIT(S390X_AES_128))
1307 # define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1308 S390X_CAPBIT(S390X_AES_192))
1309 # define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1310 S390X_CAPBIT(S390X_AES_256))
1312 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1313 const unsigned char *key,
1314 const unsigned char *ivec, int enc)
1316 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1317 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1318 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1319 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1321 cctx->fc = S390X_AES_FC(keylen);
1322 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1324 cctx->fc |= S390X_DECRYPT;
1326 memcpy(cctx->kmf.param.cv, iv, ivlen);
1327 memcpy(cctx->kmf.param.k, key, keylen);
1331 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1332 const unsigned char *in, size_t len)
1334 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1336 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1340 # define S390X_aes_128_cfb1_CAPABLE 0
1341 # define S390X_aes_192_cfb1_CAPABLE 0
1342 # define S390X_aes_256_cfb1_CAPABLE 0
1344 # define s390x_aes_cfb1_init_key aes_init_key
1346 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
1347 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1348 const unsigned char *in, size_t len);
1350 # define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
1351 # define S390X_aes_192_ctr_CAPABLE 1
1352 # define S390X_aes_256_ctr_CAPABLE 1
1353 # define S390X_AES_CTR_CTX EVP_AES_KEY
1355 # define s390x_aes_ctr_init_key aes_init_key
1357 # define s390x_aes_ctr_cipher aes_ctr_cipher
1358 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1359 const unsigned char *in, size_t len);
1361 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
1362 (OPENSSL_s390xcap_P.kma[0] & \
1363 S390X_CAPBIT(S390X_AES_128)))
1364 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
1365 (OPENSSL_s390xcap_P.kma[0] & \
1366 S390X_CAPBIT(S390X_AES_192)))
1367 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
1368 (OPENSSL_s390xcap_P.kma[0] & \
1369 S390X_CAPBIT(S390X_AES_256)))
1371 /* iv + padding length for iv lengths != 12 */
1372 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
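/*
 * Worked example (not in the original source): the macro rounds the iv
 * length up to a multiple of 16 and adds one extra 16-byte block, which
 * s390x_aes_gcm_ctrl() below uses to store the 64-bit iv length in bits.
 */
#if 0                           /* sketch only, never compiled */
# include <assert.h>
static void s390x_gcm_ivpadlen_example(void)
{
    assert(S390X_gcm_ivpadlen(13) == 32);       /* 16 + 16 */
    assert(S390X_gcm_ivpadlen(16) == 32);       /* 16 + 16 */
    assert(S390X_gcm_ivpadlen(17) == 48);       /* 32 + 16 */
}
#endif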
1375 * Process additional authenticated data. Returns 0 on success. Code is big-endian.
1378 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1381 unsigned long long alen;
1384 if (ctx->kma.param.tpcl)
1387 alen = ctx->kma.param.taadl + len;
1388 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1390 ctx->kma.param.taadl = alen;
1395 ctx->ares[n] = *aad;
1400 /* ctx->ares contains a complete block if offset has wrapped around */
1402 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1403 ctx->fc |= S390X_KMA_HS;
1410 len &= ~(size_t)0xf;
1412 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1414 ctx->fc |= S390X_KMA_HS;
1422 ctx->ares[rem] = aad[rem];
1429 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1430 * success. Code is big-endian.
1432 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1433 unsigned char *out, size_t len)
1435 const unsigned char *inptr;
1436 unsigned long long mlen;
1439 unsigned char b[16];
1444 mlen = ctx->kma.param.tpcl + len;
1445 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1447 ctx->kma.param.tpcl = mlen;
1453 while (n && inlen) {
1454 ctx->mres[n] = *inptr;
1459 /* ctx->mres contains a complete block if offset has wrapped around */
1461 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1462 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1463 ctx->fc |= S390X_KMA_HS;
1466 /* previous call already encrypted/decrypted its remainder,
1467 * see comment below */
1482 len &= ~(size_t)0xf;
1484 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1485 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1488 ctx->fc |= S390X_KMA_HS;
1493 * If there is a remainder, it has to be saved such that it can be
1494 * processed by kma later. However, we also have to do the for-now
1495 * unauthenticated encryption/decryption part here and now...
1498 if (!ctx->mreslen) {
1499 buf.w[0] = ctx->kma.param.j0.w[0];
1500 buf.w[1] = ctx->kma.param.j0.w[1];
1501 buf.w[2] = ctx->kma.param.j0.w[2];
1502 buf.w[3] = ctx->kma.param.cv.w + 1;
1503 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1507 for (i = 0; i < rem; i++) {
1508 ctx->mres[n + i] = in[i];
1509 out[i] = in[i] ^ ctx->kres[n + i];
1512 ctx->mreslen += rem;
1518 * Initialize context structure. Code is big-endian.
1520 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1521 const unsigned char *iv)
1523 ctx->kma.param.t.g[0] = 0;
1524 ctx->kma.param.t.g[1] = 0;
1525 ctx->kma.param.tpcl = 0;
1526 ctx->kma.param.taadl = 0;
1531 if (ctx->ivlen == 12) {
1532 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1533 ctx->kma.param.j0.w[3] = 1;
1534 ctx->kma.param.cv.w = 1;
1536 /* ctx->iv has the right size and is already padded. */
1537 memcpy(ctx->iv, iv, ctx->ivlen);
1538 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1539 ctx->fc, &ctx->kma.param);
1540 ctx->fc |= S390X_KMA_HS;
1542 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1543 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1544 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1545 ctx->kma.param.t.g[0] = 0;
1546 ctx->kma.param.t.g[1] = 0;
1551 * Performs various operations on the context structure depending on control
1552 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1553 * Code is big-endian.
1555 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1557 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1558 S390X_AES_GCM_CTX *gctx_out;
1559 EVP_CIPHER_CTX *out;
1560 unsigned char *buf, *iv;
1561 int ivlen, enc, len;
1565 ivlen = EVP_CIPHER_CTX_iv_length(c);
1566 iv = EVP_CIPHER_CTX_iv_noconst(c);
1569 gctx->ivlen = ivlen;
1573 gctx->tls_aad_len = -1;
1576 case EVP_CTRL_AEAD_SET_IVLEN:
1581 iv = EVP_CIPHER_CTX_iv_noconst(c);
1582 len = S390X_gcm_ivpadlen(arg);
1584 /* Allocate memory for iv if needed. */
1585 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1587 OPENSSL_free(gctx->iv);
1589 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1590 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1595 memset(gctx->iv + arg, 0, len - arg - 8);
1596 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1601 case EVP_CTRL_AEAD_SET_TAG:
1602 buf = EVP_CIPHER_CTX_buf_noconst(c);
1603 enc = EVP_CIPHER_CTX_encrypting(c);
1604 if (arg <= 0 || arg > 16 || enc)
1607 memcpy(buf, ptr, arg);
1611 case EVP_CTRL_AEAD_GET_TAG:
1612 enc = EVP_CIPHER_CTX_encrypting(c);
1613 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1616 memcpy(ptr, gctx->kma.param.t.b, arg);
1619 case EVP_CTRL_GCM_SET_IV_FIXED:
1620 /* Special case: -1 length restores whole iv */
1622 memcpy(gctx->iv, ptr, gctx->ivlen);
1627 * Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
1630 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1634 memcpy(gctx->iv, ptr, arg);
1636 enc = EVP_CIPHER_CTX_encrypting(c);
1637 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1643 case EVP_CTRL_GCM_IV_GEN:
1644 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1647 s390x_aes_gcm_setiv(gctx, gctx->iv);
1649 if (arg <= 0 || arg > gctx->ivlen)
1652 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1654 * The invocation field will be at least 8 bytes in size, so there is no
1655 * need to check for wrap around or to increment more than the last 8 bytes.
1657 (*(unsigned long long *)(gctx->iv + gctx->ivlen - 8))++;
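/*
 * Hedged illustration (not part of the original code): for the common
 * 12-byte GCM iv, EVP_CTRL_GCM_SET_IV_FIXED above leaves a 4-byte fixed
 * field followed by an 8-byte invocation field, and EVP_CTRL_GCM_IV_GEN
 * only ever bumps that trailing 64-bit counter, as the statement above does.
 */
#if 0                           /* sketch only, never compiled */
# include <string.h>
static void gcm_iv_gen_example(unsigned char iv[12])
{
    unsigned long long ctr;

    /* iv[0..3]  - fixed field, e.g. negotiated once per connection */
    /* iv[4..11] - invocation field, incremented for every record   */
    memcpy(&ctr, iv + 4, sizeof(ctr));
    ctr++;
    memcpy(iv + 4, &ctr, sizeof(ctr));
}
#endif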
1661 case EVP_CTRL_GCM_SET_IV_INV:
1662 enc = EVP_CIPHER_CTX_encrypting(c);
1663 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1666 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1667 s390x_aes_gcm_setiv(gctx, gctx->iv);
1671 case EVP_CTRL_AEAD_TLS1_AAD:
1672 /* Save the aad for later use. */
1673 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1676 buf = EVP_CIPHER_CTX_buf_noconst(c);
1677 memcpy(buf, ptr, arg);
1678 gctx->tls_aad_len = arg;
1680 len = buf[arg - 2] << 8 | buf[arg - 1];
1681 /* Correct length for explicit iv. */
1682 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1684 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1686 /* If decrypting, correct for the tag too. */
1687 enc = EVP_CIPHER_CTX_encrypting(c);
1689 if (len < EVP_GCM_TLS_TAG_LEN)
1691 len -= EVP_GCM_TLS_TAG_LEN;
1693 buf[arg - 2] = len >> 8;
1694 buf[arg - 1] = len & 0xff;
1695 /* Extra padding: tag appended to record. */
1696 return EVP_GCM_TLS_TAG_LEN;
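/*
 * Hedged sketch (not part of the original code): for TLS the 13-byte AAD
 * saved above is seq_num(8) | type(1) | version(2) | length(2).  The case
 * above rewrites the two trailing length bytes so that they cover only the
 * plaintext, e.g. a received record of 8 + 256 + 16 bytes ends up with a
 * stored length of 256.
 */
#if 0                           /* sketch only, never compiled */
static void gcm_tls_aad_length_example(unsigned char aad[13], int decrypting)
{
    int len = aad[11] << 8 | aad[12];

    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;         /* 8-byte explicit iv */
    if (decrypting)
        len -= EVP_GCM_TLS_TAG_LEN;             /* 16-byte tag        */
    aad[11] = (unsigned char)(len >> 8);
    aad[12] = (unsigned char)(len & 0xff);
}
#endif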
1700 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1701 iv = EVP_CIPHER_CTX_iv_noconst(c);
1703 if (gctx->iv == iv) {
1704 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1706 len = S390X_gcm_ivpadlen(gctx->ivlen);
1708 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1709 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1713 memcpy(gctx_out->iv, gctx->iv, len);
1723 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1725 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1726 const unsigned char *key,
1727 const unsigned char *iv, int enc)
1729 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1732 if (iv == NULL && key == NULL)
1736 keylen = EVP_CIPHER_CTX_key_length(ctx);
1737 memcpy(&gctx->kma.param.k, key, keylen);
1739 gctx->fc = S390X_AES_FC(keylen);
1741 gctx->fc |= S390X_DECRYPT;
1743 if (iv == NULL && gctx->iv_set)
1747 s390x_aes_gcm_setiv(gctx, iv);
1753 s390x_aes_gcm_setiv(gctx, iv);
1755 memcpy(gctx->iv, iv, gctx->ivlen);
1764 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1765 * if successful. Otherwise -1 is returned. Code is big-endian.
1767 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1768 const unsigned char *in, size_t len)
1770 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1771 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1772 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1775 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1778 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1779 : EVP_CTRL_GCM_SET_IV_INV,
1780 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1783 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1784 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1785 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1787 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1788 gctx->kma.param.tpcl = len << 3;
1789 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1790 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1793 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1794 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1796 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1797 EVP_GCM_TLS_TAG_LEN)) {
1798 OPENSSL_cleanse(out, len);
1805 gctx->tls_aad_len = -1;
1810 * Called from EVP layer to initialize context, process additional
1811 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1812 * ciphertext or process a TLS packet, depending on context. Returns bytes
1813 * written on success. Otherwise -1 is returned. Code is big-endian.
1815 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1816 const unsigned char *in, size_t len)
1818 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1819 unsigned char *buf, tmp[16];
1825 if (gctx->tls_aad_len >= 0)
1826 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1833 if (s390x_aes_gcm_aad(gctx, in, len))
1836 if (s390x_aes_gcm(gctx, in, out, len))
1841 gctx->kma.param.taadl <<= 3;
1842 gctx->kma.param.tpcl <<= 3;
1843 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1844 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1845 /* recall that we already did en-/decrypt gctx->mres
1846 * and returned it to caller... */
1847 OPENSSL_cleanse(tmp, gctx->mreslen);
1850 enc = EVP_CIPHER_CTX_encrypting(ctx);
1854 if (gctx->taglen < 0)
1857 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1858 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1865 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1867 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1868 const unsigned char *iv;
1873 iv = EVP_CIPHER_CTX_iv(c);
1875 OPENSSL_free(gctx->iv);
1877 OPENSSL_cleanse(gctx, sizeof(*gctx));
1881 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1882 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
1883 # define S390X_aes_256_xts_CAPABLE 1
1885 # define s390x_aes_xts_init_key aes_xts_init_key
1886 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1887 const unsigned char *key,
1888 const unsigned char *iv, int enc);
1889 # define s390x_aes_xts_cipher aes_xts_cipher
1890 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1891 const unsigned char *in, size_t len);
1892 # define s390x_aes_xts_ctrl aes_xts_ctrl
1893 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1894 # define s390x_aes_xts_cleanup aes_xts_cleanup
1896 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
1897 (OPENSSL_s390xcap_P.kmac[0] & \
1898 S390X_CAPBIT(S390X_AES_128)))
1899 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
1900 (OPENSSL_s390xcap_P.kmac[0] & \
1901 S390X_CAPBIT(S390X_AES_192)))
1902 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
1903 (OPENSSL_s390xcap_P.kmac[0] & \
1904 S390X_CAPBIT(S390X_AES_256)))
1906 # define S390X_CCM_AAD_FLAG 0x40
1909 * Set nonce and length fields. Code is big-endian.
1911 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1912 const unsigned char *nonce,
1915 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1916 ctx->aes.ccm.nonce.g[1] = mlen;
1917 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1921 * Process additional authenticated data. Code is big-endian.
1923 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1932 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1934 /* Suppress 'type-punned pointer dereference' warning. */
1935 ptr = ctx->aes.ccm.buf.b;
1937 if (alen < ((1 << 16) - (1 << 8))) {
1938 *(uint16_t *)ptr = alen;
1940 } else if (sizeof(alen) == 8
1941 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1942 *(uint16_t *)ptr = 0xffff;
1943 *(uint64_t *)(ptr + 2) = alen;
1946 *(uint16_t *)ptr = 0xfffe;
1947 *(uint32_t *)(ptr + 2) = alen;
1951 while (i < 16 && alen) {
1952 ctx->aes.ccm.buf.b[i] = *aad;
1958 ctx->aes.ccm.buf.b[i] = 0;
1962 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1963 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1964 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1965 &ctx->aes.ccm.kmac_param);
1966 ctx->aes.ccm.blocks += 2;
1969 alen &= ~(size_t)0xf;
1971 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1972 ctx->aes.ccm.blocks += alen >> 4;
1976 for (i = 0; i < rem; i++)
1977 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1979 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1980 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1981 ctx->aes.ccm.kmac_param.k);
1982 ctx->aes.ccm.blocks++;
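/*
 * A minimal standalone sketch (not in the original source) of the
 * associated-data length prefix written at the top of this function, as CCM
 * specifies it: lengths below 2^16 - 2^8 use two bytes, lengths of 2^32 or
 * more use the 0xff 0xff marker plus eight bytes, and anything in between
 * uses the 0xff 0xfe marker plus four bytes.  The code above simply stores
 * the value through uint16_t/uint32_t/uint64_t pointers, which yields the
 * same bytes because s390x is big-endian.
 */
#if 0                           /* sketch only, never compiled */
# include <stdint.h>
static int ccm_encode_alen(unsigned char *p, uint64_t alen)
{
    int i;

    if (alen < (1 << 16) - (1 << 8)) {
        p[0] = (unsigned char)(alen >> 8);
        p[1] = (unsigned char)alen;
        return 2;
    } else if (alen >= ((uint64_t)1 << 32)) {
        p[0] = 0xff;
        p[1] = 0xff;
        for (i = 0; i < 8; i++)
            p[2 + i] = (unsigned char)(alen >> (56 - 8 * i));
        return 10;
    } else {
        p[0] = 0xff;
        p[1] = 0xfe;
        for (i = 0; i < 4; i++)
            p[2 + i] = (unsigned char)(alen >> (24 - 8 * i));
        return 6;
    }
}
#endif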
1987 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1990 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1991 unsigned char *out, size_t len, int enc)
1994 unsigned int i, l, num;
1995 unsigned char flags;
1997 flags = ctx->aes.ccm.nonce.b[0];
1998 if (!(flags & S390X_CCM_AAD_FLAG)) {
1999 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
2000 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
2001 ctx->aes.ccm.blocks++;
2004 ctx->aes.ccm.nonce.b[0] = l;
2007 * Reconstruct length from encoded length field
2008 * and initialize it with counter value.
2011 for (i = 15 - l; i < 15; i++) {
2012 n |= ctx->aes.ccm.nonce.b[i];
2013 ctx->aes.ccm.nonce.b[i] = 0;
2016 n |= ctx->aes.ccm.nonce.b[15];
2017 ctx->aes.ccm.nonce.b[15] = 1;
2020 return -1; /* length mismatch */
2023 /* Two operations per block plus one for tag encryption */
2024 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
2025 if (ctx->aes.ccm.blocks > (1ULL << 61))
2026 return -2; /* too much data */
2031 len &= ~(size_t)0xf;
2034 /* mac-then-encrypt */
2036 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2038 for (i = 0; i < rem; i++)
2039 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
2041 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2042 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2043 ctx->aes.ccm.kmac_param.k);
2046 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2047 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2048 &num, (ctr128_f)AES_ctr32_encrypt);
2050 /* decrypt-then-mac */
2051 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2052 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2053 &num, (ctr128_f)AES_ctr32_encrypt);
2056 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2058 for (i = 0; i < rem; i++)
2059 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
2061 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2062 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2063 ctx->aes.ccm.kmac_param.k);
2067 for (i = 15 - l; i < 16; i++)
2068 ctx->aes.ccm.nonce.b[i] = 0;
2070 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
2071 ctx->aes.ccm.kmac_param.k);
2072 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
2073 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
2075 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
2080 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
2081 * if successful. Otherwise -1 is returned.
2083 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2084 const unsigned char *in, size_t len)
2086 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2087 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2088 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2089 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2092 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
2096 /* Set explicit iv (sequence number). */
2097 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2100 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2102 * Get explicit iv (sequence number). We already have fixed iv
2103 * (server/client_write_iv) here.
2105 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2106 s390x_aes_ccm_setiv(cctx, ivec, len);
2108 /* Process aad (sequence number|type|version|length) */
2109 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
2111 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2112 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2115 if (s390x_aes_ccm(cctx, in, out, len, enc))
2118 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2119 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2121 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2122 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
2127 OPENSSL_cleanse(out, len);
2133 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
2136 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
2137 const unsigned char *key,
2138 const unsigned char *iv, int enc)
2140 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2141 unsigned char *ivec;
2144 if (iv == NULL && key == NULL)
2148 keylen = EVP_CIPHER_CTX_key_length(ctx);
2149 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
2150 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
2152 /* Store encoded m and l. */
2153 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
2154 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
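/*
 * Worked example (not part of the original code): with the defaults set in
 * s390x_aes_ccm_ctrl() below, l = 8 and m = 12, the encoded flags byte is
 * ((8 - 1) & 0x7) | (((12 - 2) >> 1) & 0x7) << 3 == 0x07 | 0x28 == 0x2f.
 * s390x_aes_ccm_aad() additionally ORs in the 0x40 adata bit when there is
 * associated data.
 */
#if 0                           /* sketch only, never compiled */
# include <assert.h>
static void ccm_flags_example(void)
{
    int l = 8, m = 12;          /* defaults set in s390x_aes_ccm_ctrl() */

    assert((((l - 1) & 0x7) | ((((m - 2) >> 1) & 0x7) << 3)) == 0x2f);
}
#endif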
2155 memset(cctx->aes.ccm.nonce.b + 1, 0,
2156 sizeof(cctx->aes.ccm.nonce.b));
2157 cctx->aes.ccm.blocks = 0;
2159 cctx->aes.ccm.key_set = 1;
2163 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2164 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
2166 cctx->aes.ccm.iv_set = 1;
2173 * Called from EVP layer to initialize context, process additional
2174 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2175 * plaintext or process a TLS packet, depending on context. Returns bytes
2176 * written on success. Otherwise -1 is returned.
2178 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2179 const unsigned char *in, size_t len)
2181 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2182 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2184 unsigned char *buf, *ivec;
2186 if (!cctx->aes.ccm.key_set)
2189 if (cctx->aes.ccm.tls_aad_len >= 0)
2190 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2193 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
2194 * so integrity must be checked already at Update() i.e., before
2195 * potentially corrupted data is output.
2197 if (in == NULL && out != NULL)
2200 if (!cctx->aes.ccm.iv_set)
2203 if (!enc && !cctx->aes.ccm.tag_set)
2207 /* Update(): Pass message length. */
2209 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2210 s390x_aes_ccm_setiv(cctx, ivec, len);
2212 cctx->aes.ccm.len_set = 1;
2216 /* Update(): Process aad. */
2217 if (!cctx->aes.ccm.len_set && len)
2220 s390x_aes_ccm_aad(cctx, in, len);
2224 /* Update(): Process message. */
2226 if (!cctx->aes.ccm.len_set) {
2228 * In case message length was not previously set explicitly via
2229 * Update(), set it now.
2231 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2232 s390x_aes_ccm_setiv(cctx, ivec, len);
2234 cctx->aes.ccm.len_set = 1;
2238 if (s390x_aes_ccm(cctx, in, out, len, enc))
2241 cctx->aes.ccm.tag_set = 1;
2246 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2247 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2248 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2254 OPENSSL_cleanse(out, len);
2256 cctx->aes.ccm.iv_set = 0;
2257 cctx->aes.ccm.tag_set = 0;
2258 cctx->aes.ccm.len_set = 0;
2264 * Performs various operations on the context structure depending on control
2265 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2266 * Code is big-endian.
2268 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2270 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2271 unsigned char *buf, *iv;
2276 cctx->aes.ccm.key_set = 0;
2277 cctx->aes.ccm.iv_set = 0;
2278 cctx->aes.ccm.l = 8;
2279 cctx->aes.ccm.m = 12;
2280 cctx->aes.ccm.tag_set = 0;
2281 cctx->aes.ccm.len_set = 0;
2282 cctx->aes.ccm.tls_aad_len = -1;
2285 case EVP_CTRL_AEAD_TLS1_AAD:
2286 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2289 /* Save the aad for later use. */
2290 buf = EVP_CIPHER_CTX_buf_noconst(c);
2291 memcpy(buf, ptr, arg);
2292 cctx->aes.ccm.tls_aad_len = arg;
2294 len = *(uint16_t *)(buf + arg - 2);
2295 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2298 /* Correct length for explicit iv. */
2299 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2301 enc = EVP_CIPHER_CTX_encrypting(c);
2303 if (len < cctx->aes.ccm.m)
2306 /* Correct length for tag. */
2307 len -= cctx->aes.ccm.m;
2310 *(uint16_t *)(buf + arg - 2) = len;
2311 /* Extra padding: tag appended to record. */
2312 return cctx->aes.ccm.m;
2314 case EVP_CTRL_CCM_SET_IV_FIXED:
2315 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2318 /* Copy to first part of the iv. */
2319 iv = EVP_CIPHER_CTX_iv_noconst(c);
2320 memcpy(iv, ptr, arg);
2323 case EVP_CTRL_AEAD_SET_IVLEN:
2327 case EVP_CTRL_CCM_SET_L:
2328 if (arg < 2 || arg > 8)
2331 cctx->aes.ccm.l = arg;
2334 case EVP_CTRL_AEAD_SET_TAG:
2335 if ((arg & 1) || arg < 4 || arg > 16)
2338 enc = EVP_CIPHER_CTX_encrypting(c);
2343 cctx->aes.ccm.tag_set = 1;
2344 buf = EVP_CIPHER_CTX_buf_noconst(c);
2345 memcpy(buf, ptr, arg);
2348 cctx->aes.ccm.m = arg;
2351 case EVP_CTRL_AEAD_GET_TAG:
2352 enc = EVP_CIPHER_CTX_encrypting(c);
2353 if (!enc || !cctx->aes.ccm.tag_set)
2356 if (arg < cctx->aes.ccm.m)
2359 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2360 cctx->aes.ccm.tag_set = 0;
2361 cctx->aes.ccm.iv_set = 0;
2362 cctx->aes.ccm.len_set = 0;
2373 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
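/*
 * Worked example of the TLS AAD length fix-up in s390x_aes_ccm_ctrl() above
 * (the numbers are purely illustrative): the 13-byte TLS pseudo-header
 * arrives via EVP_CTRL_AEAD_TLS1_AAD with the record length in its last two
 * bytes. The handler always subtracts the explicit IV
 * (EVP_CCM_TLS_EXPLICIT_IV_LEN = 8) and, when decrypting, the tag length m
 * as well. So with m = 16 and an incoming length field of 120, the value
 * written back is 120 - 8 - 16 = 96, i.e. the plaintext length that the
 * CCM MAC must authenticate.
 */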
2375 # ifndef OPENSSL_NO_OCB
2376 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2377 # define S390X_aes_128_ocb_CAPABLE 0
2378 # define S390X_aes_192_ocb_CAPABLE 0
2379 # define S390X_aes_256_ocb_CAPABLE 0
2381 # define s390x_aes_ocb_init_key aes_ocb_init_key
2382 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2383 const unsigned char *iv, int enc);
2384 # define s390x_aes_ocb_cipher aes_ocb_cipher
2385 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2386 const unsigned char *in, size_t len);
2387 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2388 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2389 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2390 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2393 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2395 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2396 nid##_##keylen##_##nmode,blocksize, \
2399 flags | EVP_CIPH_##MODE##_MODE, \
2400 s390x_aes_##mode##_init_key, \
2401 s390x_aes_##mode##_cipher, \
2403 sizeof(S390X_AES_##MODE##_CTX), \
2409 static const EVP_CIPHER aes_##keylen##_##mode = { \
2410 nid##_##keylen##_##nmode, \
2414 flags | EVP_CIPH_##MODE##_MODE, \
2416 aes_##mode##_cipher, \
2418 sizeof(EVP_AES_KEY), \
2424 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2426 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2427 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2430 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2431 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2432 nid##_##keylen##_##mode, \
2434 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2436 flags | EVP_CIPH_##MODE##_MODE, \
2437 s390x_aes_##mode##_init_key, \
2438 s390x_aes_##mode##_cipher, \
2439 s390x_aes_##mode##_cleanup, \
2440 sizeof(S390X_AES_##MODE##_CTX), \
2443 s390x_aes_##mode##_ctrl, \
2446 static const EVP_CIPHER aes_##keylen##_##mode = { \
2447 nid##_##keylen##_##mode,blocksize, \
2448 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2450 flags | EVP_CIPH_##MODE##_MODE, \
2451 aes_##mode##_init_key, \
2452 aes_##mode##_cipher, \
2453 aes_##mode##_cleanup, \
2454 sizeof(EVP_AES_##MODE##_CTX), \
2457 aes_##mode##_ctrl, \
2460 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2462 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2463 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2468 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2469 static const EVP_CIPHER aes_##keylen##_##mode = { \
2470 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2471 flags|EVP_CIPH_##MODE##_MODE, \
2473 aes_##mode##_cipher, \
2475 sizeof(EVP_AES_KEY), \
2476 NULL,NULL,NULL,NULL }; \
2477 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2478 { return &aes_##keylen##_##mode; }
2480 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2481 static const EVP_CIPHER aes_##keylen##_##mode = { \
2482 nid##_##keylen##_##mode,blocksize, \
2483 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
2484 flags|EVP_CIPH_##MODE##_MODE, \
2485 aes_##mode##_init_key, \
2486 aes_##mode##_cipher, \
2487 aes_##mode##_cleanup, \
2488 sizeof(EVP_AES_##MODE##_CTX), \
2489 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2490 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2491 { return &aes_##keylen##_##mode; }
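/*
 * As a rough illustration of the plain (non-s390x) macro above,
 * BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM, flags) expands to
 * approximately:
 *
 *     static const EVP_CIPHER aes_256_gcm = {
 *         NID_aes_256_gcm, 1, 32, 12,
 *         flags | EVP_CIPH_GCM_MODE,
 *         aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
 *         sizeof(EVP_AES_GCM_CTX),
 *         NULL, NULL, aes_gcm_ctrl, NULL };
 *     const EVP_CIPHER *EVP_aes_256_gcm(void) { return &aes_256_gcm; }
 *
 * i.e. a "block size" of 1, a 32-byte key, a 12-byte default IV and the
 * mode-specific ctrl hook wired in.
 */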
2495 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2496 # include "arm_arch.h"
2497 # if __ARM_MAX_ARCH__>=7
2498 # if defined(BSAES_ASM)
2499 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2501 # if defined(VPAES_ASM)
2502 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2504 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2505 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2506 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2507 # define HWAES_encrypt aes_v8_encrypt
2508 # define HWAES_decrypt aes_v8_decrypt
2509 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2510 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2514 #if defined(HWAES_CAPABLE)
2515 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2517 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2519 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2520 const AES_KEY *key);
2521 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2522 const AES_KEY *key);
2523 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2524 size_t length, const AES_KEY *key,
2525 unsigned char *ivec, const int enc);
2526 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2527 size_t len, const AES_KEY *key,
2528 const unsigned char ivec[16]);
2529 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2530 size_t len, const AES_KEY *key1,
2531 const AES_KEY *key2, const unsigned char iv[16]);
2532 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2533 size_t len, const AES_KEY *key1,
2534 const AES_KEY *key2, const unsigned char iv[16]);
2537 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2538 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2539 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2540 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2541 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2542 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2543 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2544 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
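/*
 * For reference: each BLOCK_CIPHER_generic_pack() invocation below therefore
 * registers, for one key length, the CBC, ECB, OFB, CFB (128-, 8- and 1-bit)
 * and CTR ciphers together with their EVP_aes_<keylen>_<mode>() getters.
 */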
2546 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2547 const unsigned char *iv, int enc)
2550 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2552 mode = EVP_CIPHER_CTX_mode(ctx);
2553 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2555 #ifdef HWAES_CAPABLE
2556 if (HWAES_CAPABLE) {
2557 ret = HWAES_set_decrypt_key(key,
2558 EVP_CIPHER_CTX_key_length(ctx) * 8,
2560 dat->block = (block128_f) HWAES_decrypt;
2561 dat->stream.cbc = NULL;
2562 # ifdef HWAES_cbc_encrypt
2563 if (mode == EVP_CIPH_CBC_MODE)
2564 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2568 #ifdef BSAES_CAPABLE
2569 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2570 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2572 dat->block = (block128_f) AES_decrypt;
2573 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2576 #ifdef VPAES_CAPABLE
2577 if (VPAES_CAPABLE) {
2578 ret = vpaes_set_decrypt_key(key,
2579 EVP_CIPHER_CTX_key_length(ctx) * 8,
2581 dat->block = (block128_f) vpaes_decrypt;
2582 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2583 (cbc128_f) vpaes_cbc_encrypt : NULL;
2587 ret = AES_set_decrypt_key(key,
2588 EVP_CIPHER_CTX_key_length(ctx) * 8,
2590 dat->block = (block128_f) AES_decrypt;
2591 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2592 (cbc128_f) AES_cbc_encrypt : NULL;
2595 #ifdef HWAES_CAPABLE
2596 if (HWAES_CAPABLE) {
2597 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2599 dat->block = (block128_f) HWAES_encrypt;
2600 dat->stream.cbc = NULL;
2601 # ifdef HWAES_cbc_encrypt
2602 if (mode == EVP_CIPH_CBC_MODE)
2603 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2606 # ifdef HWAES_ctr32_encrypt_blocks
2607 if (mode == EVP_CIPH_CTR_MODE)
2608 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2611 (void)0; /* terminate potentially open 'else' */
2614 #ifdef BSAES_CAPABLE
2615 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2616 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2618 dat->block = (block128_f) AES_encrypt;
2619 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2622 #ifdef VPAES_CAPABLE
2623 if (VPAES_CAPABLE) {
2624 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2626 dat->block = (block128_f) vpaes_encrypt;
2627 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2628 (cbc128_f) vpaes_cbc_encrypt : NULL;
2632 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2634 dat->block = (block128_f) AES_encrypt;
2635 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2636 (cbc128_f) AES_cbc_encrypt : NULL;
2638 if (mode == EVP_CIPH_CTR_MODE)
2639 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2644 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2651 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2652 const unsigned char *in, size_t len)
2654 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2656 if (dat->stream.cbc)
2657 (*dat->stream.cbc) (in, out, len, &dat->ks,
2658 EVP_CIPHER_CTX_iv_noconst(ctx),
2659 EVP_CIPHER_CTX_encrypting(ctx));
2660 else if (EVP_CIPHER_CTX_encrypting(ctx))
2661 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2662 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2664 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2665 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2670 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2671 const unsigned char *in, size_t len)
2673 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2675 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2680 for (i = 0, len -= bl; i <= len; i += bl)
2681 (*dat->block) (in + i, out + i, &dat->ks);
2686 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2687 const unsigned char *in, size_t len)
2689 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2691 int num = EVP_CIPHER_CTX_num(ctx);
2692 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2693 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2694 EVP_CIPHER_CTX_set_num(ctx, num);
2698 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2699 const unsigned char *in, size_t len)
2701 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2703 int num = EVP_CIPHER_CTX_num(ctx);
2704 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2705 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2706 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2707 EVP_CIPHER_CTX_set_num(ctx, num);
2711 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2712 const unsigned char *in, size_t len)
2714 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2716 int num = EVP_CIPHER_CTX_num(ctx);
2717 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2718 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2719 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2720 EVP_CIPHER_CTX_set_num(ctx, num);
2724 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2725 const unsigned char *in, size_t len)
2727 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2729 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2730 int num = EVP_CIPHER_CTX_num(ctx);
2731 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2732 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2733 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2734 EVP_CIPHER_CTX_set_num(ctx, num);
2738 while (len >= MAXBITCHUNK) {
2739 int num = EVP_CIPHER_CTX_num(ctx);
2740 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2741 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2742 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2743 EVP_CIPHER_CTX_set_num(ctx, num);
2749 int num = EVP_CIPHER_CTX_num(ctx);
2750 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2751 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2752 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2753 EVP_CIPHER_CTX_set_num(ctx, num);
2759 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2760 const unsigned char *in, size_t len)
2762 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2763 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2765 if (dat->stream.ctr)
2766 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2767 EVP_CIPHER_CTX_iv_noconst(ctx),
2768 EVP_CIPHER_CTX_buf_noconst(ctx),
2769 &num, dat->stream.ctr);
2771 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2772 EVP_CIPHER_CTX_iv_noconst(ctx),
2773 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2775 EVP_CIPHER_CTX_set_num(ctx, num);
2779 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2780 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2781 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
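/*
 * A minimal caller-side sketch for the generic ciphers registered above
 * (error checks omitted; key, iv and buffer names are illustrative and not
 * part of this file):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_256_cbc(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, out, &outl, in, inl);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_free(c);
 *
 * The EVP layer buffers partial blocks and applies standard block padding by
 * default, so aes_cbc_cipher() above only ever sees whole blocks.
 */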
2783 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2785 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2788 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2789 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2790 OPENSSL_free(gctx->iv);
2794 /* increment counter (64-bit int) by 1 */
2795 static void ctr64_inc(unsigned char *counter)
2810 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2812 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2817 gctx->ivlen = c->cipher->iv_len;
2821 gctx->tls_aad_len = -1;
2824 case EVP_CTRL_AEAD_SET_IVLEN:
2827 /* Allocate memory for IV if needed */
2828 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2829 if (gctx->iv != c->iv)
2830 OPENSSL_free(gctx->iv);
2831 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2832 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2839 case EVP_CTRL_AEAD_SET_TAG:
2840 if (arg <= 0 || arg > 16 || c->encrypt)
2842 memcpy(c->buf, ptr, arg);
2846 case EVP_CTRL_AEAD_GET_TAG:
2847 if (arg <= 0 || arg > 16 || !c->encrypt
2848 || gctx->taglen < 0)
2850 memcpy(ptr, c->buf, arg);
2853 case EVP_CTRL_GCM_SET_IV_FIXED:
2854 /* Special case: -1 length restores whole IV */
2856 memcpy(gctx->iv, ptr, gctx->ivlen);
2861 * Fixed field must be at least 4 bytes and invocation field at least 8.
2864 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2867 memcpy(gctx->iv, ptr, arg);
2868 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2873 case EVP_CTRL_GCM_IV_GEN:
2874 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2876 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2877 if (arg <= 0 || arg > gctx->ivlen)
2879 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2881 * Invocation field will be at least 8 bytes in size and so no need
2882 * to check wrap around or increment more than last 8 bytes.
2884 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2888 case EVP_CTRL_GCM_SET_IV_INV:
2889 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2891 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2892 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2896 case EVP_CTRL_AEAD_TLS1_AAD:
2897 /* Save the AAD for later use */
2898 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2900 memcpy(c->buf, ptr, arg);
2901 gctx->tls_aad_len = arg;
2903 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2904 /* Correct length for explicit IV */
2905 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2907 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2908 /* If decrypting correct for tag too */
2910 if (len < EVP_GCM_TLS_TAG_LEN)
2912 len -= EVP_GCM_TLS_TAG_LEN;
2914 c->buf[arg - 2] = len >> 8;
2915 c->buf[arg - 1] = len & 0xff;
2917 /* Extra padding: tag appended to record */
2918 return EVP_GCM_TLS_TAG_LEN;
2922 EVP_CIPHER_CTX *out = ptr;
2923 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2924 if (gctx->gcm.key) {
2925 if (gctx->gcm.key != &gctx->ks)
2927 gctx_out->gcm.key = &gctx_out->ks;
2929 if (gctx->iv == c->iv)
2930 gctx_out->iv = out->iv;
2932 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2933 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2936 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2947 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2948 const unsigned char *iv, int enc)
2950 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2955 #ifdef HWAES_CAPABLE
2956 if (HWAES_CAPABLE) {
2957 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2958 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2959 (block128_f) HWAES_encrypt);
2960 # ifdef HWAES_ctr32_encrypt_blocks
2961 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2968 #ifdef BSAES_CAPABLE
2969 if (BSAES_CAPABLE) {
2970 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2971 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2972 (block128_f) AES_encrypt);
2973 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2977 #ifdef VPAES_CAPABLE
2978 if (VPAES_CAPABLE) {
2979 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2980 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2981 (block128_f) vpaes_encrypt);
2986 (void)0; /* terminate potentially open 'else' */
2988 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2989 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2990 (block128_f) AES_encrypt);
2992 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
2999 * If we have an IV we can set it directly, otherwise use the saved IV.
3001 if (iv == NULL && gctx->iv_set)
3004 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3009 /* If the key is set use the IV, otherwise save it for later */
3011 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3013 memcpy(gctx->iv, iv, gctx->ivlen);
3021 * Handle TLS GCM packet format. This consists of the last portion of the IV
3022 * followed by the payload and finally the tag. On encrypt generate IV,
3023 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload and verify the tag.
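 *
 * For orientation (an illustrative summary, not normative): the on-the-wire
 * AEAD fragment handled here is
 *
 *     explicit_nonce (EVP_GCM_TLS_EXPLICIT_IV_LEN = 8 bytes) ||
 *     ciphertext || tag (EVP_GCM_TLS_TAG_LEN = 16 bytes)
 *
 * while the 13-byte AAD installed via EVP_CTRL_AEAD_TLS1_AAD is
 * seq_num(8) || type(1) || version(2) || length(2), with the length already
 * corrected to the plaintext length by the ctrl handler above.
 */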
3027 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3028 const unsigned char *in, size_t len)
3030 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3032 /* Encrypt/decrypt must be performed in place */
3034 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3037 * Set IV from start of buffer or generate IV and write to start of
3040 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3041 : EVP_CTRL_GCM_SET_IV_INV,
3042 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3045 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3047 /* Fix buffer and length to point to payload */
3048 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3049 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3050 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3052 /* Encrypt payload */
3055 #if defined(AES_GCM_ASM)
3056 if (len >= 32 && AES_GCM_ASM(gctx)) {
3057 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3060 bulk = AES_gcm_encrypt(in, out, len,
3062 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3063 gctx->gcm.len.u[1] += bulk;
3066 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3069 len - bulk, gctx->ctr))
3073 #if defined(AES_GCM_ASM2)
3074 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3075 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3078 bulk = AES_gcm_encrypt(in, out, len,
3080 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3081 gctx->gcm.len.u[1] += bulk;
3084 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3085 in + bulk, out + bulk, len - bulk))
3089 /* Finally write tag */
3090 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3091 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3096 #if defined(AES_GCM_ASM)
3097 if (len >= 16 && AES_GCM_ASM(gctx)) {
3098 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3101 bulk = AES_gcm_decrypt(in, out, len,
3103 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3104 gctx->gcm.len.u[1] += bulk;
3107 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3110 len - bulk, gctx->ctr))
3114 #if defined(AES_GCM_ASM2)
3115 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3116 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3119 bulk = AES_gcm_decrypt(in, out, len,
3121 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3122 gctx->gcm.len.u[1] += bulk;
3125 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3126 in + bulk, out + bulk, len - bulk))
3130 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3131 /* If tag mismatch wipe buffer */
3132 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3133 OPENSSL_cleanse(out, len);
3141 gctx->tls_aad_len = -1;
3145 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3146 const unsigned char *in, size_t len)
3148 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3149 /* If not set up, return error */
3153 if (gctx->tls_aad_len >= 0)
3154 return aes_gcm_tls_cipher(ctx, out, in, len);
3160 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3162 } else if (ctx->encrypt) {
3165 #if defined(AES_GCM_ASM)
3166 if (len >= 32 && AES_GCM_ASM(gctx)) {
3167 size_t res = (16 - gctx->gcm.mres) % 16;
3169 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3172 bulk = AES_gcm_encrypt(in + res,
3173 out + res, len - res,
3174 gctx->gcm.key, gctx->gcm.Yi.c,
3176 gctx->gcm.len.u[1] += bulk;
3180 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3183 len - bulk, gctx->ctr))
3187 #if defined(AES_GCM_ASM2)
3188 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3189 size_t res = (16 - gctx->gcm.mres) % 16;
3191 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3194 bulk = AES_gcm_encrypt(in + res,
3195 out + res, len - res,
3196 gctx->gcm.key, gctx->gcm.Yi.c,
3198 gctx->gcm.len.u[1] += bulk;
3202 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3203 in + bulk, out + bulk, len - bulk))
3209 #if defined(AES_GCM_ASM)
3210 if (len >= 16 && AES_GCM_ASM(gctx)) {
3211 size_t res = (16 - gctx->gcm.mres) % 16;
3213 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3216 bulk = AES_gcm_decrypt(in + res,
3217 out + res, len - res,
3219 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3220 gctx->gcm.len.u[1] += bulk;
3224 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3227 len - bulk, gctx->ctr))
3231 #if defined(AES_GCM_ASM2)
3232 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3233 size_t res = (16 - gctx->gcm.mres) % 16;
3235 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3238 bulk = AES_gcm_decrypt(in + res,
3239 out + res, len - res,
3241 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3242 gctx->gcm.len.u[1] += bulk;
3246 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3247 in + bulk, out + bulk, len - bulk))
3253 if (!ctx->encrypt) {
3254 if (gctx->taglen < 0)
3256 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3261 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3263 /* Don't reuse the IV */
3270 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3271 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3272 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3273 | EVP_CIPH_CUSTOM_COPY)
3275 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3276 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3277 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3278 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3279 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3280 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
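/*
 * A minimal caller-side sketch for the GCM ciphers registered above (error
 * checks omitted; key, iv, aad and buffer names are illustrative):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, ivlen, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);      (AAD pass)
 *     EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *     EVP_EncryptFinal_ex(c, ct + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 *
 * Decryption is symmetric, except that the expected tag is supplied with
 * EVP_CTRL_AEAD_SET_TAG before EVP_DecryptFinal_ex(), which then returns 0
 * on authentication failure.
 */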
3282 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3284 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3285 if (type == EVP_CTRL_COPY) {
3286 EVP_CIPHER_CTX *out = ptr;
3287 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3288 if (xctx->xts.key1) {
3289 if (xctx->xts.key1 != &xctx->ks1)
3291 xctx_out->xts.key1 = &xctx_out->ks1;
3293 if (xctx->xts.key2) {
3294 if (xctx->xts.key2 != &xctx->ks2)
3296 xctx_out->xts.key2 = &xctx_out->ks2;
3299 } else if (type != EVP_CTRL_INIT)
3301 /* key1 and key2 are used as an indicator that both key and IV are set */
3302 xctx->xts.key1 = NULL;
3303 xctx->xts.key2 = NULL;
3307 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3308 const unsigned char *iv, int enc)
3310 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3317 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3319 xctx->stream = NULL;
3321 /* key_len is two AES keys */
3322 #ifdef HWAES_CAPABLE
3323 if (HWAES_CAPABLE) {
3325 HWAES_set_encrypt_key(key,
3326 EVP_CIPHER_CTX_key_length(ctx) * 4,
3328 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3329 # ifdef HWAES_xts_encrypt
3330 xctx->stream = HWAES_xts_encrypt;
3333 HWAES_set_decrypt_key(key,
3334 EVP_CIPHER_CTX_key_length(ctx) * 4,
3336 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3337 # ifdef HWAES_xts_decrypt
3338 xctx->stream = HWAES_xts_decrypt;
3342 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3343 EVP_CIPHER_CTX_key_length(ctx) * 4,
3345 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3347 xctx->xts.key1 = &xctx->ks1;
3351 #ifdef BSAES_CAPABLE
3353 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3356 #ifdef VPAES_CAPABLE
3357 if (VPAES_CAPABLE) {
3359 vpaes_set_encrypt_key(key,
3360 EVP_CIPHER_CTX_key_length(ctx) * 4,
3362 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3364 vpaes_set_decrypt_key(key,
3365 EVP_CIPHER_CTX_key_length(ctx) * 4,
3367 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3370 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3371 EVP_CIPHER_CTX_key_length(ctx) * 4,
3373 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3375 xctx->xts.key1 = &xctx->ks1;
3379 (void)0; /* terminate potentially open 'else' */
3382 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3384 xctx->xts.block1 = (block128_f) AES_encrypt;
3386 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3388 xctx->xts.block1 = (block128_f) AES_decrypt;
3391 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3392 EVP_CIPHER_CTX_key_length(ctx) * 4,
3394 xctx->xts.block2 = (block128_f) AES_encrypt;
3396 xctx->xts.key1 = &xctx->ks1;
3400 xctx->xts.key2 = &xctx->ks2;
3401 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3407 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3408 const unsigned char *in, size_t len)
3410 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3411 if (!xctx->xts.key1 || !xctx->xts.key2)
3413 if (!out || !in || len < AES_BLOCK_SIZE)
3416 (*xctx->stream) (in, out, len,
3417 xctx->xts.key1, xctx->xts.key2,
3418 EVP_CIPHER_CTX_iv_noconst(ctx));
3419 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3421 EVP_CIPHER_CTX_encrypting(ctx)))
3426 #define aes_xts_cleanup NULL
3428 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3429 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3430 | EVP_CIPH_CUSTOM_COPY)
3432 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3433 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
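/*
 * Usage note for the XTS ciphers above (illustrative, error checks omitted):
 * the EVP key is two AES keys back to back (64 bytes for EVP_aes_256_xts()),
 * the 16-byte IV is the XTS tweak (typically a sector or block number), and
 * aes_xts_cipher() rejects inputs shorter than AES_BLOCK_SIZE:
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key64, tweak16);
 *     EVP_EncryptUpdate(c, out, &outl, in, inl);   (one call per data unit)
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 */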
3435 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3437 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3446 cctx->tls_aad_len = -1;
3449 case EVP_CTRL_AEAD_TLS1_AAD:
3450 /* Save the AAD for later use */
3451 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3453 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3454 cctx->tls_aad_len = arg;
3457 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3458 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3459 /* Correct length for explicit IV */
3460 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3462 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3463 /* If decrypting correct for tag too */
3464 if (!EVP_CIPHER_CTX_encrypting(c)) {
3469 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3470 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3472 /* Extra padding: tag appended to record */
3475 case EVP_CTRL_CCM_SET_IV_FIXED:
3476 /* Sanity check length */
3477 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3479 /* Just copy to first part of IV */
3480 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3483 case EVP_CTRL_AEAD_SET_IVLEN:
3486 case EVP_CTRL_CCM_SET_L:
3487 if (arg < 2 || arg > 8)
3492 case EVP_CTRL_AEAD_SET_TAG:
3493 if ((arg & 1) || arg < 4 || arg > 16)
3495 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3499 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3504 case EVP_CTRL_AEAD_GET_TAG:
3505 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3507 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3516 EVP_CIPHER_CTX *out = ptr;
3517 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3518 if (cctx->ccm.key) {
3519 if (cctx->ccm.key != &cctx->ks)
3521 cctx_out->ccm.key = &cctx_out->ks;
3532 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3533 const unsigned char *iv, int enc)
3535 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3540 #ifdef HWAES_CAPABLE
3541 if (HWAES_CAPABLE) {
3542 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3545 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3546 &cctx->ks, (block128_f) HWAES_encrypt);
3552 #ifdef VPAES_CAPABLE
3553 if (VPAES_CAPABLE) {
3554 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3556 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3557 &cctx->ks, (block128_f) vpaes_encrypt);
3563 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3565 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3566 &cctx->ks, (block128_f) AES_encrypt);
3571 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3577 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3578 const unsigned char *in, size_t len)
3580 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3581 CCM128_CONTEXT *ccm = &cctx->ccm;
3582 /* Encrypt/decrypt must be performed in place */
3583 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3585 /* If encrypting set explicit IV from sequence number (start of AAD) */
3586 if (EVP_CIPHER_CTX_encrypting(ctx))
3587 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3588 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3589 /* Get rest of IV from explicit IV */
3590 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3591 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3592 /* Correct length value */
3593 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3594 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3598 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3599 /* Fix buffer to point to payload */
3600 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3601 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3602 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3603 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3605 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3607 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3609 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3611 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3613 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3614 unsigned char tag[16];
3615 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3616 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3620 OPENSSL_cleanse(out, len);
3625 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3626 const unsigned char *in, size_t len)
3628 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3629 CCM128_CONTEXT *ccm = &cctx->ccm;
3630 /* If not set up, return error */
3634 if (cctx->tls_aad_len >= 0)
3635 return aes_ccm_tls_cipher(ctx, out, in, len);
3637 /* EVP_*Final() doesn't return any data */
3638 if (in == NULL && out != NULL)
3644 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3648 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3654 /* If we have AAD, the total message length must have been set first */
3655 if (!cctx->len_set && len)
3657 CRYPTO_ccm128_aad(ccm, in, len);
3660 /* If the length has not been set yet, do it now */
3661 if (!cctx->len_set) {
3662 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3667 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3668 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3670 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3676 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3678 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3679 unsigned char tag[16];
3680 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3681 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3687 OPENSSL_cleanse(out, len);
3695 #define aes_ccm_cleanup NULL
3697 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3698 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3699 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3700 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3701 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3702 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
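/*
 * A minimal caller-side sketch for the CCM ciphers registered above (error
 * checks omitted; key, nonce, aad and buffer names are illustrative). Unlike
 * GCM, the total plaintext length must be supplied before any AAD:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, noncelen, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(c, NULL, &outl, NULL, ptlen);      (length pass)
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);      (AAD pass)
 *     EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 */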
3709 /* Indicates if IV has been set */
3713 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3714 const unsigned char *iv, int enc)
3716 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3720 if (EVP_CIPHER_CTX_encrypting(ctx))
3721 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3724 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3730 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3731 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3736 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3737 const unsigned char *in, size_t inlen)
3739 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3741 /* AES wrap with padding has IV length of 4, without padding 8 */
3742 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3743 /* No final operation so always return zero length */
3746 /* Input length must always be non-zero */
3749 /* If decrypting need at least 16 bytes and multiple of 8 */
3750 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3752 /* If not padding input must be multiple of 8 */
3753 if (!pad && inlen & 0x7)
3755 if (is_partially_overlapping(out, in, inlen)) {
3756 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3760 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3761 /* If padding round up to multiple of 8 */
3763 inlen = (inlen + 7) / 8 * 8;
3768 * If not padding output will be exactly 8 bytes smaller than
3769 * input. If padding it will be at least 8 bytes smaller but we
3770 * don't know how much.
3776 if (EVP_CIPHER_CTX_encrypting(ctx))
3777 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3779 (block128_f) AES_encrypt);
3781 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3783 (block128_f) AES_decrypt);
3785 if (EVP_CIPHER_CTX_encrypting(ctx))
3786 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3787 out, in, inlen, (block128_f) AES_encrypt);
3789 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3790 out, in, inlen, (block128_f) AES_decrypt);
3792 return rv ? (int)rv : -1;
3795 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3796 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3797 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3799 static const EVP_CIPHER aes_128_wrap = {
3801 8, 16, 8, WRAP_FLAGS,
3802 aes_wrap_init_key, aes_wrap_cipher,
3804 sizeof(EVP_AES_WRAP_CTX),
3805 NULL, NULL, NULL, NULL
3808 const EVP_CIPHER *EVP_aes_128_wrap(void)
3810 return &aes_128_wrap;
3813 static const EVP_CIPHER aes_192_wrap = {
3815 8, 24, 8, WRAP_FLAGS,
3816 aes_wrap_init_key, aes_wrap_cipher,
3818 sizeof(EVP_AES_WRAP_CTX),
3819 NULL, NULL, NULL, NULL
3822 const EVP_CIPHER *EVP_aes_192_wrap(void)
3824 return &aes_192_wrap;
3827 static const EVP_CIPHER aes_256_wrap = {
3829 8, 32, 8, WRAP_FLAGS,
3830 aes_wrap_init_key, aes_wrap_cipher,
3832 sizeof(EVP_AES_WRAP_CTX),
3833 NULL, NULL, NULL, NULL
3836 const EVP_CIPHER *EVP_aes_256_wrap(void)
3838 return &aes_256_wrap;
3841 static const EVP_CIPHER aes_128_wrap_pad = {
3842 NID_id_aes128_wrap_pad,
3843 8, 16, 4, WRAP_FLAGS,
3844 aes_wrap_init_key, aes_wrap_cipher,
3846 sizeof(EVP_AES_WRAP_CTX),
3847 NULL, NULL, NULL, NULL
3850 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3852 return &aes_128_wrap_pad;
3855 static const EVP_CIPHER aes_192_wrap_pad = {
3856 NID_id_aes192_wrap_pad,
3857 8, 24, 4, WRAP_FLAGS,
3858 aes_wrap_init_key, aes_wrap_cipher,
3860 sizeof(EVP_AES_WRAP_CTX),
3861 NULL, NULL, NULL, NULL
3864 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3866 return &aes_192_wrap_pad;
3869 static const EVP_CIPHER aes_256_wrap_pad = {
3870 NID_id_aes256_wrap_pad,
3871 8, 32, 4, WRAP_FLAGS,
3872 aes_wrap_init_key, aes_wrap_cipher,
3874 sizeof(EVP_AES_WRAP_CTX),
3875 NULL, NULL, NULL, NULL
3878 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3880 return &aes_256_wrap_pad;
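/*
 * A minimal caller-side sketch for the key-wrap ciphers above (error checks
 * omitted; kek and buffer names are illustrative). Wrap mode is refused by
 * the EVP init functions unless the caller explicitly opts in:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(c, wrapped, &outl, key_material, keylen);
 *     EVP_EncryptFinal_ex(c, wrapped + outl, &tmpl);
 *     EVP_CIPHER_CTX_free(c);
 *
 * For the unpadded EVP_aes_*_wrap() ciphers the input must be a multiple of
 * 8 bytes (and, when unwrapping, at least 16 bytes) and the wrapped output
 * is 8 bytes longer than the input; the *_wrap_pad() variants accept any
 * non-zero input length.
 */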
3883 #ifndef OPENSSL_NO_OCB
3884 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3886 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3887 EVP_CIPHER_CTX *newc;
3888 EVP_AES_OCB_CTX *new_octx;
3894 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
3895 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3897 octx->data_buf_len = 0;
3898 octx->aad_buf_len = 0;
3901 case EVP_CTRL_AEAD_SET_IVLEN:
3902 /* IV len must be 1 to 15 */
3903 if (arg <= 0 || arg > 15)
3909 case EVP_CTRL_AEAD_SET_TAG:
3911 /* Tag len must be 0 to 16 */
3912 if (arg < 0 || arg > 16)
3918 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
3920 memcpy(octx->tag, ptr, arg);
3923 case EVP_CTRL_AEAD_GET_TAG:
3924 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
3927 memcpy(ptr, octx->tag, arg);
3931 newc = (EVP_CIPHER_CTX *)ptr;
3932 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
3933 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3934 &new_octx->ksenc.ks,
3935 &new_octx->ksdec.ks);
3943 # ifdef HWAES_CAPABLE
3944 # ifdef HWAES_ocb_encrypt
3945 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
3946 size_t blocks, const void *key,
3947 size_t start_block_num,
3948 unsigned char offset_i[16],
3949 const unsigned char L_[][16],
3950 unsigned char checksum[16]);
3952 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
3954 # ifdef HWAES_ocb_decrypt
3955 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
3956 size_t blocks, const void *key,
3957 size_t start_block_num,
3958 unsigned char offset_i[16],
3959 const unsigned char L_[][16],
3960 unsigned char checksum[16]);
3962 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
3966 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3967 const unsigned char *iv, int enc)
3969 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3975 * We set both the encryption and decryption keys here because OCB
3976 * decryption needs both. We could possibly optimise this by not setting
3977 * the decryption key for an encryption-only operation.
3979 # ifdef HWAES_CAPABLE
3980 if (HWAES_CAPABLE) {
3981 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3983 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3985 if (!CRYPTO_ocb128_init(&octx->ocb,
3986 &octx->ksenc.ks, &octx->ksdec.ks,
3987 (block128_f) HWAES_encrypt,
3988 (block128_f) HWAES_decrypt,
3989 enc ? HWAES_ocb_encrypt
3990 : HWAES_ocb_decrypt))
3995 # ifdef VPAES_CAPABLE
3996 if (VPAES_CAPABLE) {
3997 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3999 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4001 if (!CRYPTO_ocb128_init(&octx->ocb,
4002 &octx->ksenc.ks, &octx->ksdec.ks,
4003 (block128_f) vpaes_encrypt,
4004 (block128_f) vpaes_decrypt,
4010 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4012 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4014 if (!CRYPTO_ocb128_init(&octx->ocb,
4015 &octx->ksenc.ks, &octx->ksdec.ks,
4016 (block128_f) AES_encrypt,
4017 (block128_f) AES_decrypt,
4024 * If we have an IV we can set it directly, otherwise use the saved IV.
4026 if (iv == NULL && octx->iv_set)
4029 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
4036 /* If the key is set use the IV, otherwise save it for later */
4038 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
4040 memcpy(octx->iv, iv, octx->ivlen);
4046 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4047 const unsigned char *in, size_t len)
4051 int written_len = 0;
4052 size_t trailing_len;
4053 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4055 /* If IV or Key not set then return error */
4064 * We need to ensure that we only pass full blocks to the low-level OCB
4065 * routines. We do it here rather than in EVP_EncryptUpdate/
4066 * EVP_DecryptUpdate because we need to pass full blocks of AAD too,
4067 * and those routines don't support that.
4070 /* Are we dealing with AAD or normal data here? */
4072 buf = octx->aad_buf;
4073 buf_len = &(octx->aad_buf_len);
4075 buf = octx->data_buf;
4076 buf_len = &(octx->data_buf_len);
4078 if (is_partially_overlapping(out + *buf_len, in, len)) {
4079 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
4085 * If we've got a partially filled buffer from a previous call then
4086 * use that data first
4089 unsigned int remaining;
4091 remaining = AES_BLOCK_SIZE - (*buf_len);
4092 if (remaining > len) {
4093 memcpy(buf + (*buf_len), in, len);
4097 memcpy(buf + (*buf_len), in, remaining);
4100 * If we get here we've filled the buffer, so process it
4105 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4107 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4108 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4112 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4116 written_len = AES_BLOCK_SIZE;
4119 out += AES_BLOCK_SIZE;
4122 /* Do we have a partial block to handle at the end? */
4123 trailing_len = len % AES_BLOCK_SIZE;
4126 * If we've got some full blocks to handle, then process these first
4128 if (len != trailing_len) {
4130 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4132 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4133 if (!CRYPTO_ocb128_encrypt
4134 (&octx->ocb, in, out, len - trailing_len))
4137 if (!CRYPTO_ocb128_decrypt
4138 (&octx->ocb, in, out, len - trailing_len))
4141 written_len += len - trailing_len;
4142 in += len - trailing_len;
4145 /* Handle any trailing partial block */
4146 if (trailing_len > 0) {
4147 memcpy(buf, in, trailing_len);
4148 *buf_len = trailing_len;
4154 * First of all empty the buffer of any partial block that we might
4155 * have been provided - both for data and AAD
4157 if (octx->data_buf_len > 0) {
4158 if (EVP_CIPHER_CTX_encrypting(ctx)) {
4159 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4160 octx->data_buf_len))
4163 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4164 octx->data_buf_len))
4167 written_len = octx->data_buf_len;
4168 octx->data_buf_len = 0;
4170 if (octx->aad_buf_len > 0) {
4171 if (!CRYPTO_ocb128_aad
4172 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4174 octx->aad_buf_len = 0;
4176 /* If decrypting then verify */
4177 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4178 if (octx->taglen < 0)
4180 if (CRYPTO_ocb128_finish(&octx->ocb,
4181 octx->tag, octx->taglen) != 0)
4186 /* If encrypting then just get the tag */
4187 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4189 /* Don't reuse the IV */
4195 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4197 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4198 CRYPTO_ocb128_cleanup(&octx->ocb);
4202 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4203 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4204 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4205 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4206 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4207 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
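/*
 * A minimal caller-side sketch for the OCB ciphers registered above (error
 * checks omitted; key, iv, aad and buffer names are illustrative), following
 * the usual EVP AEAD pattern:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);      (AAD pass)
 *     EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *     EVP_EncryptFinal_ex(c, ct + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 */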
4208 #endif /* OPENSSL_NO_OCB */