/*
 * Copyright 2017 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
#include "internal/cryptlib.h"
#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include "internal/aria.h"
# include "internal/evp_int.h"
# include "internal/rand.h"
# include "modes_lcl.h"
# include "evp_locl.h"
22 /* ARIA subkey Structure */
27 /* ARIA GCM context */
32 } ks; /* ARIA subkey to use */
33 int key_set; /* Set if key initialised */
34 int iv_set; /* Set if an iv is set */
36 unsigned char *iv; /* Temporary IV store */
37 int ivlen; /* IV length */
39 int iv_gen; /* It is OK to generate IVs */
40 int tls_aad_len; /* TLS AAD length */
43 /* ARIA CCM context */
48 } ks; /* ARIA key schedule to use */
49 int key_set; /* Set if key initialised */
50 int iv_set; /* Set if an iv is set */
51 int tag_set; /* Set if tag is valid */
52 int len_set; /* Set if message length set */
53 int L, M; /* L and M parameters from RFC3610 */
54 int tls_aad_len; /* TLS AAD length */
59 /* The subkey for ARIA is generated. */
60 static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
61 const unsigned char *iv, int enc)
64 int mode = EVP_CIPHER_CTX_mode(ctx);
66 if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
67 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
68 EVP_CIPHER_CTX_get_cipher_data(ctx));
70 ret = aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
71 EVP_CIPHER_CTX_get_cipher_data(ctx));
73 EVPerr(EVP_F_ARIA_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
79 static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
80 size_t len, const ARIA_KEY *key,
81 unsigned char *ivec, const int enc)
85 CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
86 (block128_f) aria_encrypt);
88 CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
89 (block128_f) aria_encrypt);
92 static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
93 size_t length, const ARIA_KEY *key,
94 unsigned char *ivec, int *num, const int enc)
97 CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
98 (block128_f) aria_encrypt);
101 static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
102 size_t length, const ARIA_KEY *key,
103 unsigned char *ivec, int *num, const int enc)
105 CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
106 (block128_f) aria_encrypt);
109 static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
110 size_t length, const ARIA_KEY *key,
111 unsigned char *ivec, int *num, const int enc)
113 CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
114 (block128_f) aria_encrypt);
117 static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
118 const ARIA_KEY *key, const int enc)
120 aria_encrypt(in, out, key);
123 static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
124 size_t length, const ARIA_KEY *key,
125 unsigned char *ivec, int *num)
127 CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
128 (block128_f) aria_encrypt);
131 IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
132 NID_aria_128, 16, 16, 16, 128,
133 0, aria_init_key, NULL,
134 EVP_CIPHER_set_asn1_iv,
135 EVP_CIPHER_get_asn1_iv,
137 IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
138 NID_aria_192, 16, 24, 16, 128,
139 0, aria_init_key, NULL,
140 EVP_CIPHER_set_asn1_iv,
141 EVP_CIPHER_get_asn1_iv,
143 IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
144 NID_aria_256, 16, 32, 16, 128,
145 0, aria_init_key, NULL,
146 EVP_CIPHER_set_asn1_iv,
147 EVP_CIPHER_get_asn1_iv,
150 # define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
151 IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
152 IMPLEMENT_ARIA_CFBR(128,1)
153 IMPLEMENT_ARIA_CFBR(192,1)
154 IMPLEMENT_ARIA_CFBR(256,1)
155 IMPLEMENT_ARIA_CFBR(128,8)
156 IMPLEMENT_ARIA_CFBR(192,8)
157 IMPLEMENT_ARIA_CFBR(256,8)
159 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
160 static const EVP_CIPHER aria_##keylen##_##mode = { \
161 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
162 flags|EVP_CIPH_##MODE##_MODE, \
164 aria_##mode##_cipher, \
166 sizeof(EVP_ARIA_KEY), \
167 NULL,NULL,NULL,NULL }; \
168 const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
169 { return &aria_##keylen##_##mode; }
171 static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
172 const unsigned char *in, size_t len)
174 unsigned int num = EVP_CIPHER_CTX_num(ctx);
175 EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY,ctx);
177 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
178 EVP_CIPHER_CTX_iv_noconst(ctx),
179 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
180 (block128_f) aria_encrypt);
181 EVP_CIPHER_CTX_set_num(ctx, num);
185 BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
186 BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
187 BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)
/* Authenticated cipher modes (GCM/CCM) */
191 /* increment counter (64-bit int) by 1 */
192 static void ctr64_inc(unsigned char *counter)
207 static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
208 const unsigned char *iv, int enc)
211 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
216 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
218 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
219 (block128_f) aria_encrypt);
221 EVPerr(EVP_F_ARIA_GCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
226 * If we have an iv can set it directly, otherwise use saved IV.
228 if (iv == NULL && gctx->iv_set)
231 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
236 /* If key set use IV, otherwise copy */
238 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
240 memcpy(gctx->iv, iv, gctx->ivlen);
247 static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
249 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,c);
255 gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
256 gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
259 gctx->tls_aad_len = -1;
262 case EVP_CTRL_AEAD_SET_IVLEN:
265 /* Allocate memory for IV if needed */
266 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
267 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
268 OPENSSL_free(gctx->iv);
269 gctx->iv = OPENSSL_malloc(arg);
270 if (gctx->iv == NULL)
276 case EVP_CTRL_AEAD_SET_TAG:
277 if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
279 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
283 case EVP_CTRL_AEAD_GET_TAG:
284 if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
287 memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
290 case EVP_CTRL_GCM_SET_IV_FIXED:
291 /* Special case: -1 length restores whole IV */
293 memcpy(gctx->iv, ptr, gctx->ivlen);
298 * Fixed field must be at least 4 bytes and invocation field at least
301 if ((arg < 4) || (gctx->ivlen - arg) < 8)
304 memcpy(gctx->iv, ptr, arg);
305 if (EVP_CIPHER_CTX_encrypting(c)) {
306 if (c->drbg != NULL) {
307 if (RAND_DRBG_bytes(c->drbg, gctx->iv + arg, gctx->ivlen - arg) == 0)
309 } else if (RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) {
316 case EVP_CTRL_GCM_IV_GEN:
317 if (gctx->iv_gen == 0 || gctx->key_set == 0)
319 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
320 if (arg <= 0 || arg > gctx->ivlen)
322 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
324 * Invocation field will be at least 8 bytes in size and so no need
325 * to check wrap around or increment more than last 8 bytes.
327 ctr64_inc(gctx->iv + gctx->ivlen - 8);
331 case EVP_CTRL_GCM_SET_IV_INV:
332 if (gctx->iv_gen == 0 || gctx->key_set == 0
333 || EVP_CIPHER_CTX_encrypting(c))
335 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
336 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
340 case EVP_CTRL_AEAD_TLS1_AAD:
341 /* Save the AAD for later use */
342 if (arg != EVP_AEAD_TLS1_AAD_LEN)
344 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
345 gctx->tls_aad_len = arg;
348 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
349 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
350 /* Correct length for explicit IV */
351 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
353 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
354 /* If decrypting correct for tag too */
355 if (!EVP_CIPHER_CTX_encrypting(c)) {
356 if (len < EVP_GCM_TLS_TAG_LEN)
358 len -= EVP_GCM_TLS_TAG_LEN;
360 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
361 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
363 /* Extra padding: tag appended to record */
364 return EVP_GCM_TLS_TAG_LEN;
368 EVP_CIPHER_CTX *out = ptr;
369 EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX,out);
371 if (gctx->gcm.key != &gctx->ks)
373 gctx_out->gcm.key = &gctx_out->ks;
375 if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
376 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
378 gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
379 if (gctx_out->iv == NULL)
381 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
392 static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
393 const unsigned char *in, size_t len)
395 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
398 /* Encrypt/decrypt must be performed in place */
400 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
403 * Set IV from start of buffer or generate IV and write to start of
406 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
407 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
408 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
411 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
414 /* Fix buffer and length to point to payload */
415 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
416 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
417 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
418 if (EVP_CIPHER_CTX_encrypting(ctx)) {
419 /* Encrypt payload */
420 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
423 /* Finally write tag */
424 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
425 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
428 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
431 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
432 EVP_GCM_TLS_TAG_LEN);
433 /* If tag mismatch wipe buffer */
434 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
435 EVP_GCM_TLS_TAG_LEN)) {
436 OPENSSL_cleanse(out, len);
444 gctx->tls_aad_len = -1;
448 static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
449 const unsigned char *in, size_t len)
451 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
453 /* If not set up, return error */
457 if (gctx->tls_aad_len >= 0)
458 return aria_gcm_tls_cipher(ctx, out, in, len);
464 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
466 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
467 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
470 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
475 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
476 if (gctx->taglen < 0)
478 if (CRYPTO_gcm128_finish(&gctx->gcm,
479 EVP_CIPHER_CTX_buf_noconst(ctx),
485 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
487 /* Don't reuse the IV */
492 static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
493 const unsigned char *iv, int enc)
496 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
502 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
504 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
505 &cctx->ks, (block128_f) aria_encrypt);
507 EVPerr(EVP_F_ARIA_CCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
514 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
520 static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
522 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,c);
532 cctx->tls_aad_len = -1;
535 case EVP_CTRL_AEAD_TLS1_AAD:
536 /* Save the AAD for later use */
537 if (arg != EVP_AEAD_TLS1_AAD_LEN)
539 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
540 cctx->tls_aad_len = arg;
543 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
544 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
545 /* Correct length for explicit IV */
546 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
548 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
549 /* If decrypting correct for tag too */
550 if (!EVP_CIPHER_CTX_encrypting(c)) {
555 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
556 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
558 /* Extra padding: tag appended to record */
561 case EVP_CTRL_CCM_SET_IV_FIXED:
562 /* Sanity check length */
563 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
565 /* Just copy to first part of IV */
566 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
569 case EVP_CTRL_AEAD_SET_IVLEN:
572 case EVP_CTRL_CCM_SET_L:
573 if (arg < 2 || arg > 8)
577 case EVP_CTRL_AEAD_SET_TAG:
578 if ((arg & 1) || arg < 4 || arg > 16)
580 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
584 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
589 case EVP_CTRL_AEAD_GET_TAG:
590 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
592 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
601 EVP_CIPHER_CTX *out = ptr;
602 EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX,out);
604 if (cctx->ccm.key != &cctx->ks)
606 cctx_out->ccm.key = &cctx_out->ks;
616 static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
617 const unsigned char *in, size_t len)
619 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
620 CCM128_CONTEXT *ccm = &cctx->ccm;
622 /* Encrypt/decrypt must be performed in place */
623 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
625 /* If encrypting set explicit IV from sequence number (start of AAD) */
626 if (EVP_CIPHER_CTX_encrypting(ctx))
627 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
628 EVP_CCM_TLS_EXPLICIT_IV_LEN);
629 /* Get rest of IV from explicit IV */
630 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
631 EVP_CCM_TLS_EXPLICIT_IV_LEN);
632 /* Correct length value */
633 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
634 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
638 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
639 /* Fix buffer to point to payload */
640 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
641 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
642 if (EVP_CIPHER_CTX_encrypting(ctx)) {
643 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
644 : CRYPTO_ccm128_encrypt(ccm, in, out, len))
646 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
648 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
650 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
651 : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
652 unsigned char tag[16];
653 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
654 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
658 OPENSSL_cleanse(out, len);
663 static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
664 const unsigned char *in, size_t len)
666 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
667 CCM128_CONTEXT *ccm = &cctx->ccm;
669 /* If not set up, return error */
673 if (cctx->tls_aad_len >= 0)
674 return aria_ccm_tls_cipher(ctx, out, in, len);
676 /* EVP_*Final() doesn't return any data */
677 if (in == NULL && out != NULL)
683 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
687 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
693 /* If have AAD need message length */
694 if (!cctx->len_set && len)
696 CRYPTO_ccm128_aad(ccm, in, len);
699 /* If not set length yet do it */
700 if (!cctx->len_set) {
701 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
706 if (EVP_CIPHER_CTX_encrypting(ctx)) {
707 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
708 : CRYPTO_ccm128_encrypt(ccm, in, out, len))
714 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
716 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
717 unsigned char tag[16];
718 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
719 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
725 OPENSSL_cleanse(out, len);
733 #define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
734 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
735 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
736 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER)
738 #define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
739 static const EVP_CIPHER aria_##keylen##_##mode = { \
740 nid##_##keylen##_##nmode, \
741 blocksize, keylen/8, ivlen, \
742 ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
743 aria_##mode##_init_key, \
744 aria_##mode##_cipher, \
746 sizeof(EVP_ARIA_##MODE##_CTX), \
747 NULL,NULL,aria_##mode##_ctrl,NULL }; \
748 const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
749 { return (EVP_CIPHER*)&aria_##keylen##_##mode; }
751 BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
752 BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
753 BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)
755 BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
756 BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
757 BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)