2 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
4 * Licensed under the Apache License 2.0 (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
11 * IBM S390X support for AES GCM.
12 * This file is included by cipher_aes_gcm_hw.c
/*
 * Size of the buffer needed to GHASH an IV of length i (i != 12):
 * i rounded up to the next multiple of 16 ((i + 15) >> 4 << 4),
 * plus one extra 16-byte block holding 8 zero bytes and the 64-bit
 * IV bit length (see s390x_aes_gcm_setiv below).
 */
15 /* iv + padding length for iv lengths != 12 */
16 #define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
/*
 * Install an AES key into the CPACF KMA parameter block.
 *
 * Copies the raw key bytes into the kma.k field and derives the CPACF
 * function code from the key length via S390X_AES_FC(). S390X_DECRYPT is
 * OR-ed into the function code on the decrypt path (the enclosing
 * direction check is not visible in this excerpt).
 */
18 static int s390x_aes_gcm_initkey(PROV_GCM_CTX *ctx,
19 const unsigned char *key, size_t keylen)
21 PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
/* keylen selects AES-128/192/256; assumed to fit kma.k — set by caller */
24 memcpy(&actx->plat.s390x.param.kma.k, key, keylen);
25 actx->plat.s390x.fc = S390X_AES_FC(keylen);
27 actx->plat.s390x.fc |= S390X_DECRYPT;
/*
 * Derive J0 (the GCM pre-counter block) and the 32-bit counter from the IV.
 *
 * For the 96-bit default IV, J0 is simply the IV (the trailing counter word
 * is handled outside the visible lines). For any other length the IV is
 * zero-padded to a 16-byte multiple, followed by 8 zero bytes and the
 * 64-bit IV bit length, and that buffer is GHASHed with a payload-free KMA
 * call; the 128-bit GHASH result becomes J0 (NIST SP 800-38D, step J0).
 */
31 static int s390x_aes_gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv,
34 PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
35 S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
/* Reset all buffered residue lengths (message, AAD, keystream) */
41 actx->plat.s390x.mreslen = 0;
42 actx->plat.s390x.areslen = 0;
43 actx->plat.s390x.kreslen = 0;
45 if (ivlen == GCM_IV_DEFAULT_SIZE) {
46 memcpy(&kma->j0, iv, ivlen);
/* Non-96-bit IV: build the padded buffer to be GHASHed */
50 unsigned long long ivbits = ivlen << 3;
51 size_t len = S390X_gcm_ivpadlen(ivlen);
/* Worst-case sized stack buffer; ivlen <= GCM_IV_MAX_SIZE is assumed
 * to have been validated by the caller — TODO confirm */
52 unsigned char iv_zero_pad[S390X_gcm_ivpadlen(GCM_IV_MAX_SIZE)];
54 * The IV length needs to be zero padded to be a multiple of 16 bytes
55 * followed by 8 bytes of zeros and 8 bytes for the IV length.
56 * The GHASH of this value can then be calculated.
58 memcpy(iv_zero_pad, iv, ivlen);
59 memset(iv_zero_pad + ivlen, 0, len - ivlen);
/* Last 8 bytes: IV length in bits (big-endian on s390x) */
60 memcpy(iv_zero_pad + len - sizeof(ivbits), &ivbits, sizeof(ivbits));
62 * Calculate the ghash of the iv - the result is stored into the tag
/* KMA with AAD only and no payload computes GHASH into kma->t */
65 s390x_kma(iv_zero_pad, len, NULL, 0, NULL, actx->plat.s390x.fc, kma);
66 actx->plat.s390x.fc |= S390X_KMA_HS; /* The hash subkey is set */
68 /* Copy the 128 bit GHASH result into J0 and clear the tag */
69 kma->j0.g[0] = kma->t.g[0];
70 kma->j0.g[1] = kma->t.g[1];
73 /* Set the 32 bit counter */
/* Counter starts from the low word of J0 */
74 kma->cv.w = kma->j0.w[3];
/*
 * Finalize the GCM operation: flush any buffered AAD/message residue
 * through one last KMA call (LAAD|LPC mark both inputs as the final
 * chunks), then on the encrypt path emit the tag and on the decrypt
 * path compare the caller's tag against the computed one in constant
 * time (the enc/dec branch structure is not fully visible here).
 */
79 static int s390x_aes_gcm_cipher_final(PROV_GCM_CTX *ctx, unsigned char *tag)
81 PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
82 S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
/* Scratch output for the (at most one block of) residual message bytes */
83 unsigned char out[AES_BLOCK_SIZE];
88 s390x_kma(actx->plat.s390x.ares, actx->plat.s390x.areslen,
89 actx->plat.s390x.mres, actx->plat.s390x.mreslen, out,
90 actx->plat.s390x.fc | S390X_KMA_LAAD | S390X_KMA_LPC, kma);
92 /* gctx->mres already returned to the caller */
/* Wipe the scratch copy of the final plaintext/ciphertext bytes */
93 OPENSSL_cleanse(out, actx->plat.s390x.mreslen);
/* Encrypt: hand the full-size tag back to the caller */
96 ctx->taglen = GCM_TAG_MAX_SIZE;
97 memcpy(tag, kma->t.b, ctx->taglen);
/* Decrypt: constant-time comparison against the computed tag */
100 rc = (CRYPTO_memcmp(tag, kma->t.b, ctx->taglen) == 0);
/*
 * One-shot AEAD: process all AAD and the whole message in a single KMA
 * call. The total AAD and payload bit lengths are written into the
 * parameter block first, as KMA needs them to finalize the GHASH.
 * Encrypt copies the resulting tag out; decrypt compares it in constant
 * time (branch structure not fully visible in this excerpt).
 */
105 static int s390x_aes_gcm_one_shot(PROV_GCM_CTX *ctx,
106 unsigned char *aad, size_t aad_len,
107 const unsigned char *in, size_t in_len,
109 unsigned char *tag, size_t taglen)
111 PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
112 S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
/* Total lengths in bits, as required for the final GHASH block */
115 kma->taadl = aad_len << 3;
116 kma->tpcl = in_len << 3;
117 s390x_kma(aad, aad_len, in, in_len, out,
118 actx->plat.s390x.fc | S390X_KMA_LAAD | S390X_KMA_LPC, kma);
/* Encrypt: return the tag */
121 memcpy(tag, kma->t.b, taglen);
/* Decrypt: constant-time tag verification */
124 rc = (CRYPTO_memcmp(tag, kma->t.b, taglen) == 0);
130 * Process additional authenticated data. Returns 1 on success. Code is
133 static int s390x_aes_gcm_aad_update(PROV_GCM_CTX *ctx,
134 const unsigned char *aad, size_t len)
136 PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
137 S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
138 unsigned long long alen;
141 /* If already processed pt/ct then error */
145 /* update the total aad length */
146 alen = kma->taadl + len;
/* Reject AAD over 2^61 bytes (2^64 bits, the GCM limit) or on
 * unsigned wraparound of the running total */
147 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
151 /* check if there is any existing aad data from a previous add */
152 n = actx->plat.s390x.areslen;
154 /* add additional data to a buffer until it has 16 bytes */
156 actx->plat.s390x.ares[n] = *aad;
161 /* ctx->ares contains a complete block if offset has wrapped around */
/* Hash the completed 16-byte residue block (AAD only, no payload) */
163 s390x_kma(actx->plat.s390x.ares, 16, NULL, 0, NULL,
164 actx->plat.s390x.fc, kma);
/* Hash subkey is computed by the first KMA call; remember that */
165 actx->plat.s390x.fc |= S390X_KMA_HS;
167 actx->plat.s390x.areslen = n;
170 /* If there are leftover bytes (< 128 bits) save them for next time */
172 /* Add any remaining 16 byte blocks (128 bit each) */
175 s390x_kma(aad, len, NULL, 0, NULL, actx->plat.s390x.fc, kma);
176 actx->plat.s390x.fc |= S390X_KMA_HS;
/* Stash the trailing partial block for the next update/final call */
181 actx->plat.s390x.areslen = rem;
185 actx->plat.s390x.ares[rem] = aad[rem];
192 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 1 for
193 * success. Code is big-endian.
195 static int s390x_aes_gcm_cipher_update(PROV_GCM_CTX *ctx,
196 const unsigned char *in, size_t len,
199 PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
200 S390X_KMA_PARAMS *kma = &actx->plat.s390x.param.kma;
201 const unsigned char *inptr;
202 unsigned long long mlen;
/* Enforce the GCM plaintext limit of 2^36 - 32 bytes and guard
 * against wraparound of the running total */
210 mlen = kma->tpcl + len;
211 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
/* n = number of message bytes buffered from a previous call */
215 n = actx->plat.s390x.mreslen;
220 actx->plat.s390x.mres[n] = *inptr;
225 /* ctx->mres contains a complete block if offset has wrapped around */
/* Feed the completed residue block (plus any pending AAD residue,
 * flagged final via LAAD) through KMA */
227 s390x_kma(actx->plat.s390x.ares, actx->plat.s390x.areslen,
228 actx->plat.s390x.mres, 16, buf.b,
229 actx->plat.s390x.fc | S390X_KMA_LAAD, kma);
230 actx->plat.s390x.fc |= S390X_KMA_HS;
231 actx->plat.s390x.areslen = 0;
233 /* previous call already encrypted/decrypted its remainder,
234 * see comment below */
235 n = actx->plat.s390x.mreslen;
243 actx->plat.s390x.mreslen = 0;
/* Bulk path: process all whole 16-byte blocks in one KMA call */
251 s390x_kma(actx->plat.s390x.ares, actx->plat.s390x.areslen, in, len, out,
252 actx->plat.s390x.fc | S390X_KMA_LAAD, kma);
255 actx->plat.s390x.fc |= S390X_KMA_HS;
256 actx->plat.s390x.areslen = 0;
260 * If there is a remainder, it has to be saved such that it can be
261 * processed by kma later. However, we also have to do the for-now
262 * unauthenticated encryption/decryption part here and now...
/* Only generate fresh keystream if none is buffered for this block */
265 if (!actx->plat.s390x.mreslen) {
/* Build the next counter block: J0 words with counter value + 1 */
266 buf.w[0] = kma->j0.w[0];
267 buf.w[1] = kma->j0.w[1];
268 buf.w[2] = kma->j0.w[2];
269 buf.w[3] = kma->cv.w + 1;
/* Plain KM (ECB encrypt of the counter block) produces keystream;
 * fc & 0x1f presumably strips the KMA-specific flag bits down to
 * the base AES function code — verify against the CPACF spec */
270 s390x_km(buf.b, 16, actx->plat.s390x.kres,
271 actx->plat.s390x.fc & 0x1f, &kma->k);
/* XOR the trailing bytes with keystream now; they are GHASHed later
 * from mres by a subsequent update or the final call */
274 n = actx->plat.s390x.mreslen;
275 for (i = 0; i < rem; i++) {
276 actx->plat.s390x.mres[n + i] = in[i];
277 out[i] = in[i] ^ actx->plat.s390x.kres[n + i];
279 actx->plat.s390x.mreslen += rem;
/*
 * Dispatch table binding the s390x CPACF implementations to the generic
 * PROV_GCM_HW interface used by cipher_aes_gcm_hw.c.
 */
284 static const PROV_GCM_HW s390x_aes_gcm = {
285 s390x_aes_gcm_initkey,
287 s390x_aes_gcm_aad_update,
288 s390x_aes_gcm_cipher_update,
289 s390x_aes_gcm_cipher_final,
290 s390x_aes_gcm_one_shot
/*
 * Return the hardware-accelerated GCM vtable when the CPACF KMA facility
 * supports the requested AES key size; the fallback path for unsupported
 * sizes is outside this excerpt.
 */
293 const PROV_GCM_HW *PROV_AES_HW_gcm(size_t keybits)
295 if ((keybits == 128 && S390X_aes_128_gcm_CAPABLE)
296 || (keybits == 192 && S390X_aes_192_gcm_CAPABLE)
297 || (keybits == 256 && S390X_aes_256_gcm_CAPABLE))
298 return &s390x_aes_gcm;