/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    [email protected].
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <assert.h>
#include <limits.h>
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../../internal.h"
#include "../aes/internal.h"
#include "../modes/internal.h"
#include "../service_indicator/internal.h"
#include "../delocate.h"


OPENSSL_MSVC_PRAGMA(warning(push))
OPENSSL_MSVC_PRAGMA(warning(disable: 4702))  // Unreachable code.

#define AES_GCM_NONCE_LENGTH 12

#if defined(BSAES)
static void vpaes_ctr32_encrypt_blocks_with_bsaes(const uint8_t *in,
                                                  uint8_t *out, size_t blocks,
                                                  const AES_KEY *key,
                                                  const uint8_t ivec[16]) {
  // |bsaes_ctr32_encrypt_blocks| is faster than |vpaes_ctr32_encrypt_blocks|,
  // but it takes at least one full 8-block batch to amortize the conversion.
  if (blocks < 8) {
    vpaes_ctr32_encrypt_blocks(in, out, blocks, key, ivec);
    return;
  }

  size_t bsaes_blocks = blocks;
  if (bsaes_blocks % 8 < 6) {
    // |bsaes_ctr32_encrypt_blocks| internally works in 8-block batches. If the
    // final batch is too small (under six blocks), it is faster to loop over
    // |vpaes_encrypt|. Round |bsaes_blocks| down to a multiple of 8.
    bsaes_blocks -= bsaes_blocks % 8;
  }

  AES_KEY bsaes;
  vpaes_encrypt_key_to_bsaes(&bsaes, key);
  bsaes_ctr32_encrypt_blocks(in, out, bsaes_blocks, &bsaes, ivec);
  OPENSSL_cleanse(&bsaes, sizeof(bsaes));

  in += 16 * bsaes_blocks;
  out += 16 * bsaes_blocks;
  blocks -= bsaes_blocks;

  uint8_t new_ivec[16];
  memcpy(new_ivec, ivec, 12);
  uint32_t ctr = CRYPTO_load_u32_be(ivec + 12) + bsaes_blocks;
  CRYPTO_store_u32_be(new_ivec + 12, ctr);

  // Finish any remaining blocks with |vpaes_ctr32_encrypt_blocks|.
  vpaes_ctr32_encrypt_blocks(in, out, blocks, key, new_ivec);
}
#endif  // BSAES
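
// Worked example (illustrative comment, not part of the original code): in
// the 16-byte counter block above, bytes 0-11 hold the nonce and bytes 12-15
// hold a 32-bit big-endian block counter. If |ivec| ends in the counter value
// 5 and |bsaes_blocks| is 24, the hand-off IV for the vpaes tail keeps the
// same 12-byte nonce and stores the counter 29 (5 + 24), which is exactly
// what the |CRYPTO_load_u32_be|/|CRYPTO_store_u32_be| pair computes; the
// uint32_t addition also gives the intended wrap-around at 2^32.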

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  GCM128_CONTEXT gcm;
  union {
    double align;
    AES_KEY ks;
  } ks;         // AES key schedule to use
  int key_set;  // Set if key initialised
  int iv_set;   // Set if an iv is set
  uint8_t *iv;  // Temporary IV store
  int ivlen;    // IV length
  int taglen;
  int iv_gen;   // It is OK to generate IVs
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  const int mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;

  if (mode == EVP_CIPH_CTR_MODE) {
    switch (ctx->key_len) {
      case 16:
        boringssl_fips_inc_counter(fips_counter_evp_aes_128_ctr);
        break;

      case 32:
        boringssl_fips_inc_counter(fips_counter_evp_aes_256_ctr);
        break;
    }
  }

  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      assert(vpaes_capable());
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      if (ret == 0) {
        vpaes_decrypt_key_to_bsaes(&dat->ks.ks, &dat->ks.ks);
      }
      // If |dat->stream.cbc| is provided, |dat->block| is never used.
      dat->block = NULL;
      dat->stream.cbc = bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = vpaes_decrypt;
      dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = vpaes_cbc_encrypt;
      }
#endif
    } else {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_nohw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_nohw_cbc_encrypt;
      }
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = aes_hw_ctr32_encrypt_blocks;
    }
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = vpaes_encrypt;
    dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = vpaes_cbc_encrypt;
    }
#endif
    if (mode == EVP_CIPH_CTR_MODE) {
#if defined(BSAES)
      assert(bsaes_capable());
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks;
#endif
    }
  } else {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_nohw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_nohw_cbc_encrypt;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}
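
// Minimal sketch of how this initializer is reached through the public EVP
// interface (illustrative only; error handling omitted and the key/IV values
// below are placeholders, not real secrets):
//
//   EVP_CIPHER_CTX ctx;
//   EVP_CIPHER_CTX_init(&ctx);
//   uint8_t key[16] = {0}, iv[16] = {0};  // placeholder values
//   // |EVP_EncryptInit_ex| dispatches to |aes_init_key| above.
//   EVP_EncryptInit_ex(&ctx, EVP_aes_128_cbc(), NULL, key, iv);
//   EVP_CIPHER_CTX_cleanup(&ctx);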

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks.ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  // Process the input one whole block at a time; any trailing partial block
  // is ignored. (EVP's update path is expected to pass only whole blocks for
  // ECB.)
  len -= bl;
  for (size_t i = 0; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks.ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                                &ctx->num, dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                          &ctx->num, dat->block);
  }
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, &ctx->num,
                        dat->block);
  return 1;
}

ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_KEY *gcm_key,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  // This function assumes the key length was previously validated.
  assert(key_bytes == 128 / 8 || key_bytes == 192 / 8 || key_bytes == 256 / 8);
  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, (int)key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_hw_encrypt, 1);
    }
    if (out_block) {
      *out_block = aes_hw_encrypt;
    }
    return aes_hw_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, (int)key_bytes * 8, aes_key);
    if (out_block) {
      *out_block = vpaes_encrypt;
    }
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, vpaes_encrypt, 0);
    }
#if defined(BSAES)
    assert(bsaes_capable());
    return vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
    return vpaes_ctr32_encrypt_blocks;
#else
    return NULL;
#endif
  }

  aes_nohw_set_encrypt_key(key, (int)key_bytes * 8, aes_key);
  if (gcm_key != NULL) {
    CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
  }
  if (out_block) {
    *out_block = aes_nohw_encrypt;
  }
  return aes_nohw_ctr32_encrypt_blocks;
}
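
// Usage note (comment added for clarity): callers treat the returned
// |ctr128_f| as optional. |aes_ctr_cipher| above and |aes_gcm_cipher| below
// use it with the CTR32 fast paths when it is non-NULL, and otherwise fall
// back to the one-block-at-a-time paths driven by the |block128_f| written
// through |out_block| (or recorded in the GCM key).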

#if defined(OPENSSL_32_BIT)
#define EVP_AES_GCM_CTX_PADDING (4 + 8)
#else
#define EVP_AES_GCM_CTX_PADDING 8
#endif

static EVP_AES_GCM_CTX *aes_gcm_from_cipher_ctx(EVP_CIPHER_CTX *ctx) {
  static_assert(
      alignof(EVP_AES_GCM_CTX) <= 16,
      "EVP_AES_GCM_CTX needs more alignment than this function provides");

  // |malloc| guarantees up to 4-byte alignment on 32-bit and 8-byte alignment
  // on 64-bit systems, so we need to adjust to reach 16-byte alignment.
  assert(ctx->cipher->ctx_size ==
         sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING);

  char *ptr = ctx->cipher_data;
#if defined(OPENSSL_32_BIT)
  assert((uintptr_t)ptr % 4 == 0);
  ptr += (uintptr_t)ptr & 4;
#endif
  assert((uintptr_t)ptr % 8 == 0);
  ptr += (uintptr_t)ptr & 8;
  return (EVP_AES_GCM_CTX *)ptr;
}
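
// Worked example of the alignment fix-up above (illustrative comment): on a
// 64-bit system, |malloc| may return a pointer that is 8-byte but not 16-byte
// aligned, say one ending in 0x18. Then |(uintptr_t)ptr & 8| is 8, so |ptr|
// advances to the next address ending in 0x20, which is 16-byte aligned; if
// the pointer was already 16-byte aligned, the masked value is 0 and |ptr| is
// unchanged. The extra |EVP_AES_GCM_CTX_PADDING| bytes added to |ctx_size|
// guarantee the shifted struct still fits in the allocation.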

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);
  if (!iv && !key) {
    return 1;
  }

  switch (ctx->key_len) {
    case 16:
      boringssl_fips_inc_counter(fips_counter_evp_aes_128_gcm);
      break;

    case 32:
      boringssl_fips_inc_counter(fips_counter_evp_aes_256_gcm);
      break;
  }

  if (key) {
    OPENSSL_memset(&gctx->gcm, 0, sizeof(gctx->gcm));
    gctx->ctr = aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm.gcm_key, NULL, key,
                                ctx->key_len);
    // If we have an IV, set it directly; otherwise fall back to the saved IV.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // If the key is already set, use the IV now; otherwise save it for later.
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      // Allocate memory for IV if needed
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GET_IVLEN:
      *(int *)ptr = gctx->ivlen;
      return 1;

    case EVP_CTRL_AEAD_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_AEAD_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_AEAD_SET_IV_FIXED:
      // Special case: -1 length restores whole IV
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      // Fixed field must be at least 4 bytes and invocation field
      // at least 8.
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv, ptr, arg);
      if (c->encrypt) {
        // |RAND_bytes| calls within the fipsmodule should be wrapped with
        // state lock functions to avoid updating the service indicator with
        // the DRBG functions.
        FIPS_service_indicator_lock_state();
        RAND_bytes(gctx->iv + arg, gctx->ivlen - arg);
        FIPS_service_indicator_unlock_state();
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN: {
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      // Invocation field will be at least 8 bytes in size, so no need to
      // check wrap around or increment more than last 8 bytes.
      uint8_t *ctr = gctx->iv + gctx->ivlen - 8;
      CRYPTO_store_u64_be(ctr, CRYPTO_load_u64_be(ctr) + 1);
      gctx->iv_set = 1;
      return 1;
    }

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = aes_gcm_from_cipher_ctx(out);
      // |EVP_CIPHER_CTX_copy| copies this generically, but we must redo it in
      // case |out->cipher_data| and |in->cipher_data| are differently aligned.
      OPENSSL_memcpy(gctx_out, gctx, sizeof(EVP_AES_GCM_CTX));
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_memdup(gctx->iv, gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
      }
      return 1;
    }

    default:
      return -1;
  }
}
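
// Sketch of driving the tag controls above through the public API
// (illustrative only; assumes an initialized GCM |ctx| and placeholder
// values):
//
//   uint8_t tag[16];
//   // Encrypting: after the final update succeeds, read the tag out.
//   EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
//   // Decrypting: install the expected tag before the final call.
//   EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_AEAD_SET_TAG, sizeof(tag), tag);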

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);

  // If not set up, return error
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (len > INT_MAX) {
    // This function signature can only express up to |INT_MAX| bytes
    // encrypted.
    //
    // TODO(https://crbug.com/boringssl/494): Make the internal |EVP_CIPHER|
    // calling convention |size_t|-clean.
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out,
                                         len, gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out,
                                         len, gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return (int)len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    // Don't reuse the IV
    gctx->iv_set = 0;
    return 0;
  }
}
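
// Calling convention note (comment added for clarity): because this cipher
// sets |EVP_CIPH_FLAG_CUSTOM_CIPHER|, |aes_gcm_cipher| is invoked directly by
// the EVP update and final functions. Passing |in| with |out| == NULL
// supplies AAD, passing both supplies plaintext or ciphertext, and passing
// |in| == NULL (the final call) computes or verifies the tag. The return
// value is the number of input bytes processed (0 on the final call), or -1
// on error, hence the |INT_MAX| check above.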

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_cbc) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_cbc;
  out->block_size = 16;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_ctr) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ctr;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_ofb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ofb128;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_gcm;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_cbc) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_cbc;
  out->block_size = 16;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_ctr) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ctr;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_ofb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ofb128;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_gcm) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_gcm;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_cbc) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_cbc;
  out->block_size = 16;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_ctr) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ctr;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_ofb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ofb128;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_gcm;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

#if defined(HWAES_ECB)

static int aes_hw_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                             const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aes_hw_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

  return 1;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_128_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_192_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_256_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    if (hwaes_capable()) {                          \
      return aes_hw_##keybits##_ecb();              \
    }                                               \
    return aes_##keybits##_ecb_generic();           \
  }

#else

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    return aes_##keybits##_ecb_generic();           \
  }

#endif  // HWAES_ECB

EVP_ECB_CIPHER_FUNCTION(128)
EVP_ECB_CIPHER_FUNCTION(192)
EVP_ECB_CIPHER_FUNCTION(256)


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_KEY gcm_key;
  ctr128_f ctr;
};

static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
                                  size_t *out_tag_len, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  switch (key_bits) {
    case 128:
      boringssl_fips_inc_counter(fips_counter_evp_aes_128_gcm);
      break;

    case 256:
      boringssl_fips_inc_counter(fips_counter_evp_aes_256_gcm);
      break;
  }

  if (key_bits != 128 && key_bits != 192 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm_key, NULL, key, key_len);
  *out_tag_len = tag_len;
  return 1;
}

static_assert(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                  sizeof(struct aead_aes_gcm_ctx),
              "AEAD state is too small");
static_assert(alignof(union evp_aead_ctx_st_state) >=
                  alignof(struct aead_aes_gcm_ctx),
              "AEAD state has insufficient alignment");

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {}

static int aead_aes_gcm_seal_scatter_impl(
    const struct aead_aes_gcm_ctx *gcm_ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len,
    const uint8_t *extra_in, size_t extra_in_len, const uint8_t *ad,
    size_t ad_len, size_t tag_len) {
  // Guard against |extra_in_len + tag_len| overflowing.
  if (extra_in_len + tag_len < tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }
  if (max_out_tag_len < extra_in_len + tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  if (extra_in_len) {
    if (gcm_ctx->ctr) {
      if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
                                       extra_in_len, gcm_ctx->ctr)) {
        return 0;
      }
    } else {
      if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
        return 0;
      }
    }
  }

  CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, tag_len);
  *out_tag_len = tag_len + extra_in_len;

  return 1;
}
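
// Output layout note (comment added for clarity): on success, |out| holds the
// ciphertext for |in| while |out_tag| holds the ciphertext of |extra_in|
// followed by the tag, so |*out_tag_len| equals |extra_in_len| + |tag_len|:
//
//   out:     [ ciphertext of in ]
//   out_tag: [ ciphertext of extra_in ][ tag ]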

static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     uint8_t *out_tag, size_t *out_tag_len,
                                     size_t max_out_tag_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *extra_in,
                                     size_t extra_in_len, const uint8_t *ad,
                                     size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  return aead_aes_gcm_seal_scatter_impl(
      gcm_ctx, out, out_tag, out_tag_len, max_out_tag_len, nonce, nonce_len,
      in, in_len, extra_in, extra_in_len, ad, ad_len, ctx->tag_len);
}

static int aead_aes_gcm_open_gather_impl(
    const struct aead_aes_gcm_ctx *gcm_ctx, uint8_t *out, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *in_tag,
    size_t in_tag_len, const uint8_t *ad, size_t ad_len, size_t tag_len) {
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];

  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len != tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, tag_len);
  if (CRYPTO_memcmp(tag, in_tag, tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}

static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                    const uint8_t *nonce, size_t nonce_len,
                                    const uint8_t *in, size_t in_len,
                                    const uint8_t *in_tag, size_t in_tag_len,
                                    const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;
  if (!aead_aes_gcm_open_gather_impl(gcm_ctx, out, nonce, nonce_len, in,
                                     in_len, in_tag, in_tag_len, ad, ad_len,
                                     ctx->tag_len)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_192_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 24;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}
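
// Minimal sealing sketch against the public AEAD API (illustrative only;
// error handling omitted and the key/nonce/plaintext values below are
// placeholders):
//
//   EVP_AEAD_CTX aead;
//   uint8_t key[16] = {0}, nonce[12] = {0};  // placeholder values
//   EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, sizeof(key),
//                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
//   uint8_t sealed[2 + EVP_AEAD_AES_GCM_TAG_LEN];
//   size_t sealed_len;
//   EVP_AEAD_CTX_seal(&aead, sealed, &sealed_len, sizeof(sealed), nonce,
//                     sizeof(nonce), (const uint8_t *)"hi", 2, NULL, 0);
//   EVP_AEAD_CTX_cleanup(&aead);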

static int aead_aes_gcm_init_randnonce(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                       size_t key_len,
                                       size_t requested_tag_len) {
  if (requested_tag_len != EVP_AEAD_DEFAULT_TAG_LENGTH) {
    if (requested_tag_len < AES_GCM_NONCE_LENGTH) {
      OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
      return 0;
    }
    requested_tag_len -= AES_GCM_NONCE_LENGTH;
  }

  if (!aead_aes_gcm_init(ctx, key, key_len, requested_tag_len)) {
    return 0;
  }

  ctx->tag_len += AES_GCM_NONCE_LENGTH;
  return 1;
}

static int aead_aes_gcm_seal_scatter_randnonce(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *external_nonce,
    size_t external_nonce_len, const uint8_t *in, size_t in_len,
    const uint8_t *extra_in, size_t extra_in_len, const uint8_t *ad,
    size_t ad_len) {
  if (external_nonce_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  uint8_t nonce[AES_GCM_NONCE_LENGTH];
  if (max_out_tag_len < sizeof(nonce)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  // |RAND_bytes| calls within the fipsmodule should be wrapped with state lock
  // functions to avoid updating the service indicator with the DRBG functions.
  FIPS_service_indicator_lock_state();
  RAND_bytes(nonce, sizeof(nonce));
  FIPS_service_indicator_unlock_state();

  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  if (!aead_aes_gcm_seal_scatter_impl(gcm_ctx, out, out_tag, out_tag_len,
                                      max_out_tag_len - AES_GCM_NONCE_LENGTH,
                                      nonce, sizeof(nonce), in, in_len,
                                      extra_in, extra_in_len, ad, ad_len,
                                      ctx->tag_len - AES_GCM_NONCE_LENGTH)) {
    return 0;
  }

  assert(*out_tag_len + sizeof(nonce) <= max_out_tag_len);
  memcpy(out_tag + *out_tag_len, nonce, sizeof(nonce));
  *out_tag_len += sizeof(nonce);

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

static int aead_aes_gcm_open_gather_randnonce(
    const EVP_AEAD_CTX *ctx, uint8_t *out, const uint8_t *external_nonce,
    size_t external_nonce_len, const uint8_t *in, size_t in_len,
    const uint8_t *in_tag, size_t in_tag_len, const uint8_t *ad,
    size_t ad_len) {
  if (external_nonce_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len < AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }
  const uint8_t *nonce = in_tag + in_tag_len - AES_GCM_NONCE_LENGTH;

  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  if (!aead_aes_gcm_open_gather_impl(
          gcm_ctx, out, nonce, AES_GCM_NONCE_LENGTH, in, in_len, in_tag,
          in_tag_len - AES_GCM_NONCE_LENGTH, ad, ad_len,
          ctx->tag_len - AES_GCM_NONCE_LENGTH)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_randnonce) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 0;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init_randnonce;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter_randnonce;
  out->open_gather = aead_aes_gcm_open_gather_randnonce;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_randnonce) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 0;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init_randnonce;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter_randnonce;
  out->open_gather = aead_aes_gcm_open_gather_randnonce;
}
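
// Wire format note (comment added for clarity): the randnonce variants take a
// zero-length external nonce, generate a random 12-byte nonce internally, and
// append it after the tag. The sealed output is therefore
// ciphertext || tag || nonce, which is why |overhead| and |max_tag_len| above
// are |EVP_AEAD_AES_GCM_TAG_LEN| + |AES_GCM_NONCE_LENGTH| (16 + 12 = 28).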

struct aead_aes_gcm_tls12_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
};

static_assert(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                  sizeof(struct aead_aes_gcm_tls12_ctx),
              "AEAD state is too small");
static_assert(alignof(union evp_aead_ctx_st_state) >=
                  alignof(struct aead_aes_gcm_tls12_ctx),
              "AEAD state has insufficient alignment");

static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *)&ctx->state;

  gcm_ctx->min_next_nonce = 0;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls12_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *)&ctx->state;

  if (nonce_len != AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing.
  uint64_t given_counter =
      CRYPTO_load_u64_be(nonce + nonce_len - sizeof(uint64_t));
  if (given_counter == UINT64_MAX || given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  if (!aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                 max_out_tag_len, nonce, nonce_len, in, in_len,
                                 extra_in, extra_in_len, ad, ad_len)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

struct aead_aes_gcm_tls13_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
  uint64_t mask;
  uint8_t first;
};

static_assert(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                  sizeof(struct aead_aes_gcm_tls13_ctx),
              "AEAD state is too small");
static_assert(alignof(union evp_aead_ctx_st_state) >=
                  alignof(struct aead_aes_gcm_tls13_ctx),
              "AEAD state has insufficient alignment");

static int aead_aes_gcm_tls13_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  gcm_ctx->min_next_nonce = 0;
  gcm_ctx->first = 1;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls13_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  if (nonce_len != AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing. See
  // https://tools.ietf.org/html/rfc8446#section-5.3 for details of the TLS
  // 1.3 nonce construction.
  uint64_t given_counter =
      CRYPTO_load_u64_be(nonce + nonce_len - sizeof(uint64_t));

  if (gcm_ctx->first) {
    // In the first call the sequence number will be zero and therefore the
    // given nonce will be 0 ^ mask = mask.
    gcm_ctx->mask = given_counter;
    gcm_ctx->first = 0;
  }
  given_counter ^= gcm_ctx->mask;

  if (given_counter == UINT64_MAX || given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  if (!aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                 max_out_tag_len, nonce, nonce_len, in, in_len,
                                 extra_in, extra_in_len, ad, ad_len)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}
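
// Worked example of the check above (illustrative comment): per RFC 8446, the
// TLS 1.3 per-record nonce is the 64-bit sequence number XORed into the low
// bytes of the static IV. If the static IV's low 64 bits are
// 0x1122334455667788, the first record (sequence 0) presents exactly that
// value, so |mask| is captured from it. Each later record XORs back to its
// sequence number, which must be at least |min_next_nonce|; a reused or
// reordered nonce therefore fails closed with |CIPHER_R_INVALID_NONCE|.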

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return hwaes_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}

OPENSSL_MSVC_PRAGMA(warning(pop))