1 |
/* ==================================================================== |
2 |
* Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved. |
3 |
* |
4 |
* Redistribution and use in source and binary forms, with or without |
5 |
* modification, are permitted provided that the following conditions |
6 |
* are met: |
7 |
* |
8 |
* 1. Redistributions of source code must retain the above copyright |
9 |
* notice, this list of conditions and the following disclaimer. |
10 |
* |
11 |
* 2. Redistributions in binary form must reproduce the above copyright |
12 |
* notice, this list of conditions and the following disclaimer in |
13 |
* the documentation and/or other materials provided with the |
14 |
* distribution. |
15 |
* |
16 |
* 3. All advertising materials mentioning features or use of this |
17 |
* software must display the following acknowledgment: |
18 |
* "This product includes software developed by the OpenSSL Project |
19 |
* for use in the OpenSSL Toolkit. (http://www.openssl.org/)" |
20 |
* |
21 |
* 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to |
22 |
* endorse or promote products derived from this software without |
23 |
* prior written permission. For written permission, please contact |
24 |
* openssl-core@openssl.org. |
25 |
* |
26 |
* 5. Products derived from this software may not be called "OpenSSL" |
27 |
* nor may "OpenSSL" appear in their names without prior written |
28 |
* permission of the OpenSSL Project. |
29 |
* |
30 |
* 6. Redistributions of any form whatsoever must retain the following |
31 |
* acknowledgment: |
32 |
* "This product includes software developed by the OpenSSL Project |
33 |
* for use in the OpenSSL Toolkit (http://www.openssl.org/)" |
34 |
* |
35 |
* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY |
36 |
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
37 |
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
38 |
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR |
39 |
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
40 |
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
41 |
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
42 |
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) |
43 |
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, |
44 |
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
45 |
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED |
46 |
* OF THE POSSIBILITY OF SUCH DAMAGE. |
47 |
* ==================================================================== |
48 |
* |
49 |
*/ |
50 |
|
51 |
#include <openssl/opensslconf.h> |
52 |
#ifndef OPENSSL_NO_AES |
53 |
#include <openssl/crypto.h> |
54 |
# include <openssl/evp.h> |
55 |
# include <openssl/err.h> |
56 |
# include <string.h> |
57 |
# include <assert.h> |
58 |
# include <openssl/aes.h> |
59 |
# include "evp_locl.h" |
60 |
# ifndef OPENSSL_FIPS |
61 |
# include "modes_lcl.h" |
62 |
# include <openssl/rand.h> |
63 |
|
64 |
/*
 * Cipher state for the plain (non-AEAD) AES modes.  One single-block
 * function plus an optional mode-specific bulk "stream" routine; the
 * union reflects that a given context is only ever CBC- or CTR-
 * accelerated, never both.
 */
typedef struct {
    AES_KEY ks;                 /* expanded AES key schedule */
    block128_f block;           /* single-block encrypt/decrypt routine */
    union {
        cbc128_f cbc;           /* bulk CBC routine, or NULL */
        ctr128_f ctr;           /* bulk CTR routine, or NULL */
    } stream;
} EVP_AES_KEY;
72 |
|
73 |
/* Cipher state for AES-GCM. */
typedef struct {
    AES_KEY ks;                 /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;         /* GHASH/counter state (crypto/modes) */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;                 /* expected tag length, -1 if not set */
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;               /* bulk CTR routine, or NULL */
} EVP_AES_GCM_CTX;
85 |
|
86 |
/*
 * Cipher state for AES-XTS: two independent schedules, ks1 for the data
 * units and ks2 for the tweak.
 */
typedef struct {
    AES_KEY ks1, ks2;           /* AES key schedules to use */
    XTS128_CONTEXT xts;
    /*
     * Optional assembler fast path that processes a whole request at
     * once; NULL means fall back to CRYPTO_xts128_encrypt().
     */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;
94 |
|
95 |
/* Cipher state for AES-CCM. */
typedef struct {
    AES_KEY ks;                 /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    CCM128_CONTEXT ccm;         /* CBC-MAC/CTR state (crypto/modes) */
    ccm128_f str;               /* bulk CCM routine, or NULL */
} EVP_AES_CCM_CTX;
105 |
|
106 |
/*
 * Largest byte count the CFB1 loop hands to the bit-oriented routine in
 * one call, chosen so len*8 cannot overflow size_t.
 */
# define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))

/* Prototypes for the optional assembler implementations. */
# ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
# endif
# ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
# endif
# ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
# endif
# ifdef AES_XTS_ASM
void AES_xts_encrypt(const char *inp, char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const char *inp, char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
# endif
151 |
|
152 |
/*
 * x86/x86_64 with SSE2: runtime capability detection for the VPAES,
 * bit-sliced and AES-NI implementations, plus AES-NI prototypes.  The
 * matching #endif for this #if is after the AES-NI BLOCK_CIPHER macros.
 */
# if defined(AES_ASM) && !defined(I386_ONLY) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) || \
        defined(__INTEL__) )

extern unsigned int OPENSSL_ia32cap_P[2];

/* Both VPAES and BSAES key off the SSSE3 bit (bit 41 of the capability
 * vector); AES-NI keys off bit 57. */
#  ifdef VPAES_ASM
#   define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
#  ifdef BSAES_ASM
#   define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
/*
 * AES-NI section
 */
#  define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
220 |
|
221 |
/*
 * Key setup for the AES-NI non-AEAD modes.  Chooses the decrypt schedule
 * only for ECB/CBC decryption (all other modes, including CTR/OFB/CFB
 * decryption, use the encrypt schedule), and wires the block and bulk
 * stream pointers in the EVP_AES_KEY accordingly.  Returns 1 on success,
 * 0 (with an EVP error raised) if key expansion failed.
 */
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        /* NB: cipher_data and &dat->ks alias; key schedule is first member */
        ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
252 |
|
253 |
/*
 * CBC do_cipher for AES-NI: hand the whole buffer to the assembler CBC
 * routine in one call.  cipher_data points at an EVP_AES_KEY whose first
 * member is the AES_KEY schedule, so it may be passed directly.
 */
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    const AES_KEY *ks = ctx->cipher_data;

    aesni_cbc_encrypt(in, out, len, ks, ctx->iv, ctx->encrypt);

    return 1;
}
260 |
|
261 |
/*
 * ECB do_cipher for AES-NI.  Inputs shorter than one block carry no
 * complete block, so they are silently accepted and ignored, matching
 * the generic ECB path.
 */
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    if (len < (size_t)ctx->cipher->block_size)
        return 1;

    aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

    return 1;
}
273 |
|
274 |
/*
 * The remaining non-AEAD modes need nothing AES-NI specific beyond the
 * block/stream pointers installed by aesni_init_key(), so the generic
 * implementations further down are reused under the aesni_* names.
 */
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
293 |
|
294 |
/*
 * GCM key/IV setup for AES-NI.  May be called with key only, IV only,
 * or both; calling with neither is a no-op.  GCM always uses the
 * encrypt schedule (CTR keystream).  If the key arrives before an IV,
 * a previously saved IV is replayed; if an IV arrives before the key,
 * it is stashed in gctx->iv for later.  Always returns 1.
 */
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/* The bulk GCM cipher is shared with the generic implementation. */
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
329 |
|
330 |
/*
 * XTS key/IV setup for AES-NI.  The supplied key is two concatenated
 * AES keys: the first half keys the data-unit cipher (ks1, encrypt or
 * decrypt schedule depending on direction), the second half keys the
 * tweak cipher (ks2, always an encrypt schedule).  Always returns 1.
 */
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;

    if (key) {
        /* key_len is two AES keys */
        if (enc) {
            /* key_len*4 bits = half the key, in bits */
            aesni_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + ctx->key_len / 2,
                              ctx->key_len * 4, &xctx->ks2);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        /* XTS tweak is always 16 bytes, kept in the EVP context IV */
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

/* The bulk XTS cipher is shared with the generic implementation. */
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
367 |
|
368 |
/*
 * CCM key/IV setup for AES-NI.  CCM always uses the encrypt schedule;
 * the direction only selects which ccm64 bulk routine to install.  The
 * nonce length is 15 - L per RFC 3610.  Always returns 1.
 */
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        /* nonce occupies the first 15 - L bytes of the IV slot */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

/* The bulk CCM cipher is shared with the generic implementation. */
# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
392 |
|
393 |
/*
 * EVP_CIPHER table generators.  On AES-NI capable builds each generator
 * emits BOTH an aesni_* and a generic aes_* EVP_CIPHER, and the public
 * EVP_aes_KEYLEN_mode() accessor selects between them at runtime via
 * AESNI_CAPABLE.  BLOCK_CIPHER_generic covers the plain modes (shared
 * EVP_AES_KEY state, no ctrl/cleanup); BLOCK_CIPHER_custom covers the
 * modes with their own context, init, cleanup and ctrl (GCM/XTS/CCM) —
 * note XTS advertises a double-length key.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# else

/* Non-AES-NI builds: only the generic aes_* tables are emitted. */
#  define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#  define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
# endif
463 |
|
464 |
/*
 * Emit the full set of plain-mode EVP_CIPHERs for one key length:
 * CBC, ECB, OFB, CFB128, CFB1, CFB8 and CTR.
 */
# define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
472 |
|
473 |
/*
 * Generic key setup for the non-AEAD modes, picking the best available
 * software implementation (bit-sliced BSAES, VPAES or plain AES) at run
 * time.  The decrypt schedule is used only for ECB/CBC decryption.
 *
 * NOTE on layout: the outer if/else straddles the #ifdef'd capability
 * branches.  Each branch ends in "} else" so the chain falls through to
 * the plain-AES compound statement, and the unindented "} else" closes
 * the decrypt side of the OUTER if.  Do not "fix" the braces without
 * tracing every preprocessor combination.
 */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc)
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
    } else
# ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
# endif
# ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
# endif
    {
        ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
# ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
# endif
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
536 |
|
537 |
/*
 * Generic CBC do_cipher.  Prefers the bulk stream routine installed by
 * aes_init_key(); otherwise falls back to the mode wrappers around the
 * single-block function.
 *
 * BUG FIX: the software-fallback decrypt branch previously called
 * CRYPTO_cbc128_encrypt, so decryption without a bulk CBC routine
 * produced garbage instead of plaintext.  It must use
 * CRYPTO_cbc128_decrypt.
 */
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
    else if (ctx->encrypt)
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);

    return 1;
}
551 |
|
552 |
/*
 * Generic ECB do_cipher: run the single-block function over every
 * complete block.  A trailing partial block (or an input shorter than
 * one block) is ignored, as the EVP framework never passes one here.
 */
static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;
    size_t bl = ctx->cipher->block_size;
    size_t done;

    if (len < bl)
        return 1;

    for (done = 0; done + bl <= len; done += bl)
        (*dat->block) (in + done, out + done, &dat->ks);

    return 1;
}
567 |
|
568 |
/* Generic OFB do_cipher: delegate to the shared OFB128 helper. */
static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *key = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_ofb128_encrypt(in, out, len, &key->ks, ctx->iv, &ctx->num,
                          key->block);
    return 1;
}
577 |
|
578 |
/* Generic CFB128 do_cipher: delegate to the shared CFB128 helper. */
static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *key = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_encrypt(in, out, len, &key->ks, ctx->iv, &ctx->num,
                          ctx->encrypt, key->block);
    return 1;
}
587 |
|
588 |
/* Generic CFB8 do_cipher: delegate to the shared 8-bit CFB helper. */
static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *key = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_8_encrypt(in, out, len, &key->ks, ctx->iv, &ctx->num,
                            ctx->encrypt, key->block);
    return 1;
}
597 |
|
598 |
/*
 * Generic CFB1 do_cipher.  With EVP_CIPH_FLAG_LENGTH_BITS, len is
 * already a bit count and is passed straight through.  Otherwise len is
 * bytes and must be multiplied by 8; the input is fed to the bit
 * routine in MAXBITCHUNK-byte chunks so that len * 8 can never
 * overflow size_t.
 */
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (ctx->flags & EVP_CIPH_FLAG_LENGTH_BITS) {
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        len -= MAXBITCHUNK;
    }
    if (len)
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);

    return 1;
}
620 |
|
621 |
/*
 * Generic CTR do_cipher.  Uses the 32-bit-counter bulk routine when one
 * was installed by key setup, otherwise the portable byte-at-a-time
 * helper.  ctx->buf holds the partially consumed keystream block and
 * ctx->num the offset into it, mirrored through the local 'num'.
 */
static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = ctx->num;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    ctx->iv, ctx->buf, &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              ctx->iv, ctx->buf, &num, dat->block);
    ctx->num = (size_t)num;
    return 1;
}
636 |
|
637 |
/* Instantiate EVP_aes_{128,192,256}_{cbc,ecb,ofb,cfb128,cfb1,cfb8,ctr}(). */
BLOCK_CIPHER_generic_pack(NID_aes, 128, EVP_CIPH_FLAG_FIPS)
    BLOCK_CIPHER_generic_pack(NID_aes, 192, EVP_CIPH_FLAG_FIPS)
    BLOCK_CIPHER_generic_pack(NID_aes, 256, EVP_CIPH_FLAG_FIPS)
640 |
|
641 |
/*
 * GCM context teardown: scrub the GHASH/key state, then release the IV
 * buffer — but only if it was heap-allocated (a long IV); short IVs
 * alias the EVP context's own iv array and must not be freed.
 */
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;
    unsigned char *iv_buf = gctx->iv;

    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (iv_buf != c->iv)
        OPENSSL_free(iv_buf);
    return 1;
}
649 |
|
650 |
/*
 * Increment an 8-byte big-endian counter by one, propagating the carry
 * from the least significant byte (counter[7]) upward and stopping as
 * soon as a byte does not wrap to zero.
 */
static void ctr64_inc(unsigned char *counter)
{
    int i;

    for (i = 7; i >= 0; i--) {
        if (++counter[i] != 0)
            break;
    }
}
665 |
|
666 |
/*
 * GCM ctrl handler.  Manages IV length/content, tag get/set, the TLS
 * AAD fast path and context copying.  Returns 1 on success, 0 on
 * failure, -1 for unknown ctrl types (EVP_CTRL_AEAD_TLS1_AAD returns
 * the tag length instead).  Note gctx->iv either aliases c->iv (short
 * IVs) or is heap-allocated (IVs longer than EVP_MAX_IV_LENGTH) — the
 * aliasing test gctx->iv != c->iv decides ownership everywhere.
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all per-operation state; IV storage starts as c->iv. */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
        if (arg <= 0)
            return 0;
# ifdef OPENSSL_FIPS
        /* FIPS mode forbids GCM IVs shorter than 96 bits */
        if (FIPS_module_mode() && !(c->flags & EVP_CIPH_FLAG_NON_FIPS_ALLOW)
            && arg < 12)
            return 0;
# endif
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            gctx->iv = OPENSSL_malloc(arg);
            if (!gctx->iv)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_GCM_SET_TAG:
        /* Expected tag is only meaningful when decrypting */
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_GCM_GET_TAG:
        /* Tag can only be retrieved after encryption finalised it */
        if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* Randomise the invocation field on the encrypt side */
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        /* Emit the current IV and bump the invocation counter (encrypt) */
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Install a received invocation field (decrypt side only) */
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes hold the TLS record length; rewrite
             * them to the plaintext length before hashing. */
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt)
                len -= EVP_GCM_TLS_TAG_LEN;
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
            /* Re-point the copied GCM state's key at the copy's own
             * schedule; an external key pointer cannot be duplicated. */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            /* Duplicate a heap IV; re-alias a context-embedded one */
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
                if (!gctx_out->iv)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
799 |
|
800 |
/*
 * Generic GCM key/IV setup.  The do { } while (0) with #ifdef'd breaks
 * selects the best available CTR implementation (BSAES, then VPAES,
 * then plain AES) for the keystream.  Key and IV may arrive in either
 * order across multiple calls: an IV seen before the key is stashed in
 * gctx->iv; a key seen before any IV replays a previously saved one.
 * Always returns 1.
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        do {
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
# ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
# else
            gctx->ctr = NULL;
# endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
859 |
|
860 |
/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */

/*
 * Returns the number of output bytes written, or -1 on any failure
 * (including tag mismatch).  Operates strictly in place (out == in),
 * and unconditionally invalidates the IV and saved AAD afterwards so a
 * nonce can never be reused across records.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in, out, len, gctx->ctr))
                goto err;
        } else {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in, out, len, gctx->ctr))
                goto err;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        /* CRYPTO_memcmp is constant-time: no timing oracle on the tag */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
930 |
|
931 |
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

    /* A key must be installed before anything can be processed. */
    if (!gctx->key_set)
        return -1;

    /* TLS record mode: AAD was supplied via control, delegate entirely. */
    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in == NULL) {
        /* Finalisation call from EVP_*Final(): no data in or out. */
        if (ctx->encrypt) {
            /* Produce the tag and stash it for EVP_CTRL_GCM_GET_TAG. */
            CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
            gctx->taglen = 16;
            /* Don't reuse the IV */
            gctx->iv_set = 0;
            return 0;
        }
        /* Decrypt: verify against the tag previously set by the caller. */
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }

    if (out == NULL) {
        /* AAD-only update: in holds additional authenticated data. */
        return CRYPTO_gcm128_aad(&gctx->gcm, in, len) ? -1 : (int)len;
    }

    /* Bulk payload processing, accelerated CTR path when available. */
    if (ctx->encrypt) {
        if (gctx->ctr) {
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in, out, len, gctx->ctr))
                return -1;
        } else if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len)) {
            return -1;
        }
    } else {
        if (gctx->ctr) {
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in, out, len, gctx->ctr))
                return -1;
        } else if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len)) {
            return -1;
        }
    }
    return len;
}
985 |
|
986 |
/*
 * Flags shared by the custom AEAD modes: default ASN1 parameter handling,
 * caller-managed IV, a custom cipher entry point (needed for AAD and tag
 * processing), always-call-init, a ctrl-based init hook, and custom
 * context copying (key schedule pointers must be fixed up on copy).
 */
# define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

/* AES-GCM for 128/192/256-bit keys: block size 1, default 12 byte IV */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
1000 |
|
1001 |
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = c->cipher_data;
    EVP_CIPHER_CTX *dst;
    EVP_AES_XTS_CTX *xctx_dst;

    switch (type) {
    case EVP_CTRL_INIT:
        /* key1 and key2 are used as an indicator both key and IV are set */
        xctx->xts.key1 = NULL;
        xctx->xts.key2 = NULL;
        return 1;

    case EVP_CTRL_COPY:
        dst = ptr;
        xctx_dst = dst->cipher_data;
        /*
         * A key schedule can only be duplicated when it lives inside the
         * context itself; repoint the copy at its own embedded schedule.
         */
        if (xctx->xts.key1 != NULL) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_dst->xts.key1 = &xctx_dst->ks1;
        }
        if (xctx->xts.key2 != NULL) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_dst->xts.key2 = &xctx_dst->ks2;
        }
        return 1;

    default:
        return -1;
    }
}
1025 |
|
1026 |
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    /* EVP_CIPH_ALWAYS_CALL_INIT: may be invoked with neither key nor IV */
    if (!iv && !key)
        return 1;

    if (key)
        do {
# ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
# else
            xctx->stream = NULL;
# endif
            /* key_len is two AES keys */
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                /*
                 * First half of the key buffer is the data key (ks1);
                 * key_len * 4 bits = half of key_len * 8 bits per key.
                 */
                if (enc) {
                    vpaes_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                /* Second half is the tweak key (ks2): always encrypt-only */
                vpaes_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            /* Generic C fallback: same layout as the VPAES path above */
            if (enc) {
                AES_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + ctx->key_len / 2,
                                ctx->key_len * 4, &xctx->ks2);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        /* key2 also serves as the "IV set" indicator (see aes_xts_ctrl) */
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}
1088 |
|
1089 |
static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;

    /* Both the data key and the tweak key must have been installed. */
    if (xctx->xts.key1 == NULL || xctx->xts.key2 == NULL)
        return 0;
    /* XTS needs real buffers and at least one full block of input. */
    if (out == NULL || in == NULL || len < AES_BLOCK_SIZE)
        return 0;
# ifdef OPENSSL_FIPS
    /* Requirement of SP800-38E */
    if (FIPS_module_mode() && !(ctx->flags & EVP_CIPH_FLAG_NON_FIPS_ALLOW) &&
        (len > (1UL << 20) * 16)) {
        EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_TOO_LARGE);
        return 0;
    }
# endif
    if (xctx->stream == NULL) {
        /* Generic mode implementation; direction chosen by ctx->encrypt. */
        if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
                                  ctx->encrypt))
            return 0;
    } else {
        /* Assembly fast path set up during key init. */
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2, ctx->iv);
    }
    return 1;
}
1113 |
|
1114 |
/* No per-context resources to release for XTS */
# define aes_xts_cleanup NULL

/* Like CUSTOM_FLAGS but without EVP_CIPH_FLAG_CUSTOM_CIPHER: XTS has no
 * AAD/tag handling, so the standard cipher entry point suffices. */
# define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

/* XTS is defined for 128 and 256 bit keys only (key material is two AES
 * keys, so the EVP key lengths are 32 and 64 bytes respectively). */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)
1124 |
|
1125 |
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        /* Fresh context: defaults are L = 8 length octets, M = 12 byte tag */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_CCM_SET_IVLEN:
        /* Nonce length N and length-field size L satisfy N + L = 15 */
        arg = 15 - arg;
        /* fall through */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_CCM_SET_TAG:
        /* Tag length must be an even value in 4..16 */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* An expected tag value may only be supplied when decrypting */
        if (c->encrypt && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(c->buf, ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_CCM_GET_TAG:
        /* Only valid after encryption has produced a tag */
        if (!c->encrypt || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* Tag, IV and message length are single use */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = out->cipher_data;
            /*
             * Repoint the copied key schedule at the copy's own embedded
             * schedule; an external schedule cannot be duplicated.
             */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
1185 |
|
1186 |
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    /* EVP_CIPH_ALWAYS_CALL_INIT: may be invoked with neither key nor IV */
    if (!iv && !key)
        return 1;
    if (key)
        do {
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
# endif
            /*
             * Generic C fallback.  Only the encrypt schedule is ever
             * needed: CCM uses the forward cipher in both directions.
             */
            AES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        /* CCM nonce is 15 - L octets long */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
1216 |
|
1217 |
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    /*
     * NOTE(review): this guard only fails when BOTH key and IV are
     * missing; a key-only or IV-only state passes -- confirm intended.
     */
    if (!cctx->iv_set && !cctx->key_set)
        return -1;
    /* Decryption requires the expected tag to be set beforehand */
    if (!ctx->encrypt && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            /*
             * out == in == NULL: len is the total plaintext length.
             * CCM must know it up front to format the first block.
             */
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* EVP_*Final() doesn't return any data */
    if (!in)
        return 0;
    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (ctx->encrypt) {
        /* Use the platform stream routine when one was installed */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;            /* assume failure until the tag verifies */
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                /* Constant-time compare against the caller-supplied tag */
                if (!CRYPTO_memcmp(tag, ctx->buf, cctx->M))
                    rv = len;
            }
        }
        /* On any failure wipe the (possibly partially written) output */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        /* IV, tag and length are single use: force fresh values next time */
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }

}
1276 |
|
1277 |
/* No per-context resources to release for CCM */
# define aes_ccm_cleanup NULL

/* AES-CCM for 128/192/256-bit keys; registered default IV length is 12 */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
1285 |
# endif |
1286 |
#endif |