Remove OPENSSL_NO_AES guards

no-aes is no longer a Configure option and therefore the OPENSSL_NO_AES
guards can be removed.

Reviewed-by: Richard Levitte <levitte@openssl.org>
Matt Caswell 2016-04-13 11:28:45 +01:00
parent 7ec8de16fe
commit 5158c763f5
13 changed files with 410 additions and 482 deletions
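The change is mechanical throughout: each #ifndef OPENSSL_NO_AES / #endif pair is dropped, and preprocessor directives that were nested inside such a guard move back one level of '#' indentation (for example, "# ifndef OPENSSL_NO_OCB" becomes "#ifndef OPENSSL_NO_OCB"). A minimal sketch of the before/after pattern, assuming a typical guarded block and not copied from any single hunk below:

/* Before: AES code was only compiled when the (now removed) no-aes
 * Configure option had not been given.
 */
#ifndef OPENSSL_NO_AES
# include <openssl/aes.h>
# ifndef OPENSSL_NO_OCB
   /* OCB-only code, indented one extra level under the AES guard */
# endif
#endif

/* After: AES is always built, so the outer guard disappears and the
 * nested directive drops back one indentation level.
 */
#include <openssl/aes.h>
#ifndef OPENSSL_NO_OCB
   /* OCB-only code */
#endif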

View file

@ -214,11 +214,9 @@ OPTIONS cms_options[] = {
{"receipt_request_to", OPT_RR_TO, 's'},
{"", OPT_CIPHER, '-', "Any supported cipher"},
OPT_V_OPTIONS,
# ifndef OPENSSL_NO_AES
{"aes128-wrap", OPT_AES128_WRAP, '-', "Use AES128 to wrap key"},
{"aes192-wrap", OPT_AES192_WRAP, '-', "Use AES192 to wrap key"},
{"aes256-wrap", OPT_AES256_WRAP, '-', "Use AES256 to wrap key"},
# endif
# ifndef OPENSSL_NO_DES
{"des3-wrap", OPT_3DES_WRAP, '-', "Use 3DES-EDE to wrap key"},
# endif
@ -603,7 +601,6 @@ int cms_main(int argc, char **argv)
wrap_cipher = EVP_des_ede3_wrap();
# endif
break;
# ifndef OPENSSL_NO_AES
case OPT_AES128_WRAP:
wrap_cipher = EVP_aes_128_wrap();
break;
@ -613,12 +610,6 @@ int cms_main(int argc, char **argv)
case OPT_AES256_WRAP:
wrap_cipher = EVP_aes_256_wrap();
break;
# else
case OPT_AES128_WRAP:
case OPT_AES192_WRAP:
case OPT_AES256_WRAP:
break;
# endif
}
}
argc = opt_num_rest();

View file

@ -644,9 +644,6 @@ static int SortFnByName(const void *_f1, const void *_f2)
static void list_disabled(void)
{
BIO_puts(bio_out, "Disabled algorithms:\n");
#ifdef OPENSSL_NO_AES
BIO_puts(bio_out, "AES\n");
#endif
#ifdef OPENSSL_NO_BF
BIO_puts(bio_out, "BF\n");
#endif

View file

@ -248,24 +248,12 @@ static FUNCTION functions[] = {
#ifndef OPENSSL_NO_BLAKE2
{ FT_md, "blake2s256", dgst_main},
#endif
#ifndef OPENSSL_NO_AES
{ FT_cipher, "aes-128-cbc", enc_main, enc_options },
#endif
#ifndef OPENSSL_NO_AES
{ FT_cipher, "aes-128-ecb", enc_main, enc_options },
#endif
#ifndef OPENSSL_NO_AES
{ FT_cipher, "aes-192-cbc", enc_main, enc_options },
#endif
#ifndef OPENSSL_NO_AES
{ FT_cipher, "aes-192-ecb", enc_main, enc_options },
#endif
#ifndef OPENSSL_NO_AES
{ FT_cipher, "aes-256-cbc", enc_main, enc_options },
#endif
#ifndef OPENSSL_NO_AES
{ FT_cipher, "aes-256-ecb", enc_main, enc_options },
#endif
#ifndef OPENSSL_NO_CAMELLIA
{ FT_cipher, "camellia-128-cbc", enc_main, enc_options },
#endif

View file

@ -99,9 +99,7 @@
#ifndef OPENSSL_NO_DES
# include <openssl/des.h>
#endif
#ifndef OPENSSL_NO_AES
# include <openssl/aes.h>
#endif
#include <openssl/aes.h>
#ifndef OPENSSL_NO_CAMELLIA
# include <openssl/camellia.h>
#endif
@ -249,7 +247,6 @@ static int RC4_loop(void *args);
static int DES_ncbc_encrypt_loop(void *args);
static int DES_ede3_cbc_encrypt_loop(void *args);
#endif
#ifndef OPENSSL_NO_AES
static int AES_cbc_128_encrypt_loop(void *args);
static int AES_cbc_192_encrypt_loop(void *args);
static int AES_ige_128_encrypt_loop(void *args);
@ -257,7 +254,6 @@ static int AES_cbc_256_encrypt_loop(void *args);
static int AES_ige_192_encrypt_loop(void *args);
static int AES_ige_256_encrypt_loop(void *args);
static int CRYPTO_gcm128_aad_loop(void *args);
#endif
static int EVP_Update_loop(void *args);
static int EVP_Digest_loop(void *args);
#ifndef OPENSSL_NO_RSA
@ -508,14 +504,12 @@ static OPT_PAIR doit_choices[] = {
{"des-cbc", D_CBC_DES},
{"des-ede3", D_EDE3_DES},
#endif
#ifndef OPENSSL_NO_AES
{"aes-128-cbc", D_CBC_128_AES},
{"aes-192-cbc", D_CBC_192_AES},
{"aes-256-cbc", D_CBC_256_AES},
{"aes-128-ige", D_IGE_128_AES},
{"aes-192-ige", D_IGE_192_AES},
{"aes-256-ige", D_IGE_256_AES},
#endif
#ifndef OPENSSL_NO_RC2
{"rc2-cbc", D_CBC_RC2},
{"rc2", D_CBC_RC2},
@ -819,14 +813,9 @@ static int DES_ede3_cbc_encrypt_loop(void *args)
}
#endif
#ifndef OPENSSL_NO_AES
# define MAX_BLOCK_SIZE 128
#else
# define MAX_BLOCK_SIZE 64
#endif
#define MAX_BLOCK_SIZE 128
static unsigned char iv[2 * MAX_BLOCK_SIZE / 8];
#ifndef OPENSSL_NO_AES
static AES_KEY aes_ks1, aes_ks2, aes_ks3;
static int AES_cbc_128_encrypt_loop(void *args)
{
@ -914,8 +903,6 @@ static int CRYPTO_gcm128_aad_loop(void *args)
return count;
}
#endif
static int decrypt = 0;
static int EVP_Update_loop(void *args)
{
@ -1294,7 +1281,6 @@ int speed_main(int argc, char **argv)
0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0,
0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x12
};
#ifndef OPENSSL_NO_AES
static const unsigned char key24[24] = {
0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0,
0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x12,
@ -1306,7 +1292,6 @@ int speed_main(int argc, char **argv)
0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x12, 0x34,
0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x12, 0x34, 0x56
};
#endif
#ifndef OPENSSL_NO_CAMELLIA
static const unsigned char ckey24[24] = {
0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0,
@ -1542,13 +1527,11 @@ int speed_main(int argc, char **argv)
continue;
}
#endif
#ifndef OPENSSL_NO_AES
if (strcmp(*argv, "aes") == 0) {
doit[D_CBC_128_AES] = doit[D_CBC_192_AES] =
doit[D_CBC_256_AES] = 1;
continue;
}
#endif
#ifndef OPENSSL_NO_CAMELLIA
if (strcmp(*argv, "camellia") == 0) {
doit[D_CBC_128_CML] = doit[D_CBC_192_CML] =
@ -1675,11 +1658,9 @@ int speed_main(int argc, char **argv)
DES_set_key_unchecked(&key2, &sch2);
DES_set_key_unchecked(&key3, &sch3);
#endif
#ifndef OPENSSL_NO_AES
AES_set_encrypt_key(key16, 128, &aes_ks1);
AES_set_encrypt_key(key24, 192, &aes_ks2);
AES_set_encrypt_key(key32, 256, &aes_ks3);
#endif
#ifndef OPENSSL_NO_CAMELLIA
Camellia_set_key(key16, 128, &camellia_ks1);
Camellia_set_key(ckey24, 192, &camellia_ks2);
@ -2079,7 +2060,7 @@ int speed_main(int argc, char **argv)
}
}
#endif
#ifndef OPENSSL_NO_AES
if (doit[D_CBC_128_AES]) {
for (testnum = 0; testnum < SIZE_NUM; testnum++) {
print_message(names[D_CBC_128_AES], c[D_CBC_128_AES][testnum],
@ -2157,7 +2138,7 @@ int speed_main(int argc, char **argv)
for (i = 0; i < loopargs_len; i++)
CRYPTO_gcm128_release(loopargs[i].gcm_ctx);
}
#endif
#ifndef OPENSSL_NO_CAMELLIA
if (doit[D_CBC_128_CML]) {
for (testnum = 0; testnum < SIZE_NUM; testnum++) {
@ -2710,9 +2691,7 @@ int speed_main(int argc, char **argv)
#ifndef OPENSSL_NO_DES
printf("%s ", DES_options());
#endif
#ifndef OPENSSL_NO_AES
printf("%s ", AES_options());
#endif
#ifndef OPENSSL_NO_IDEA
printf("%s ", idea_options());
#endif

View file

@ -167,7 +167,6 @@ void openssl_add_all_ciphers_int(void)
EVP_add_cipher_alias(SN_rc5_cbc, "RC5");
#endif
#ifndef OPENSSL_NO_AES
EVP_add_cipher(EVP_aes_128_ecb());
EVP_add_cipher(EVP_aes_128_cbc());
EVP_add_cipher(EVP_aes_128_cfb());
@ -176,9 +175,9 @@ void openssl_add_all_ciphers_int(void)
EVP_add_cipher(EVP_aes_128_ofb());
EVP_add_cipher(EVP_aes_128_ctr());
EVP_add_cipher(EVP_aes_128_gcm());
# ifndef OPENSSL_NO_OCB
#ifndef OPENSSL_NO_OCB
EVP_add_cipher(EVP_aes_128_ocb());
# endif
#endif
EVP_add_cipher(EVP_aes_128_xts());
EVP_add_cipher(EVP_aes_128_ccm());
EVP_add_cipher(EVP_aes_128_wrap());
@ -194,9 +193,9 @@ void openssl_add_all_ciphers_int(void)
EVP_add_cipher(EVP_aes_192_ofb());
EVP_add_cipher(EVP_aes_192_ctr());
EVP_add_cipher(EVP_aes_192_gcm());
# ifndef OPENSSL_NO_OCB
#ifndef OPENSSL_NO_OCB
EVP_add_cipher(EVP_aes_192_ocb());
# endif
#endif
EVP_add_cipher(EVP_aes_192_ccm());
EVP_add_cipher(EVP_aes_192_wrap());
EVP_add_cipher_alias(SN_id_aes192_wrap, "aes192-wrap");
@ -211,9 +210,9 @@ void openssl_add_all_ciphers_int(void)
EVP_add_cipher(EVP_aes_256_ofb());
EVP_add_cipher(EVP_aes_256_ctr());
EVP_add_cipher(EVP_aes_256_gcm());
# ifndef OPENSSL_NO_OCB
#ifndef OPENSSL_NO_OCB
EVP_add_cipher(EVP_aes_256_ocb());
# endif
#endif
EVP_add_cipher(EVP_aes_256_xts());
EVP_add_cipher(EVP_aes_256_ccm());
EVP_add_cipher(EVP_aes_256_wrap());
@ -225,7 +224,6 @@ void openssl_add_all_ciphers_int(void)
EVP_add_cipher(EVP_aes_256_cbc_hmac_sha1());
EVP_add_cipher(EVP_aes_128_cbc_hmac_sha256());
EVP_add_cipher(EVP_aes_256_cbc_hmac_sha256());
#endif
#ifndef OPENSSL_NO_CAMELLIA
EVP_add_cipher(EVP_camellia_128_ecb());

View file

@ -49,16 +49,15 @@
*/
#include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_AES
# include <openssl/crypto.h>
# include <openssl/evp.h>
# include <openssl/err.h>
# include <string.h>
# include <assert.h>
# include <openssl/aes.h>
# include "internal/evp_int.h"
# include "modes_lcl.h"
# include <openssl/rand.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <assert.h>
#include <openssl/aes.h>
#include "internal/evp_int.h"
#include "modes_lcl.h"
#include <openssl/rand.h>
typedef struct {
union {
@ -115,7 +114,7 @@ typedef struct {
ccm128_f str;
} EVP_AES_CCM_CTX;
# ifndef OPENSSL_NO_OCB
#ifndef OPENSSL_NO_OCB
typedef struct {
union {
double align;
@ -137,11 +136,11 @@ typedef struct {
int ivlen; /* IV length */
int taglen;
} EVP_AES_OCB_CTX;
# endif
#endif
# define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
# ifdef VPAES_ASM
#ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
@ -156,8 +155,8 @@ void vpaes_cbc_encrypt(const unsigned char *in,
unsigned char *out,
size_t length,
const AES_KEY *key, unsigned char *ivec, int enc);
# endif
# ifdef BSAES_ASM
#endif
#ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
size_t length, const AES_KEY *key,
unsigned char ivec[16], int enc);
@ -170,36 +169,36 @@ void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
size_t len, const AES_KEY *key1,
const AES_KEY *key2, const unsigned char iv[16]);
# endif
# ifdef AES_CTR_ASM
#endif
#ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
const unsigned char ivec[AES_BLOCK_SIZE]);
# endif
# ifdef AES_XTS_ASM
#endif
#ifdef AES_XTS_ASM
void AES_xts_encrypt(const char *inp, char *out, size_t len,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
void AES_xts_decrypt(const char *inp, char *out, size_t len,
const AES_KEY *key1, const AES_KEY *key2,
const unsigned char iv[16]);
# endif
#endif
# if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# ifdef VPAES_ASM
# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# endif
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# ifdef VPAES_ASM
# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# endif
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
#endif
# if defined(AES_ASM) && !defined(I386_ONLY) && ( \
#if defined(AES_ASM) && !defined(I386_ONLY) && ( \
((defined(__i386) || defined(__i386__) || \
defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
defined(__x86_64) || defined(__x86_64__) || \
@ -208,16 +207,16 @@ void AES_xts_decrypt(const char *inp, char *out, size_t len,
extern unsigned int OPENSSL_ia32cap_P[];
# ifdef VPAES_ASM
# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
# ifdef BSAES_ASM
# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
# ifdef VPAES_ASM
# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
# ifdef BSAES_ASM
# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
/*
* AES-NI section
*/
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
@ -268,25 +267,25 @@ void aesni_ccm64_decrypt_blocks(const unsigned char *in,
const unsigned char ivec[16],
unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
unsigned char *out,
size_t len,
const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_encrypt aesni_gcm_encrypt
# define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
unsigned char *out,
size_t len,
const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_decrypt aesni_gcm_decrypt
# define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
size_t len);
# define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
# define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
gctx->gcm.ghash==gcm_ghash_avx)
# define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
# define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
gctx->gcm.ghash==gcm_ghash_avx)
# undef AES_GCM_ASM2 /* minor size optimization */
# endif
# undef AES_GCM_ASM2 /* minor size optimization */
# endif
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
@ -346,23 +345,23 @@ static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
return 1;
}
# define aesni_ofb_cipher aes_ofb_cipher
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_cfb_cipher aes_cfb_cipher
# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_cfb8_cipher aes_cfb8_cipher
# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_cfb1_cipher aes_cfb1_cipher
# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aesni_ctr_cipher aes_ctr_cipher
# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
@ -399,7 +398,7 @@ static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aesni_gcm_cipher aes_gcm_cipher
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
@ -440,7 +439,7 @@ static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aesni_xts_cipher aes_xts_cipher
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
@ -466,11 +465,11 @@ static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aesni_ccm_cipher aes_ccm_cipher
# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const void *key,
size_t start_block_num,
@ -534,12 +533,12 @@ static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aesni_ocb_cipher aes_ocb_cipher
# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */
# endif /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
@ -560,7 +559,7 @@ static const EVP_CIPHER aes_##keylen##_##mode = { \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
@ -582,13 +581,13 @@ static const EVP_CIPHER aes_##keylen##_##mode = { \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
# elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
# include "sparc_arch.h"
# include "sparc_arch.h"
extern unsigned int OPENSSL_sparcv9cap_P[];
# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
@ -718,31 +717,31 @@ static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aes_t4_cbc_cipher aes_cbc_cipher
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_ecb_cipher aes_ecb_cipher
# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_ofb_cipher aes_ofb_cipher
# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_cfb_cipher aes_cfb_cipher
# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_cfb8_cipher aes_cfb8_cipher
# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_cfb1_cipher aes_cfb1_cipher
# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# define aes_t4_ctr_cipher aes_ctr_cipher
# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
@ -792,7 +791,7 @@ static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aes_t4_gcm_cipher aes_gcm_cipher
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
@ -852,7 +851,7 @@ static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aes_t4_xts_cipher aes_xts_cipher
# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
@ -877,11 +876,11 @@ static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aes_t4_ccm_cipher aes_ccm_cipher
# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
@ -931,12 +930,12 @@ static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# define aes_t4_ocb_cipher aes_ocb_cipher
# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */
# endif /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
@ -957,7 +956,7 @@ static const EVP_CIPHER aes_##keylen##_##mode = { \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
@ -979,9 +978,9 @@ static const EVP_CIPHER aes_##keylen##_##mode = { \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
# else
#else
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
flags|EVP_CIPH_##MODE##_MODE, \
@ -993,7 +992,7 @@ static const EVP_CIPHER aes_##keylen##_##mode = { \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
nid##_##keylen##_##mode,blocksize, \
(EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
@ -1006,28 +1005,28 @@ static const EVP_CIPHER aes_##keylen##_##mode = { \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
# endif
#endif
# if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
# if defined(BSAES_ASM)
# define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
# endif
# if defined(VPAES_ASM)
# define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
# endif
# define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
# define HWAES_set_encrypt_key aes_v8_set_encrypt_key
# define HWAES_set_decrypt_key aes_v8_set_decrypt_key
# define HWAES_encrypt aes_v8_encrypt
# define HWAES_decrypt aes_v8_decrypt
# define HWAES_cbc_encrypt aes_v8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
#if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
# if defined(BSAES_ASM)
# define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
# endif
# if defined(VPAES_ASM)
# define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
# endif
# define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
# define HWAES_set_encrypt_key aes_v8_set_encrypt_key
# define HWAES_set_decrypt_key aes_v8_set_decrypt_key
# define HWAES_encrypt aes_v8_encrypt
# define HWAES_decrypt aes_v8_decrypt
# define HWAES_cbc_encrypt aes_v8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
#endif
# if defined(HWAES_CAPABLE)
#if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
@ -1042,9 +1041,9 @@ void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
size_t len, const AES_KEY *key,
const unsigned char ivec[16]);
# endif
#endif
# define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
@ -1062,28 +1061,28 @@ static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
mode = EVP_CIPHER_CTX_mode(ctx);
if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
&& !enc)
# ifdef HWAES_CAPABLE
#ifdef HWAES_CAPABLE
if (HWAES_CAPABLE) {
ret = HWAES_set_decrypt_key(key,
EVP_CIPHER_CTX_key_length(ctx) * 8,
&dat->ks.ks);
dat->block = (block128_f) HWAES_decrypt;
dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
# ifdef HWAES_cbc_encrypt
if (mode == EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
} else
# endif
# ifdef BSAES_CAPABLE
} else
#endif
#ifdef BSAES_CAPABLE
if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&dat->ks.ks);
dat->block = (block128_f) AES_decrypt;
dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
} else
# endif
# ifdef VPAES_CAPABLE
#endif
#ifdef VPAES_CAPABLE
if (VPAES_CAPABLE) {
ret = vpaes_set_decrypt_key(key,
EVP_CIPHER_CTX_key_length(ctx) * 8,
@ -1092,7 +1091,7 @@ static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) vpaes_cbc_encrypt : NULL;
} else
# endif
#endif
{
ret = AES_set_decrypt_key(key,
EVP_CIPHER_CTX_key_length(ctx) * 8,
@ -1101,34 +1100,34 @@ static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) AES_cbc_encrypt : NULL;
} else
# ifdef HWAES_CAPABLE
#ifdef HWAES_CAPABLE
if (HWAES_CAPABLE) {
ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&dat->ks.ks);
dat->block = (block128_f) HWAES_encrypt;
dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
# ifdef HWAES_cbc_encrypt
if (mode == EVP_CIPH_CBC_MODE)
dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
# endif
# ifdef HWAES_ctr32_encrypt_blocks
if (mode == EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
else
# endif
# endif
(void)0; /* terminate potentially open 'else' */
} else
# endif
# ifdef BSAES_CAPABLE
#endif
#ifdef BSAES_CAPABLE
if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&dat->ks.ks);
dat->block = (block128_f) AES_encrypt;
dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
} else
# endif
# ifdef VPAES_CAPABLE
#endif
#ifdef VPAES_CAPABLE
if (VPAES_CAPABLE) {
ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&dat->ks.ks);
@ -1136,17 +1135,17 @@ static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) vpaes_cbc_encrypt : NULL;
} else
# endif
#endif
{
ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&dat->ks.ks);
dat->block = (block128_f) AES_encrypt;
dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
(cbc128_f) AES_cbc_encrypt : NULL;
# ifdef AES_CTR_ASM
#ifdef AES_CTR_ASM
if (mode == EVP_CIPH_CTR_MODE)
dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
# endif
#endif
}
if (ret < 0) {
@ -1454,21 +1453,21 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
if (key) {
do {
# ifdef HWAES_CAPABLE
#ifdef HWAES_CAPABLE
if (HWAES_CAPABLE) {
HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&gctx->ks.ks);
CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
(block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
# ifdef HWAES_ctr32_encrypt_blocks
gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
# else
gctx->ctr = NULL;
# endif
# endif
break;
} else
# endif
# ifdef BSAES_CAPABLE
#endif
#ifdef BSAES_CAPABLE
if (BSAES_CAPABLE) {
AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&gctx->ks.ks);
@ -1477,8 +1476,8 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
break;
} else
# endif
# ifdef VPAES_CAPABLE
#endif
#ifdef VPAES_CAPABLE
if (VPAES_CAPABLE) {
vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&gctx->ks.ks);
@ -1487,18 +1486,18 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
gctx->ctr = NULL;
break;
} else
# endif
#endif
(void)0; /* terminate potentially open 'else' */
AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&gctx->ks.ks);
CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
(block128_f) AES_encrypt);
# ifdef AES_CTR_ASM
#ifdef AES_CTR_ASM
gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
# else
#else
gctx->ctr = NULL;
# endif
#endif
} while (0);
/*
@ -1559,7 +1558,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
/* Encrypt payload */
if (gctx->ctr) {
size_t bulk = 0;
# if defined(AES_GCM_ASM)
#if defined(AES_GCM_ASM)
if (len >= 32 && AES_GCM_ASM(gctx)) {
if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
return -1;
@ -1569,7 +1568,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.Yi.c, gctx->gcm.Xi.u);
gctx->gcm.len.u[1] += bulk;
}
# endif
#endif
if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
in + bulk,
out + bulk,
@ -1577,7 +1576,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
goto err;
} else {
size_t bulk = 0;
# if defined(AES_GCM_ASM2)
#if defined(AES_GCM_ASM2)
if (len >= 32 && AES_GCM_ASM2(gctx)) {
if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
return -1;
@ -1587,7 +1586,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.Yi.c, gctx->gcm.Xi.u);
gctx->gcm.len.u[1] += bulk;
}
# endif
#endif
if (CRYPTO_gcm128_encrypt(&gctx->gcm,
in + bulk, out + bulk, len - bulk))
goto err;
@ -1600,7 +1599,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
/* Decrypt */
if (gctx->ctr) {
size_t bulk = 0;
# if defined(AES_GCM_ASM)
#if defined(AES_GCM_ASM)
if (len >= 16 && AES_GCM_ASM(gctx)) {
if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
return -1;
@ -1610,7 +1609,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.Yi.c, gctx->gcm.Xi.u);
gctx->gcm.len.u[1] += bulk;
}
# endif
#endif
if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
in + bulk,
out + bulk,
@ -1618,7 +1617,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
goto err;
} else {
size_t bulk = 0;
# if defined(AES_GCM_ASM2)
#if defined(AES_GCM_ASM2)
if (len >= 16 && AES_GCM_ASM2(gctx)) {
if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
return -1;
@ -1628,7 +1627,7 @@ static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.Yi.c, gctx->gcm.Xi.u);
gctx->gcm.len.u[1] += bulk;
}
# endif
#endif
if (CRYPTO_gcm128_decrypt(&gctx->gcm,
in + bulk, out + bulk, len - bulk))
goto err;
@ -1671,7 +1670,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
} else if (EVP_CIPHER_CTX_encrypting(ctx)) {
if (gctx->ctr) {
size_t bulk = 0;
# if defined(AES_GCM_ASM)
#if defined(AES_GCM_ASM)
if (len >= 32 && AES_GCM_ASM(gctx)) {
size_t res = (16 - gctx->gcm.mres) % 16;
@ -1685,7 +1684,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.len.u[1] += bulk;
bulk += res;
}
# endif
#endif
if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
in + bulk,
out + bulk,
@ -1693,7 +1692,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
return -1;
} else {
size_t bulk = 0;
# if defined(AES_GCM_ASM2)
#if defined(AES_GCM_ASM2)
if (len >= 32 && AES_GCM_ASM2(gctx)) {
size_t res = (16 - gctx->gcm.mres) % 16;
@ -1707,7 +1706,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.len.u[1] += bulk;
bulk += res;
}
# endif
#endif
if (CRYPTO_gcm128_encrypt(&gctx->gcm,
in + bulk, out + bulk, len - bulk))
return -1;
@ -1715,7 +1714,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
} else {
if (gctx->ctr) {
size_t bulk = 0;
# if defined(AES_GCM_ASM)
#if defined(AES_GCM_ASM)
if (len >= 16 && AES_GCM_ASM(gctx)) {
size_t res = (16 - gctx->gcm.mres) % 16;
@ -1729,7 +1728,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.len.u[1] += bulk;
bulk += res;
}
# endif
#endif
if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
in + bulk,
out + bulk,
@ -1737,7 +1736,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
return -1;
} else {
size_t bulk = 0;
# if defined(AES_GCM_ASM2)
#if defined(AES_GCM_ASM2)
if (len >= 16 && AES_GCM_ASM2(gctx)) {
size_t res = (16 - gctx->gcm.mres) % 16;
@ -1751,7 +1750,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
gctx->gcm.len.u[1] += bulk;
bulk += res;
}
# endif
#endif
if (CRYPTO_gcm128_decrypt(&gctx->gcm,
in + bulk, out + bulk, len - bulk))
return -1;
@ -1778,7 +1777,7 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
}
# define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
| EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
| EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
| EVP_CIPH_CUSTOM_COPY)
@ -1824,13 +1823,13 @@ static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
if (key)
do {
# ifdef AES_XTS_ASM
#ifdef AES_XTS_ASM
xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
# else
#else
xctx->stream = NULL;
# endif
#endif
/* key_len is two AES keys */
# ifdef HWAES_CAPABLE
#ifdef HWAES_CAPABLE
if (HWAES_CAPABLE) {
if (enc) {
HWAES_set_encrypt_key(key,
@ -1852,13 +1851,13 @@ static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
xctx->xts.key1 = &xctx->ks1;
break;
} else
# endif
# ifdef BSAES_CAPABLE
#endif
#ifdef BSAES_CAPABLE
if (BSAES_CAPABLE)
xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
else
# endif
# ifdef VPAES_CAPABLE
#endif
#ifdef VPAES_CAPABLE
if (VPAES_CAPABLE) {
if (enc) {
vpaes_set_encrypt_key(key,
@ -1880,7 +1879,7 @@ static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
xctx->xts.key1 = &xctx->ks1;
break;
} else
# endif
#endif
(void)0; /* terminate potentially open 'else' */
if (enc) {
@ -1928,9 +1927,9 @@ static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
return 1;
}
# define aes_xts_cleanup NULL
#define aes_xts_cleanup NULL
# define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
| EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
| EVP_CIPH_CUSTOM_COPY)
@ -2036,7 +2035,7 @@ static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
if (key)
do {
# ifdef HWAES_CAPABLE
#ifdef HWAES_CAPABLE
if (HWAES_CAPABLE) {
HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&cctx->ks.ks);
@ -2047,8 +2046,8 @@ static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
cctx->key_set = 1;
break;
} else
# endif
# ifdef VPAES_CAPABLE
#endif
#ifdef VPAES_CAPABLE
if (VPAES_CAPABLE) {
vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&cctx->ks.ks);
@ -2058,7 +2057,7 @@ static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
cctx->key_set = 1;
break;
}
# endif
#endif
AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&cctx->ks.ks);
CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
@ -2190,7 +2189,7 @@ static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
}
}
# define aes_ccm_cleanup NULL
#define aes_ccm_cleanup NULL
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
@ -2286,7 +2285,7 @@ static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
return rv ? (int)rv : -1;
}
# define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
| EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
| EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
@ -2374,7 +2373,7 @@ const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
return &aes_256_wrap_pad;
}
# ifndef OPENSSL_NO_OCB
#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
@ -2434,28 +2433,28 @@ static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
}
}
# ifdef HWAES_CAPABLE
# ifdef HWAES_ocb_encrypt
# ifdef HWAES_CAPABLE
# ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const void *key,
size_t start_block_num,
unsigned char offset_i[16],
const unsigned char L_[][16],
unsigned char checksum[16]);
# else
# define HWAES_ocb_encrypt NULL
# endif
# ifdef HWAES_ocb_decrypt
# else
# define HWAES_ocb_encrypt NULL
# endif
# ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
size_t blocks, const void *key,
size_t start_block_num,
unsigned char offset_i[16],
const unsigned char L_[][16],
unsigned char checksum[16]);
# else
# define HWAES_ocb_decrypt NULL
# endif
# else
# define HWAES_ocb_decrypt NULL
# endif
# endif
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
@ -2470,7 +2469,7 @@ static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
* needs both. We could possibly optimise to remove setting the
* decrypt for an encryption operation.
*/
# ifdef HWAES_CAPABLE
# ifdef HWAES_CAPABLE
if (HWAES_CAPABLE) {
HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&octx->ksenc.ks);
@ -2485,8 +2484,8 @@ static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 0;
break;
}
# endif
# ifdef VPAES_CAPABLE
# endif
# ifdef VPAES_CAPABLE
if (VPAES_CAPABLE) {
vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&octx->ksenc.ks);
@ -2500,7 +2499,7 @@ static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 0;
break;
}
# endif
# endif
AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
&octx->ksenc.ks);
AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
@ -2690,5 +2689,4 @@ BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
# endif /* OPENSSL_NO_OCB */
#endif
#endif /* OPENSSL_NO_OCB */

View file

@ -52,31 +52,29 @@
#include <stdio.h>
#include <string.h>
#if !defined(OPENSSL_NO_AES)
#include <openssl/evp.h>
#include <openssl/objects.h>
#include <openssl/aes.h>
#include <openssl/sha.h>
#include <openssl/rand.h>
#include "modes_lcl.h"
#include "internal/evp_int.h"
# include <openssl/evp.h>
# include <openssl/objects.h>
# include <openssl/aes.h>
# include <openssl/sha.h>
# include <openssl/rand.h>
# include "modes_lcl.h"
# include "internal/evp_int.h"
#ifndef EVP_CIPH_FLAG_AEAD_CIPHER
# define EVP_CIPH_FLAG_AEAD_CIPHER 0x200000
# define EVP_CTRL_AEAD_TLS1_AAD 0x16
# define EVP_CTRL_AEAD_SET_MAC_KEY 0x17
#endif
# ifndef EVP_CIPH_FLAG_AEAD_CIPHER
# define EVP_CIPH_FLAG_AEAD_CIPHER 0x200000
# define EVP_CTRL_AEAD_TLS1_AAD 0x16
# define EVP_CTRL_AEAD_SET_MAC_KEY 0x17
# endif
#if !defined(EVP_CIPH_FLAG_DEFAULT_ASN1)
# define EVP_CIPH_FLAG_DEFAULT_ASN1 0
#endif
# if !defined(EVP_CIPH_FLAG_DEFAULT_ASN1)
# define EVP_CIPH_FLAG_DEFAULT_ASN1 0
# endif
#if !defined(EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK)
# define EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK 0
#endif
# if !defined(EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK)
# define EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK 0
# endif
# define TLS1_1_VERSION 0x0302
#define TLS1_1_VERSION 0x0302
typedef struct {
AES_KEY ks;
@ -88,15 +86,15 @@ typedef struct {
} aux;
} EVP_AES_HMAC_SHA1;
# define NO_PAYLOAD_LENGTH ((size_t)-1)
#define NO_PAYLOAD_LENGTH ((size_t)-1)
# if defined(AES_ASM) && ( \
#if defined(AES_ASM) && ( \
defined(__x86_64) || defined(__x86_64__) || \
defined(_M_AMD64) || defined(_M_X64) || \
defined(__INTEL__) )
extern unsigned int OPENSSL_ia32cap_P[];
# define AESNI_CAPABLE (1<<(57-32))
# define AESNI_CAPABLE (1<<(57-32))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
@ -116,7 +114,7 @@ void aesni256_cbc_sha1_dec(const void *inp, void *out, size_t blocks,
const AES_KEY *key, unsigned char iv[16],
SHA_CTX *ctx, const void *in0);
# define data(ctx) ((EVP_AES_HMAC_SHA1 *)EVP_CIPHER_CTX_get_cipher_data(ctx))
# define data(ctx) ((EVP_AES_HMAC_SHA1 *)EVP_CIPHER_CTX_get_cipher_data(ctx))
static int aesni_cbc_hmac_sha1_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *inkey,
@ -143,12 +141,12 @@ static int aesni_cbc_hmac_sha1_init_key(EVP_CIPHER_CTX *ctx,
return ret < 0 ? 0 : 1;
}
# define STITCHED_CALL
# undef STITCHED_DECRYPT_CALL
# define STITCHED_CALL
# undef STITCHED_DECRYPT_CALL
# if !defined(STITCHED_CALL)
# define aes_off 0
# endif
# if !defined(STITCHED_CALL)
# define aes_off 0
# endif
void sha1_block_data_order(void *c, const void *p, size_t len);
@ -183,12 +181,12 @@ static void sha1_update(SHA_CTX *c, const void *data, size_t len)
SHA1_Update(c, ptr, res);
}
# ifdef SHA1_Update
# undef SHA1_Update
# endif
# define SHA1_Update sha1_update
# ifdef SHA1_Update
# undef SHA1_Update
# endif
# define SHA1_Update sha1_update
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
typedef struct {
unsigned int A[8], B[8], C[8], D[8], E[8];
@ -227,9 +225,9 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
0;
size_t ret = 0;
u8 *IVs;
# if defined(BSWAP8)
# if defined(BSWAP8)
u64 seqnum;
# endif
# endif
/* ask for IVs in bulk */
if (RAND_bytes((IVs = blocks[0].c), 16 * x4) <= 0)
@ -263,15 +261,15 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
IVs += 16;
}
# if defined(BSWAP8)
# if defined(BSWAP8)
memcpy(blocks[0].c, key->md.data, 8);
seqnum = BSWAP8(blocks[0].q[0]);
# endif
# endif
for (i = 0; i < x4; i++) {
unsigned int len = (i == (x4 - 1) ? last : frag);
# if !defined(BSWAP8)
# if !defined(BSWAP8)
unsigned int carry, j;
# endif
# endif
ctx->A[i] = key->md.h0;
ctx->B[i] = key->md.h1;
@ -280,14 +278,14 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
ctx->E[i] = key->md.h4;
/* fix seqnum */
# if defined(BSWAP8)
# if defined(BSWAP8)
blocks[i].q[0] = BSWAP8(seqnum + i);
# else
# else
for (carry = i, j = 8; j--;) {
blocks[i].c[j] = ((u8 *)key->md.data)[j] + carry;
carry = (blocks[i].c[j] - carry) >> (sizeof(carry) * 8 - 1);
}
# endif
# endif
blocks[i].c[8] = ((u8 *)key->md.data)[8];
blocks[i].c[9] = ((u8 *)key->md.data)[9];
blocks[i].c[10] = ((u8 *)key->md.data)[10];
@ -306,10 +304,10 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
/* hash 13-byte headers and first 64-13 bytes of inputs */
sha1_multi_block(ctx, edges, n4x);
/* hash bulk inputs */
# define MAXCHUNKSIZE 2048
# if MAXCHUNKSIZE%64
# error "MAXCHUNKSIZE is not divisible by 64"
# elif MAXCHUNKSIZE
# define MAXCHUNKSIZE 2048
# if MAXCHUNKSIZE%64
# error "MAXCHUNKSIZE is not divisible by 64"
# elif MAXCHUNKSIZE
/*
* goal is to minimize pressure on L1 cache by moving in shorter steps,
* so that hashed data is still in the cache by the time we encrypt it
@ -338,8 +336,8 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
minblocks -= MAXCHUNKSIZE / 64;
} while (minblocks > MAXCHUNKSIZE / 64);
}
# endif
# undef MAXCHUNKSIZE
# endif
# undef MAXCHUNKSIZE
sha1_multi_block(ctx, hash_d, n4x);
memset(blocks, 0, sizeof(blocks));
@ -354,18 +352,18 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
len += 64 + 13; /* 64 is HMAC header */
len *= 8; /* convert to bits */
if (off < (64 - 8)) {
# ifdef BSWAP4
# ifdef BSWAP4
blocks[i].d[15] = BSWAP4(len);
# else
# else
PUTU32(blocks[i].c + 60, len);
# endif
# endif
edges[i].blocks = 1;
} else {
# ifdef BSWAP4
# ifdef BSWAP4
blocks[i].d[31] = BSWAP4(len);
# else
# else
PUTU32(blocks[i].c + 124, len);
# endif
# endif
edges[i].blocks = 2;
}
edges[i].ptr = blocks[i].c;
@ -376,7 +374,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
memset(blocks, 0, sizeof(blocks));
for (i = 0; i < x4; i++) {
# ifdef BSWAP4
# ifdef BSWAP4
blocks[i].d[0] = BSWAP4(ctx->A[i]);
ctx->A[i] = key->tail.h0;
blocks[i].d[1] = BSWAP4(ctx->B[i]);
@ -389,7 +387,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
ctx->E[i] = key->tail.h4;
blocks[i].c[20] = 0x80;
blocks[i].d[15] = BSWAP4((64 + 20) * 8);
# else
# else
PUTU32(blocks[i].c + 0, ctx->A[i]);
ctx->A[i] = key->tail.h0;
PUTU32(blocks[i].c + 4, ctx->B[i]);
@ -402,7 +400,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
ctx->E[i] = key->tail.h4;
blocks[i].c[20] = 0x80;
PUTU32(blocks[i].c + 60, (64 + 20) * 8);
# endif
# endif
edges[i].ptr = blocks[i].c;
edges[i].blocks = 1;
}
@ -455,7 +453,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA1 *key,
return ret;
}
# endif
# endif
static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
@ -465,11 +463,11 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
size_t plen = key->payload_length, iv = 0, /* explicit IV in TLS 1.1 and
* later */
sha_off = 0;
# if defined(STITCHED_CALL)
# if defined(STITCHED_CALL)
size_t aes_off = 0, blocks;
sha_off = SHA_CBLOCK - key->md.num;
# endif
# endif
key->payload_length = NO_PAYLOAD_LENGTH;
@ -486,7 +484,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
else if (key->aux.tls_ver >= TLS1_1_VERSION)
iv = AES_BLOCK_SIZE;
# if defined(STITCHED_CALL)
# if defined(STITCHED_CALL)
if (plen > (sha_off + iv)
&& (blocks = (plen - (sha_off + iv)) / SHA_CBLOCK)) {
SHA1_Update(&key->md, in + iv, sha_off);
@ -504,7 +502,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
} else {
sha_off = 0;
}
# endif
# endif
sha_off += iv;
SHA1_Update(&key->md, in + sha_off, plen - sha_off);
@ -546,10 +544,10 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
unsigned int u[SHA_LBLOCK];
unsigned char c[SHA_CBLOCK];
} *data = (void *)key->md.data;
# if defined(STITCHED_DECRYPT_CALL)
# if defined(STITCHED_DECRYPT_CALL)
unsigned char tail_iv[AES_BLOCK_SIZE];
int stitch = 0;
# endif
# endif
if ((key->aux.tls_aad[plen - 4] << 8 | key->aux.tls_aad[plen - 3])
>= TLS1_1_VERSION) {
@ -565,7 +563,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
} else if (len < (SHA_DIGEST_LENGTH + 1))
return 0;
# if defined(STITCHED_DECRYPT_CALL)
# if defined(STITCHED_DECRYPT_CALL)
if (len >= 1024 && ctx->key_len == 32) {
/* decrypt last block */
memcpy(tail_iv, in + len - 2 * AES_BLOCK_SIZE,
@ -575,7 +573,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
&key->ks, tail_iv, 0);
stitch = 1;
} else
# endif
# endif
/* decrypt HMAC|padding at once */
aesni_cbc_encrypt(in, out, len, &key->ks,
EVP_CIPHER_CTX_iv_noconst(ctx), 0);
@ -598,7 +596,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
key->md = key->head;
SHA1_Update(&key->md, key->aux.tls_aad, plen);
# if defined(STITCHED_DECRYPT_CALL)
# if defined(STITCHED_DECRYPT_CALL)
if (stitch) {
blocks = (len - (256 + 32 + SHA_CBLOCK)) / SHA_CBLOCK;
aes_off = len - AES_BLOCK_SIZE - blocks * SHA_CBLOCK;
@ -619,9 +617,9 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
key->md.Nl += (blocks << 3); /* at most 18 bits */
memcpy(ctx->iv, tail_iv, AES_BLOCK_SIZE);
}
# endif
# endif
# if 1
# if 1
len -= SHA_DIGEST_LENGTH; /* amend mac */
if (len >= (256 + SHA_CBLOCK)) {
j = (len - (256 + SHA_CBLOCK)) & (0 - SHA_CBLOCK);
@ -634,15 +632,15 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
/* but pretend as if we hashed padded payload */
bitlen = key->md.Nl + (inp_len << 3); /* at most 18 bits */
# ifdef BSWAP4
# ifdef BSWAP4
bitlen = BSWAP4(bitlen);
# else
# else
mac.c[0] = 0;
mac.c[1] = (unsigned char)(bitlen >> 16);
mac.c[2] = (unsigned char)(bitlen >> 8);
mac.c[3] = (unsigned char)bitlen;
bitlen = mac.u[0];
# endif
# endif
pmac->u[0] = 0;
pmac->u[1] = 0;
@ -699,13 +697,13 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
pmac->u[3] |= key->md.h3 & mask;
pmac->u[4] |= key->md.h4 & mask;
# ifdef BSWAP4
# ifdef BSWAP4
pmac->u[0] = BSWAP4(pmac->u[0]);
pmac->u[1] = BSWAP4(pmac->u[1]);
pmac->u[2] = BSWAP4(pmac->u[2]);
pmac->u[3] = BSWAP4(pmac->u[3]);
pmac->u[4] = BSWAP4(pmac->u[4]);
# else
# else
for (i = 0; i < 5; i++) {
res = pmac->u[i];
pmac->c[4 * i + 0] = (unsigned char)(res >> 24);
@ -713,9 +711,9 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
pmac->c[4 * i + 2] = (unsigned char)(res >> 8);
pmac->c[4 * i + 3] = (unsigned char)res;
}
# endif
# endif
len += SHA_DIGEST_LENGTH;
# else
# else
SHA1_Update(&key->md, out, inp_len);
res = key->md.num;
SHA1_Final(pmac->c, &key->md);
@ -734,7 +732,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
for (; inp_blocks < pad_blocks; inp_blocks++)
sha1_block_data_order(&key->md, data, 1);
}
# endif
# endif
key->md = key->tail;
SHA1_Update(&key->md, pmac->c, SHA_DIGEST_LENGTH);
SHA1_Final(pmac->c, &key->md);
@ -742,7 +740,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
/* verify HMAC */
out += inp_len;
len -= inp_len;
# if 1
# if 1
{
unsigned char *p = out + len - 1 - maxpad - SHA_DIGEST_LENGTH;
size_t off = out - p;
@ -764,7 +762,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
ret &= (int)~res;
}
# else
# else
for (res = 0, i = 0; i < SHA_DIGEST_LENGTH; i++)
res |= out[i] ^ pmac->c[i];
res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
@ -778,10 +776,10 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
res = (0 - res) >> (sizeof(res) * 8 - 1);
ret &= (int)~res;
# endif
# endif
return ret;
} else {
# if defined(STITCHED_DECRYPT_CALL)
# if defined(STITCHED_DECRYPT_CALL)
if (len >= 1024 && ctx->key_len == 32) {
if (sha_off %= SHA_CBLOCK)
blocks = (len - 3 * SHA_CBLOCK) / SHA_CBLOCK;
@ -804,7 +802,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
if (key->md.Nl < (unsigned int)blocks)
key->md.Nh++;
} else
# endif
# endif
/* decrypt HMAC|padding at once */
aesni_cbc_encrypt(in, out, len, &key->ks,
EVP_CIPHER_CTX_iv_noconst(ctx), 0);
@ -882,7 +880,7 @@ static int aesni_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
return SHA_DIGEST_LENGTH;
}
}
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
case EVP_CTRL_TLS1_1_MULTIBLOCK_MAX_BUFSIZE:
return (int)(5 + 16 + ((arg + 20 + 16) & -16));
case EVP_CTRL_TLS1_1_MULTIBLOCK_AAD:
@ -945,18 +943,18 @@ static int aesni_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
param->interleave / 4);
}
case EVP_CTRL_TLS1_1_MULTIBLOCK_DECRYPT:
# endif
# endif
default:
return -1;
}
}
static EVP_CIPHER aesni_128_cbc_hmac_sha1_cipher = {
# ifdef NID_aes_128_cbc_hmac_sha1
# ifdef NID_aes_128_cbc_hmac_sha1
NID_aes_128_cbc_hmac_sha1,
# else
# else
NID_undef,
# endif
# endif
AES_BLOCK_SIZE, 16, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
@ -971,11 +969,11 @@ static EVP_CIPHER aesni_128_cbc_hmac_sha1_cipher = {
};
static EVP_CIPHER aesni_256_cbc_hmac_sha1_cipher = {
# ifdef NID_aes_256_cbc_hmac_sha1
# ifdef NID_aes_256_cbc_hmac_sha1
NID_aes_256_cbc_hmac_sha1,
# else
# else
NID_undef,
# endif
# endif
AES_BLOCK_SIZE, 32, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
@ -1000,7 +998,7 @@ const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha1(void)
return (OPENSSL_ia32cap_P[1] & AESNI_CAPABLE ?
&aesni_256_cbc_hmac_sha1_cipher : NULL);
}
# else
#else
const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha1(void)
{
return NULL;
@ -1010,5 +1008,4 @@ const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha1(void)
{
return NULL;
}
# endif
#endif

View file

@ -52,31 +52,30 @@
#include <stdio.h>
#include <string.h>
#if !defined(OPENSSL_NO_AES)
# include <openssl/evp.h>
# include <openssl/objects.h>
# include <openssl/aes.h>
# include <openssl/sha.h>
# include <openssl/rand.h>
# include "modes_lcl.h"
# include "internal/evp_int.h"
#include <openssl/evp.h>
#include <openssl/objects.h>
#include <openssl/aes.h>
#include <openssl/sha.h>
#include <openssl/rand.h>
#include "modes_lcl.h"
#include "internal/evp_int.h"
# ifndef EVP_CIPH_FLAG_AEAD_CIPHER
# define EVP_CIPH_FLAG_AEAD_CIPHER 0x200000
# define EVP_CTRL_AEAD_TLS1_AAD 0x16
# define EVP_CTRL_AEAD_SET_MAC_KEY 0x17
# endif
#ifndef EVP_CIPH_FLAG_AEAD_CIPHER
# define EVP_CIPH_FLAG_AEAD_CIPHER 0x200000
# define EVP_CTRL_AEAD_TLS1_AAD 0x16
# define EVP_CTRL_AEAD_SET_MAC_KEY 0x17
#endif
# if !defined(EVP_CIPH_FLAG_DEFAULT_ASN1)
# define EVP_CIPH_FLAG_DEFAULT_ASN1 0
# endif
#if !defined(EVP_CIPH_FLAG_DEFAULT_ASN1)
# define EVP_CIPH_FLAG_DEFAULT_ASN1 0
#endif
# if !defined(EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK)
# define EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK 0
# endif
#if !defined(EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK)
# define EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK 0
#endif
# define TLS1_1_VERSION 0x0302
#define TLS1_1_VERSION 0x0302
typedef struct {
AES_KEY ks;
@ -90,13 +89,13 @@ typedef struct {
# define NO_PAYLOAD_LENGTH ((size_t)-1)
# if defined(AES_ASM) && ( \
#if defined(AES_ASM) && ( \
defined(__x86_64) || defined(__x86_64__) || \
defined(_M_AMD64) || defined(_M_X64) || \
defined(__INTEL__) )
extern unsigned int OPENSSL_ia32cap_P[];
# define AESNI_CAPABLE (1<<(57-32))
# define AESNI_CAPABLE (1<<(57-32))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
AES_KEY *key);
@ -112,7 +111,7 @@ int aesni_cbc_sha256_enc(const void *inp, void *out, size_t blocks,
const AES_KEY *key, unsigned char iv[16],
SHA256_CTX *ctx, const void *in0);
# define data(ctx) ((EVP_AES_HMAC_SHA256 *)EVP_CIPHER_CTX_get_cipher_data(ctx))
# define data(ctx) ((EVP_AES_HMAC_SHA256 *)EVP_CIPHER_CTX_get_cipher_data(ctx))
static int aesni_cbc_hmac_sha256_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *inkey,
@ -140,11 +139,11 @@ static int aesni_cbc_hmac_sha256_init_key(EVP_CIPHER_CTX *ctx,
return ret < 0 ? 0 : 1;
}
# define STITCHED_CALL
# define STITCHED_CALL
# if !defined(STITCHED_CALL)
# define aes_off 0
# endif
# if !defined(STITCHED_CALL)
# define aes_off 0
# endif
void sha256_block_data_order(void *c, const void *p, size_t len);
@ -179,12 +178,12 @@ static void sha256_update(SHA256_CTX *c, const void *data, size_t len)
SHA256_Update(c, ptr, res);
}
# ifdef SHA256_Update
# undef SHA256_Update
# endif
# define SHA256_Update sha256_update
# ifdef SHA256_Update
# undef SHA256_Update
# endif
# define SHA256_Update sha256_update
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
typedef struct {
unsigned int A[8], B[8], C[8], D[8], E[8], F[8], G[8], H[8];
@ -223,9 +222,9 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
0;
size_t ret = 0;
u8 *IVs;
# if defined(BSWAP8)
# if defined(BSWAP8)
u64 seqnum;
# endif
# endif
/* ask for IVs in bulk */
if (RAND_bytes((IVs = blocks[0].c), 16 * x4) <= 0)
@ -260,15 +259,15 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
IVs += 16;
}
# if defined(BSWAP8)
# if defined(BSWAP8)
memcpy(blocks[0].c, key->md.data, 8);
seqnum = BSWAP8(blocks[0].q[0]);
# endif
# endif
for (i = 0; i < x4; i++) {
unsigned int len = (i == (x4 - 1) ? last : frag);
# if !defined(BSWAP8)
# if !defined(BSWAP8)
unsigned int carry, j;
# endif
# endif
ctx->A[i] = key->md.h[0];
ctx->B[i] = key->md.h[1];
@ -280,14 +279,14 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
ctx->H[i] = key->md.h[7];
/* fix seqnum */
# if defined(BSWAP8)
# if defined(BSWAP8)
blocks[i].q[0] = BSWAP8(seqnum + i);
# else
# else
for (carry = i, j = 8; j--;) {
blocks[i].c[j] = ((u8 *)key->md.data)[j] + carry;
carry = (blocks[i].c[j] - carry) >> (sizeof(carry) * 8 - 1);
}
# endif
# endif
blocks[i].c[8] = ((u8 *)key->md.data)[8];
blocks[i].c[9] = ((u8 *)key->md.data)[9];
blocks[i].c[10] = ((u8 *)key->md.data)[10];
@ -306,10 +305,10 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
/* hash 13-byte headers and first 64-13 bytes of inputs */
sha256_multi_block(ctx, edges, n4x);
/* hash bulk inputs */
# define MAXCHUNKSIZE 2048
# if MAXCHUNKSIZE%64
# error "MAXCHUNKSIZE is not divisible by 64"
# elif MAXCHUNKSIZE
# define MAXCHUNKSIZE 2048
# if MAXCHUNKSIZE%64
# error "MAXCHUNKSIZE is not divisible by 64"
# elif MAXCHUNKSIZE
/*
* goal is to minimize pressure on L1 cache by moving in shorter steps,
* so that hashed data is still in the cache by the time we encrypt it
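The comment above is the rationale for MAXCHUNKSIZE: hash and encrypt in steps small enough that the hashed bytes are still in L1 when the cipher touches them. A schematic of that interleaving only, with hash_step()/cipher_step() as stand-ins for the real SHA-256 and AES calls (they are not functions from this file):

#include <string.h>

#define CHUNK 2048                      /* multiple of the 64-byte SHA-256 block */

static void hash_step(const unsigned char *p, size_t n) { (void)p; (void)n; }
static void cipher_step(const unsigned char *in, unsigned char *out, size_t n)
{
    memcpy(out, in, n);                 /* placeholder for the cipher call */
}

/* Walk the payload in cache-sized chunks: hash a chunk, then immediately
 * encrypt the same bytes while they are still resident in L1. */
static void hash_then_encrypt(const unsigned char *in, unsigned char *out,
                              size_t len)
{
    while (len > CHUNK) {
        hash_step(in, CHUNK);
        cipher_step(in, out, CHUNK);
        in += CHUNK;
        out += CHUNK;
        len -= CHUNK;
    }
    hash_step(in, len);                 /* final, possibly short, chunk */
    cipher_step(in, out, len);
}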
@ -338,8 +337,8 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
minblocks -= MAXCHUNKSIZE / 64;
} while (minblocks > MAXCHUNKSIZE / 64);
}
# endif
# undef MAXCHUNKSIZE
# endif
# undef MAXCHUNKSIZE
sha256_multi_block(ctx, hash_d, n4x);
memset(blocks, 0, sizeof(blocks));
@ -354,18 +353,18 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
len += 64 + 13; /* 64 is HMAC header */
len *= 8; /* convert to bits */
if (off < (64 - 8)) {
# ifdef BSWAP4
# ifdef BSWAP4
blocks[i].d[15] = BSWAP4(len);
# else
# else
PUTU32(blocks[i].c + 60, len);
# endif
# endif
edges[i].blocks = 1;
} else {
# ifdef BSWAP4
# ifdef BSWAP4
blocks[i].d[31] = BSWAP4(len);
# else
# else
PUTU32(blocks[i].c + 124, len);
# endif
# endif
edges[i].blocks = 2;
}
edges[i].ptr = blocks[i].c;
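Both branches above write the SHA-256 bit-length field big-endian into the padding block; BSWAP4 does it with a byte-swap primitive, PUTU32 by storing individual bytes. A portable equivalent of the byte-store fallback (name chosen here for illustration):

/* Store a 32-bit value in big-endian byte order without a byte-swap intrinsic. */
static void put_u32_be(unsigned char *p, unsigned int v)
{
    p[0] = (unsigned char)(v >> 24);
    p[1] = (unsigned char)(v >> 16);
    p[2] = (unsigned char)(v >> 8);
    p[3] = (unsigned char)v;
}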
@ -376,7 +375,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
memset(blocks, 0, sizeof(blocks));
for (i = 0; i < x4; i++) {
# ifdef BSWAP4
# ifdef BSWAP4
blocks[i].d[0] = BSWAP4(ctx->A[i]);
ctx->A[i] = key->tail.h[0];
blocks[i].d[1] = BSWAP4(ctx->B[i]);
@ -395,7 +394,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
ctx->H[i] = key->tail.h[7];
blocks[i].c[32] = 0x80;
blocks[i].d[15] = BSWAP4((64 + 32) * 8);
# else
# else
PUTU32(blocks[i].c + 0, ctx->A[i]);
ctx->A[i] = key->tail.h[0];
PUTU32(blocks[i].c + 4, ctx->B[i]);
@ -414,7 +413,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
ctx->H[i] = key->tail.h[7];
blocks[i].c[32] = 0x80;
PUTU32(blocks[i].c + 60, (64 + 32) * 8);
# endif
# endif
edges[i].ptr = blocks[i].c;
edges[i].blocks = 1;
}
@ -470,7 +469,7 @@ static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
return ret;
}
# endif
# endif
static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
unsigned char *out,
@ -481,11 +480,11 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
size_t plen = key->payload_length, iv = 0, /* explicit IV in TLS 1.1 and
* later */
sha_off = 0;
# if defined(STITCHED_CALL)
# if defined(STITCHED_CALL)
size_t aes_off = 0, blocks;
sha_off = SHA256_CBLOCK - key->md.num;
# endif
# endif
key->payload_length = NO_PAYLOAD_LENGTH;
@ -502,7 +501,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
else if (key->aux.tls_ver >= TLS1_1_VERSION)
iv = AES_BLOCK_SIZE;
# if defined(STITCHED_CALL)
# if defined(STITCHED_CALL)
/*
* Assembly stitch handles AVX-capable processors, but its
* performance is not optimal on AMD Jaguar, ~40% worse, for
@ -532,7 +531,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
} else {
sha_off = 0;
}
# endif
# endif
sha_off += iv;
SHA256_Update(&key->md, in + sha_off, plen - sha_off);
@ -608,7 +607,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
key->md = key->head;
SHA256_Update(&key->md, key->aux.tls_aad, plen);
# if 1
# if 1
len -= SHA256_DIGEST_LENGTH; /* amend mac */
if (len >= (256 + SHA256_CBLOCK)) {
j = (len - (256 + SHA256_CBLOCK)) & (0 - SHA256_CBLOCK);
@ -621,15 +620,15 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
/* but pretend as if we hashed padded payload */
bitlen = key->md.Nl + (inp_len << 3); /* at most 18 bits */
# ifdef BSWAP4
# ifdef BSWAP4
bitlen = BSWAP4(bitlen);
# else
# else
mac.c[0] = 0;
mac.c[1] = (unsigned char)(bitlen >> 16);
mac.c[2] = (unsigned char)(bitlen >> 8);
mac.c[3] = (unsigned char)bitlen;
bitlen = mac.u[0];
# endif
# endif
pmac->u[0] = 0;
pmac->u[1] = 0;
@ -698,7 +697,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
pmac->u[6] |= key->md.h[6] & mask;
pmac->u[7] |= key->md.h[7] & mask;
# ifdef BSWAP4
# ifdef BSWAP4
pmac->u[0] = BSWAP4(pmac->u[0]);
pmac->u[1] = BSWAP4(pmac->u[1]);
pmac->u[2] = BSWAP4(pmac->u[2]);
@ -707,7 +706,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
pmac->u[5] = BSWAP4(pmac->u[5]);
pmac->u[6] = BSWAP4(pmac->u[6]);
pmac->u[7] = BSWAP4(pmac->u[7]);
# else
# else
for (i = 0; i < 8; i++) {
res = pmac->u[i];
pmac->c[4 * i + 0] = (unsigned char)(res >> 24);
@ -715,9 +714,9 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
pmac->c[4 * i + 2] = (unsigned char)(res >> 8);
pmac->c[4 * i + 3] = (unsigned char)res;
}
# endif
# endif
len += SHA256_DIGEST_LENGTH;
# else
# else
SHA256_Update(&key->md, out, inp_len);
res = key->md.num;
SHA256_Final(pmac->c, &key->md);
@ -736,7 +735,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
for (; inp_blocks < pad_blocks; inp_blocks++)
sha1_block_data_order(&key->md, data, 1);
}
# endif
# endif
key->md = key->tail;
SHA256_Update(&key->md, pmac->c, SHA256_DIGEST_LENGTH);
SHA256_Final(pmac->c, &key->md);
@ -744,7 +743,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
/* verify HMAC */
out += inp_len;
len -= inp_len;
# if 1
# if 1
{
unsigned char *p =
out + len - 1 - maxpad - SHA256_DIGEST_LENGTH;
@ -767,7 +766,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
ret &= (int)~res;
}
# else
# else
for (res = 0, i = 0; i < SHA256_DIGEST_LENGTH; i++)
res |= out[i] ^ pmac->c[i];
res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
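This #else branch is a constant-time comparison: byte differences are OR-ed into res, and the final line collapses "any nonzero byte" into an all-ones mask without branching on secret data. The same idiom as a stand-alone sketch (it assumes the accumulated value fits in 31 bits, which holds for OR-ed bytes):

#include <stddef.h>

/* Returns 0 if the n bytes are equal, 0xffffffff otherwise, in constant time. */
static unsigned int ct_not_equal(const unsigned char *a, const unsigned char *b,
                                 size_t n)
{
    unsigned int res = 0;
    size_t i;

    for (i = 0; i < n; i++)
        res |= a[i] ^ b[i];
    /* res is 0..255 here, so (0 - res) has its top bit set iff res != 0 */
    return 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
}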
@ -781,7 +780,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
res = (0 - res) >> (sizeof(res) * 8 - 1);
ret &= (int)~res;
# endif
# endif
return ret;
} else {
SHA256_Update(&key->md, out, len);
@ -859,7 +858,7 @@ static int aesni_cbc_hmac_sha256_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
return SHA256_DIGEST_LENGTH;
}
}
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
# if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
case EVP_CTRL_TLS1_1_MULTIBLOCK_MAX_BUFSIZE:
return (int)(5 + 16 + ((arg + 32 + 16) & -16));
case EVP_CTRL_TLS1_1_MULTIBLOCK_AAD:
@ -925,18 +924,18 @@ static int aesni_cbc_hmac_sha256_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
param->interleave / 4);
}
case EVP_CTRL_TLS1_1_MULTIBLOCK_DECRYPT:
# endif
# endif
default:
return -1;
}
}
static EVP_CIPHER aesni_128_cbc_hmac_sha256_cipher = {
# ifdef NID_aes_128_cbc_hmac_sha256
# ifdef NID_aes_128_cbc_hmac_sha256
NID_aes_128_cbc_hmac_sha256,
# else
# else
NID_undef,
# endif
# endif
AES_BLOCK_SIZE, 16, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
@ -951,11 +950,11 @@ static EVP_CIPHER aesni_128_cbc_hmac_sha256_cipher = {
};
static EVP_CIPHER aesni_256_cbc_hmac_sha256_cipher = {
# ifdef NID_aes_256_cbc_hmac_sha256
# ifdef NID_aes_256_cbc_hmac_sha256
NID_aes_256_cbc_hmac_sha256,
# else
# else
NID_undef,
# endif
# endif
AES_BLOCK_SIZE, 32, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
@ -982,7 +981,7 @@ const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha256(void)
aesni_cbc_sha256_enc(NULL, NULL, 0, NULL, NULL, NULL, NULL) ?
&aesni_256_cbc_hmac_sha256_cipher : NULL);
}
# else
#else
const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha256(void)
{
return NULL;
@ -992,5 +991,4 @@ const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha256(void)
{
return NULL;
}
# endif
#endif

View file

@ -138,27 +138,25 @@ const EVP_CIPHER *EVP_rc5_32_12_16_cfb(void)
}
# endif
# ifndef OPENSSL_NO_AES
# undef EVP_aes_128_cfb
# undef EVP_aes_128_cfb
const EVP_CIPHER *EVP_aes_128_cfb(void);
const EVP_CIPHER *EVP_aes_128_cfb(void)
{
return EVP_aes_128_cfb128();
}
# undef EVP_aes_192_cfb
# undef EVP_aes_192_cfb
const EVP_CIPHER *EVP_aes_192_cfb(void);
const EVP_CIPHER *EVP_aes_192_cfb(void)
{
return EVP_aes_192_cfb128();
}
# undef EVP_aes_256_cfb
# undef EVP_aes_256_cfb
const EVP_CIPHER *EVP_aes_256_cfb(void);
const EVP_CIPHER *EVP_aes_256_cfb(void)
{
return EVP_aes_256_cfb128();
}
# endif
#endif

View file

@ -69,9 +69,7 @@
#include <openssl/crypto.h>
#include <openssl/engine.h>
#include <openssl/evp.h>
#ifndef OPENSSL_NO_AES
# include <openssl/aes.h>
#endif
#include <openssl/aes.h>
#include <openssl/rand.h>
#include <openssl/err.h>
#include <openssl/modes.h>
@ -137,10 +135,8 @@ static int padlock_init(ENGINE *e);
static RAND_METHOD padlock_rand;
/* Cipher Stuff */
# ifndef OPENSSL_NO_AES
static int padlock_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
const int **nids, int nid);
# endif
/* Engine names */
static const char *padlock_id = "padlock";
@ -174,9 +170,7 @@ static int padlock_bind_helper(ENGINE *e)
if (!ENGINE_set_id(e, padlock_id) ||
!ENGINE_set_name(e, padlock_name) ||
!ENGINE_set_init_function(e, padlock_init) ||
# ifndef OPENSSL_NO_AES
(padlock_use_ace && !ENGINE_set_ciphers(e, padlock_ciphers)) ||
# endif
(padlock_use_rng && !ENGINE_set_RAND(e, &padlock_rand))) {
return 0;
}
@ -232,12 +226,12 @@ IMPLEMENT_DYNAMIC_CHECK_FN()
IMPLEMENT_DYNAMIC_BIND_FN(padlock_bind_fn)
# endif /* DYNAMIC_ENGINE */
/* ===== Here comes the "real" engine ===== */
# ifndef OPENSSL_NO_AES
/* Some AES-related constants */
# define AES_BLOCK_SIZE 16
# define AES_KEY_SIZE_128 16
# define AES_KEY_SIZE_192 24
# define AES_KEY_SIZE_256 32
# define AES_BLOCK_SIZE 16
# define AES_KEY_SIZE_128 16
# define AES_KEY_SIZE_192 24
# define AES_KEY_SIZE_256 32
/*
* Here we store the status information relevant to the current context.
*/
@ -263,7 +257,6 @@ struct padlock_cipher_data {
} cword; /* Control word */
AES_KEY ks; /* Encryption key */
};
# endif
/* Interface to assembler module */
unsigned int padlock_capability();
@ -303,31 +296,30 @@ static int padlock_available(void)
}
/* ===== AES encryption/decryption ===== */
# ifndef OPENSSL_NO_AES
# if defined(NID_aes_128_cfb128) && ! defined (NID_aes_128_cfb)
# define NID_aes_128_cfb NID_aes_128_cfb128
# endif
# if defined(NID_aes_128_cfb128) && ! defined (NID_aes_128_cfb)
# define NID_aes_128_cfb NID_aes_128_cfb128
# endif
# if defined(NID_aes_128_ofb128) && ! defined (NID_aes_128_ofb)
# define NID_aes_128_ofb NID_aes_128_ofb128
# endif
# if defined(NID_aes_128_ofb128) && ! defined (NID_aes_128_ofb)
# define NID_aes_128_ofb NID_aes_128_ofb128
# endif
# if defined(NID_aes_192_cfb128) && ! defined (NID_aes_192_cfb)
# define NID_aes_192_cfb NID_aes_192_cfb128
# endif
# if defined(NID_aes_192_cfb128) && ! defined (NID_aes_192_cfb)
# define NID_aes_192_cfb NID_aes_192_cfb128
# endif
# if defined(NID_aes_192_ofb128) && ! defined (NID_aes_192_ofb)
# define NID_aes_192_ofb NID_aes_192_ofb128
# endif
# if defined(NID_aes_192_ofb128) && ! defined (NID_aes_192_ofb)
# define NID_aes_192_ofb NID_aes_192_ofb128
# endif
# if defined(NID_aes_256_cfb128) && ! defined (NID_aes_256_cfb)
# define NID_aes_256_cfb NID_aes_256_cfb128
# endif
# if defined(NID_aes_256_cfb128) && ! defined (NID_aes_256_cfb)
# define NID_aes_256_cfb NID_aes_256_cfb128
# endif
# if defined(NID_aes_256_ofb128) && ! defined (NID_aes_256_ofb)
# define NID_aes_256_ofb NID_aes_256_ofb128
# endif
# if defined(NID_aes_256_ofb128) && ! defined (NID_aes_256_ofb)
# define NID_aes_256_ofb NID_aes_256_ofb128
# endif
/* List of supported ciphers. */
static const int padlock_cipher_nids[] = {
@ -357,9 +349,9 @@ static int padlock_cipher_nids_num = (sizeof(padlock_cipher_nids) /
static int padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc);
# define NEAREST_ALIGNED(ptr) ( (unsigned char *)(ptr) + \
# define NEAREST_ALIGNED(ptr) ( (unsigned char *)(ptr) + \
( (0x10 - ((size_t)(ptr) & 0x0F)) & 0x0F ) )
# define ALIGNED_CIPHER_DATA(ctx) ((struct padlock_cipher_data *)\
# define ALIGNED_CIPHER_DATA(ctx) ((struct padlock_cipher_data *)\
NEAREST_ALIGNED(EVP_CIPHER_CTX_get_cipher_data(ctx)))
static int
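NEAREST_ALIGNED above rounds the per-context cipher data up to the next 16-byte boundary, since the PadLock instructions expect 16-byte-aligned control data. An equivalent formulation of that arithmetic (helper name is illustrative):

#include <stdint.h>

/* Round p up to the next 16-byte boundary; a no-op if it is already aligned.
 * Equivalent to p + ((0x10 - ((size_t)p & 0x0F)) & 0x0F) as used above. */
static void *align16(void *p)
{
    uintptr_t u = (uintptr_t)p;

    return (void *)((u + 15) & ~(uintptr_t)15);
}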
@ -534,17 +526,17 @@ padlock_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
return 1;
}
# define EVP_CIPHER_block_size_ECB AES_BLOCK_SIZE
# define EVP_CIPHER_block_size_CBC AES_BLOCK_SIZE
# define EVP_CIPHER_block_size_OFB 1
# define EVP_CIPHER_block_size_CFB 1
# define EVP_CIPHER_block_size_CTR 1
# define EVP_CIPHER_block_size_ECB AES_BLOCK_SIZE
# define EVP_CIPHER_block_size_CBC AES_BLOCK_SIZE
# define EVP_CIPHER_block_size_OFB 1
# define EVP_CIPHER_block_size_CFB 1
# define EVP_CIPHER_block_size_CTR 1
/*
* Declaring so many ciphers by hand would be a pain. Instead introduce a bit
* of preprocessor magic :-)
*/
# define DECLARE_AES_EVP(ksize,lmode,umode) \
# define DECLARE_AES_EVP(ksize,lmode,umode) \
static EVP_CIPHER *_hidden_aes_##ksize##_##lmode = NULL; \
static const EVP_CIPHER *padlock_aes_##ksize##_##lmode(void) \
{ \
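The comment above explains DECLARE_AES_EVP: a single token-pasting macro stamps out the near-identical EVP_CIPHER glue for every key size and mode, and its body is cut off here by the diff context. A compilable toy of the same pattern, with purely illustrative names rather than the macro's real contents:

#include <stdio.h>

/* One macro generates a family of near-identical functions via ## pasting. */
#define DECLARE_NAME(ksize, lmode) \
    static const char *aes_##ksize##_##lmode##_name(void) \
    { \
        return "aes-" #ksize "-" #lmode; \
    }

DECLARE_NAME(128, cbc)
DECLARE_NAME(256, ecb)

int main(void)
{
    printf("%s %s\n", aes_128_cbc_name(), aes_256_ecb_name());
    return 0;
}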
@ -707,12 +699,12 @@ padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
AES_set_decrypt_key(key, key_len, &cdata->ks);
else
AES_set_encrypt_key(key, key_len, &cdata->ks);
# ifndef AES_ASM
# ifndef AES_ASM
/*
* OpenSSL C functions use byte-swapped extended key.
*/
padlock_key_bswap(&cdata->ks);
# endif
# endif
cdata->cword.b.keygen = 1;
break;
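padlock_key_bswap() (an assembler routine) is called above because, when the C key-setup path is used, the expanded round-key words are not stored in the byte order the PadLock unit consumes. A hypothetical C version of that conversion, assuming the standard public AES_KEY layout with rd_key[] and rounds:

#include <openssl/aes.h>

/* Byte-swap every 32-bit round-key word of an expanded key in place. */
static void key_bswap(AES_KEY *ks)
{
    int i;

    for (i = 0; i < 4 * (ks->rounds + 1); i++) {
        unsigned int w = ks->rd_key[i];

        ks->rd_key[i] = (w << 24) | ((w & 0xff00) << 8) |
                        ((w >> 8) & 0xff00) | (w >> 24);
    }
}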
@ -731,8 +723,6 @@ padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
# endif /* OPENSSL_NO_AES */
/* ===== Random Number Generator ===== */
/*
* This code is not engaged. The reason is that it does not comply

View file

@ -53,7 +53,6 @@
# include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_AES
# include <stddef.h>
# ifdef __cplusplus
extern "C" {
@ -130,6 +129,5 @@ int AES_unwrap_key(AES_KEY *key, const unsigned char *iv,
# ifdef __cplusplus
}
# endif
# endif
#endif

View file

@ -793,13 +793,12 @@ const EVP_CIPHER *EVP_rc5_32_12_16_cfb64(void);
# define EVP_rc5_32_12_16_cfb EVP_rc5_32_12_16_cfb64
const EVP_CIPHER *EVP_rc5_32_12_16_ofb(void);
# endif
# ifndef OPENSSL_NO_AES
const EVP_CIPHER *EVP_aes_128_ecb(void);
const EVP_CIPHER *EVP_aes_128_cbc(void);
const EVP_CIPHER *EVP_aes_128_cfb1(void);
const EVP_CIPHER *EVP_aes_128_cfb8(void);
const EVP_CIPHER *EVP_aes_128_cfb128(void);
# define EVP_aes_128_cfb EVP_aes_128_cfb128
# define EVP_aes_128_cfb EVP_aes_128_cfb128
const EVP_CIPHER *EVP_aes_128_ofb(void);
const EVP_CIPHER *EVP_aes_128_ctr(void);
const EVP_CIPHER *EVP_aes_128_ccm(void);
@ -807,30 +806,30 @@ const EVP_CIPHER *EVP_aes_128_gcm(void);
const EVP_CIPHER *EVP_aes_128_xts(void);
const EVP_CIPHER *EVP_aes_128_wrap(void);
const EVP_CIPHER *EVP_aes_128_wrap_pad(void);
# ifndef OPENSSL_NO_OCB
# ifndef OPENSSL_NO_OCB
const EVP_CIPHER *EVP_aes_128_ocb(void);
# endif
# endif
const EVP_CIPHER *EVP_aes_192_ecb(void);
const EVP_CIPHER *EVP_aes_192_cbc(void);
const EVP_CIPHER *EVP_aes_192_cfb1(void);
const EVP_CIPHER *EVP_aes_192_cfb8(void);
const EVP_CIPHER *EVP_aes_192_cfb128(void);
# define EVP_aes_192_cfb EVP_aes_192_cfb128
# define EVP_aes_192_cfb EVP_aes_192_cfb128
const EVP_CIPHER *EVP_aes_192_ofb(void);
const EVP_CIPHER *EVP_aes_192_ctr(void);
const EVP_CIPHER *EVP_aes_192_ccm(void);
const EVP_CIPHER *EVP_aes_192_gcm(void);
const EVP_CIPHER *EVP_aes_192_wrap(void);
const EVP_CIPHER *EVP_aes_192_wrap_pad(void);
# ifndef OPENSSL_NO_OCB
# ifndef OPENSSL_NO_OCB
const EVP_CIPHER *EVP_aes_192_ocb(void);
# endif
# endif
const EVP_CIPHER *EVP_aes_256_ecb(void);
const EVP_CIPHER *EVP_aes_256_cbc(void);
const EVP_CIPHER *EVP_aes_256_cfb1(void);
const EVP_CIPHER *EVP_aes_256_cfb8(void);
const EVP_CIPHER *EVP_aes_256_cfb128(void);
# define EVP_aes_256_cfb EVP_aes_256_cfb128
# define EVP_aes_256_cfb EVP_aes_256_cfb128
const EVP_CIPHER *EVP_aes_256_ofb(void);
const EVP_CIPHER *EVP_aes_256_ctr(void);
const EVP_CIPHER *EVP_aes_256_ccm(void);
@ -838,14 +837,13 @@ const EVP_CIPHER *EVP_aes_256_gcm(void);
const EVP_CIPHER *EVP_aes_256_xts(void);
const EVP_CIPHER *EVP_aes_256_wrap(void);
const EVP_CIPHER *EVP_aes_256_wrap_pad(void);
# ifndef OPENSSL_NO_OCB
# ifndef OPENSSL_NO_OCB
const EVP_CIPHER *EVP_aes_256_ocb(void);
# endif
# endif
const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha1(void);
const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha1(void);
const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha256(void);
const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha256(void);
# endif
# ifndef OPENSSL_NO_CAMELLIA
const EVP_CIPHER *EVP_camellia_128_ecb(void);
const EVP_CIPHER *EVP_camellia_128_cbc(void);

View file

@ -97,7 +97,6 @@ static void ossl_init_ssl_base(void)
*/
EVP_add_cipher(EVP_rc2_40_cbc());
#endif
#ifndef OPENSSL_NO_AES
EVP_add_cipher(EVP_aes_128_cbc());
EVP_add_cipher(EVP_aes_192_cbc());
EVP_add_cipher(EVP_aes_256_cbc());
@ -109,7 +108,6 @@ static void ossl_init_ssl_base(void)
EVP_add_cipher(EVP_aes_256_cbc_hmac_sha1());
EVP_add_cipher(EVP_aes_128_cbc_hmac_sha256());
EVP_add_cipher(EVP_aes_256_cbc_hmac_sha256());
#endif
#ifndef OPENSSL_NO_CAMELLIA
EVP_add_cipher(EVP_camellia_128_cbc());
EVP_add_cipher(EVP_camellia_256_cbc());