Adapt cipher implementations to opaque EVP_CIPHER_CTX

Note: there's a larger number of implementations in crypto/evp/ that
aren't affected because they include evp_locl.h.  They will be handled
in a separate commit.

Reviewed-by: Rich Salz <rsalz@openssl.org>
This commit is contained in:
Richard Levitte 2015-12-13 22:06:14 +01:00
parent c0ca39bdd6
commit 936166aff2
9 changed files with 141 additions and 107 deletions

View file

@@ -249,7 +249,7 @@ typedef struct {
unsigned char key[TEST_RC4_KEY_SIZE];
RC4_KEY ks;
} TEST_RC4_KEY;
# define test(ctx) ((TEST_RC4_KEY *)(ctx)->cipher_data)
# define test(ctx) ((TEST_RC4_KEY *)EVP_CIPHER_CTX_cipher_data(ctx))
static int test_rc4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{

View file

@@ -115,7 +115,7 @@ void aesni256_cbc_sha1_dec(const void *inp, void *out, size_t blocks,
const AES_KEY *key, unsigned char iv[16],
SHA_CTX *ctx, const void *in0);
# define data(ctx) ((EVP_AES_HMAC_SHA1 *)(ctx)->cipher_data)
# define data(ctx) ((EVP_AES_HMAC_SHA1 *)EVP_CIPHER_CTX_cipher_data(ctx))
static int aesni_cbc_hmac_sha1_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *inkey,
@@ -125,9 +125,13 @@ static int aesni_cbc_hmac_sha1_init_key(EVP_CIPHER_CTX *ctx,
int ret;
if (enc)
ret = aesni_set_encrypt_key(inkey, ctx->key_len * 8, &key->ks);
ret = aesni_set_encrypt_key(inkey,
EVP_CIPHER_CTX_key_length(ctx) * 8,
&key->ks);
else
ret = aesni_set_decrypt_key(inkey, ctx->key_len * 8, &key->ks);
ret = aesni_set_decrypt_key(inkey,
EVP_CIPHER_CTX_key_length(ctx) * 8,
&key->ks);
SHA1_Init(&key->head); /* handy when benchmarking */
key->tail = key->head;
@@ -471,7 +475,7 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
if (len % AES_BLOCK_SIZE)
return 0;
if (ctx->encrypt) {
if (EVP_CIPHER_CTX_encrypting(ctx)) {
if (plen == NO_PAYLOAD_LENGTH)
plen = len;
else if (len !=
@@ -487,7 +491,8 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
SHA1_Update(&key->md, in + iv, sha_off);
aesni_cbc_sha1_enc(in, out, blocks, &key->ks,
ctx->iv, &key->md, in + iv + sha_off);
EVP_CIPHER_CTX_iv_noconst(ctx),
&key->md, in + iv + sha_off);
blocks *= SHA_CBLOCK;
aes_off += blocks;
sha_off += blocks;
@@ -518,10 +523,10 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
out[plen] = l;
/* encrypt HMAC|padding at once */
aesni_cbc_encrypt(out + aes_off, out + aes_off, len - aes_off,
&key->ks, ctx->iv, 1);
&key->ks, EVP_CIPHER_CTX_iv_noconst(ctx), 1);
} else {
aesni_cbc_encrypt(in + aes_off, out + aes_off, len - aes_off,
&key->ks, ctx->iv, 1);
&key->ks, EVP_CIPHER_CTX_iv_noconst(ctx), 1);
}
} else {
union {
@@ -551,7 +556,8 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
return 0;
/* omit explicit iv */
memcpy(ctx->iv, in, AES_BLOCK_SIZE);
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), in, AES_BLOCK_SIZE);
in += AES_BLOCK_SIZE;
out += AES_BLOCK_SIZE;
len -= AES_BLOCK_SIZE;
@@ -570,7 +576,8 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
} else
# endif
/* decrypt HMAC|padding at once */
aesni_cbc_encrypt(in, out, len, &key->ks, ctx->iv, 0);
aesni_cbc_encrypt(in, out, len, &key->ks,
EVP_CIPHER_CTX_iv_noconst(ctx), 0);
/* figure out payload length */
pad = out[len - 1];
@@ -798,7 +805,8 @@ static int aesni_cbc_hmac_sha1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
} else
# endif
/* decrypt HMAC|padding at once */
aesni_cbc_encrypt(in, out, len, &key->ks, ctx->iv, 0);
aesni_cbc_encrypt(in, out, len, &key->ks,
EVP_CIPHER_CTX_iv_noconst(ctx), 0);
SHA1_Update(&key->md, out, len);
}
@@ -852,7 +860,7 @@ static int aesni_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
len = p[arg - 2] << 8 | p[arg - 1];
if (ctx->encrypt) {
if (EVP_CIPHER_CTX_encrypting(ctx)) {
key->payload_length = len;
if ((key->aux.tls_ver =
p[arg - 4] << 8 | p[arg - 3]) >= TLS1_1_VERSION) {
@@ -888,7 +896,7 @@ static int aesni_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
inp_len = param->inp[11] << 8 | param->inp[12];
if (ctx->encrypt) {
if (EVP_CIPHER_CTX_encrypting(ctx)) {
if ((param->inp[9] << 8 | param->inp[10]) < TLS1_1_VERSION)
return -1;
@@ -948,7 +956,7 @@ static EVP_CIPHER aesni_128_cbc_hmac_sha1_cipher = {
# else
NID_undef,
# endif
16, 16, 16,
AES_BLOCK_SIZE, 16, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
aesni_cbc_hmac_sha1_init_key,
@@ -967,7 +975,7 @@ static EVP_CIPHER aesni_256_cbc_hmac_sha1_cipher = {
# else
NID_undef,
# endif
16, 32, 16,
AES_BLOCK_SIZE, 32, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
aesni_cbc_hmac_sha1_init_key,

View file

@@ -111,7 +111,7 @@ int aesni_cbc_sha256_enc(const void *inp, void *out, size_t blocks,
const AES_KEY *key, unsigned char iv[16],
SHA256_CTX *ctx, const void *in0);
# define data(ctx) ((EVP_AES_HMAC_SHA256 *)(ctx)->cipher_data)
# define data(ctx) ((EVP_AES_HMAC_SHA256 *)EVP_CIPHER_CTX_cipher_data(ctx))
static int aesni_cbc_hmac_sha256_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *inkey,
@@ -122,9 +122,13 @@ static int aesni_cbc_hmac_sha256_init_key(EVP_CIPHER_CTX *ctx,
if (enc)
memset(&key->ks, 0, sizeof(key->ks.rd_key)),
ret = aesni_set_encrypt_key(inkey, ctx->key_len * 8, &key->ks);
ret = aesni_set_encrypt_key(inkey,
EVP_CIPHER_CTX_key_length(ctx) * 8,
&key->ks);
else
ret = aesni_set_decrypt_key(inkey, ctx->key_len * 8, &key->ks);
ret = aesni_set_decrypt_key(inkey,
EVP_CIPHER_CTX_key_length(ctx) * 8,
&key->ks);
SHA256_Init(&key->head); /* handy when benchmarking */
key->tail = key->head;
@@ -487,7 +491,7 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
if (len % AES_BLOCK_SIZE)
return 0;
if (ctx->encrypt) {
if (EVP_CIPHER_CTX_encrypting(ctx)) {
if (plen == NO_PAYLOAD_LENGTH)
plen = len;
else if (len !=
@@ -515,7 +519,8 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
SHA256_Update(&key->md, in + iv, sha_off);
(void)aesni_cbc_sha256_enc(in, out, blocks, &key->ks,
ctx->iv, &key->md, in + iv + sha_off);
EVP_CIPHER_CTX_iv_noconst(ctx),
&key->md, in + iv + sha_off);
blocks *= SHA256_CBLOCK;
aes_off += blocks;
sha_off += blocks;
@@ -546,10 +551,10 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
out[plen] = l;
/* encrypt HMAC|padding at once */
aesni_cbc_encrypt(out + aes_off, out + aes_off, len - aes_off,
&key->ks, ctx->iv, 1);
&key->ks, EVP_CIPHER_CTX_iv_noconst(ctx), 1);
} else {
aesni_cbc_encrypt(in + aes_off, out + aes_off, len - aes_off,
&key->ks, ctx->iv, 1);
&key->ks, EVP_CIPHER_CTX_iv_noconst(ctx), 1);
}
} else {
union {
@@ -561,7 +566,8 @@ static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
pmac = (void *)(((size_t)mac.c + 63) & ((size_t)0 - 64));
/* decrypt HMAC|padding at once */
aesni_cbc_encrypt(in, out, len, &key->ks, ctx->iv, 0);
aesni_cbc_encrypt(in, out, len, &key->ks,
EVP_CIPHER_CTX_iv_noconst(ctx), 0);
if (plen != NO_PAYLOAD_LENGTH) { /* "TLS" mode of operation */
size_t inp_len, mask, j, i;
@@ -831,7 +837,7 @@ static int aesni_cbc_hmac_sha256_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
if (arg != EVP_AEAD_TLS1_AAD_LEN)
return -1;
if (ctx->encrypt) {
if (EVP_CIPHER_CTX_encrypting(ctx)) {
key->payload_length = len;
if ((key->aux.tls_ver =
p[arg - 4] << 8 | p[arg - 3]) >= TLS1_1_VERSION) {
@@ -870,7 +876,7 @@ static int aesni_cbc_hmac_sha256_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
inp_len = param->inp[11] << 8 | param->inp[12];
if (ctx->encrypt) {
if (EVP_CIPHER_CTX_encrypting(ctx)) {
if ((param->inp[9] << 8 | param->inp[10]) < TLS1_1_VERSION)
return -1;
@@ -930,7 +936,7 @@ static EVP_CIPHER aesni_128_cbc_hmac_sha256_cipher = {
# else
NID_undef,
# endif
16, 16, 16,
AES_BLOCK_SIZE, 16, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
aesni_cbc_hmac_sha256_init_key,
@@ -949,7 +955,7 @@ static EVP_CIPHER aesni_256_cbc_hmac_sha256_cipher = {
# else
NID_undef,
# endif
16, 32, 16,
AES_BLOCK_SIZE, 32, AES_BLOCK_SIZE,
EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
aesni_cbc_hmac_sha256_init_key,

View file

@@ -199,7 +199,9 @@ static int rc2_set_asn1_type_and_iv(EVP_CIPHER_CTX *c, ASN1_TYPE *type)
if (type != NULL) {
num = rc2_meth_to_magic(c);
j = EVP_CIPHER_CTX_iv_length(c);
i = ASN1_TYPE_set_int_octetstring(type, num, c->oiv, j);
i = ASN1_TYPE_set_int_octetstring(type, num,
(unsigned char *)EVP_CIPHER_CTX_original_iv(c),
j);
}
return (i);
}

View file

@@ -72,7 +72,7 @@ typedef struct {
RC4_KEY ks; /* working key */
} EVP_RC4_KEY;
# define data(ctx) ((EVP_RC4_KEY *)(ctx)->cipher_data)
# define data(ctx) ((EVP_RC4_KEY *)EVP_CIPHER_CTX_cipher_data(ctx))
static int rc4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc);

View file

@@ -80,7 +80,7 @@ typedef struct {
void rc4_md5_enc(RC4_KEY *key, const void *in0, void *out,
MD5_CTX *ctx, const void *inp, size_t blocks);
# define data(ctx) ((EVP_RC4_HMAC_MD5 *)(ctx)->cipher_data)
# define data(ctx) ((EVP_RC4_HMAC_MD5 *)EVP_CIPHER_CTX_cipher_data(ctx))
static int rc4_hmac_md5_init_key(EVP_CIPHER_CTX *ctx,
const unsigned char *inkey,
@@ -127,7 +127,7 @@ static int rc4_hmac_md5_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
if (plen != NO_PAYLOAD_LENGTH && len != (plen + MD5_DIGEST_LENGTH))
return 0;
if (ctx->encrypt) {
if (EVP_CIPHER_CTX_encrypting(ctx)) {
if (plen == NO_PAYLOAD_LENGTH)
plen = len;
# if defined(STITCHED_CALL)
@@ -265,7 +265,7 @@ static int rc4_hmac_md5_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
len = p[arg - 2] << 8 | p[arg - 1];
if (!ctx->encrypt) {
if (!EVP_CIPHER_CTX_encrypting(ctx)) {
len -= MD5_DIGEST_LENGTH;
p[arg - 2] = len >> 8;
p[arg - 1] = len;

View file

@@ -199,32 +199,38 @@ static int gost_cipher_init_param(EVP_CIPHER_CTX *ctx,
const unsigned char *iv, int enc,
int paramNID, int mode)
{
struct ossl_gost_cipher_ctx *c = ctx->cipher_data;
if (ctx->app_data == NULL) {
struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_cipher_data(ctx);
if (EVP_CIPHER_CTX_get_app_data(ctx) == NULL) {
if (!gost_cipher_set_param(c, paramNID))
return 0;
ctx->app_data = ctx->cipher_data;
EVP_CIPHER_CTX_set_app_data(ctx, EVP_CIPHER_CTX_cipher_data(ctx));
}
if (key)
gost_key(&(c->cctx), key);
if (iv)
memcpy(ctx->oiv, iv, EVP_CIPHER_CTX_iv_length(ctx));
memcpy(ctx->iv, ctx->oiv, EVP_CIPHER_CTX_iv_length(ctx));
memcpy((unsigned char *)EVP_CIPHER_CTX_original_iv(ctx), iv,
EVP_CIPHER_CTX_iv_length(ctx));
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx),
EVP_CIPHER_CTX_original_iv(ctx),
EVP_CIPHER_CTX_iv_length(ctx));
return 1;
}
static int gost_cipher_init_cpa(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
struct ossl_gost_cipher_ctx *c = ctx->cipher_data;
struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_cipher_data(ctx);
gost_init(&(c->cctx), &Gost28147_CryptoProParamSetA);
c->key_meshing = 1;
c->count = 0;
if (key)
gost_key(&(c->cctx), key);
if (iv)
memcpy(ctx->oiv, iv, EVP_CIPHER_CTX_iv_length(ctx));
memcpy(ctx->iv, ctx->oiv, EVP_CIPHER_CTX_iv_length(ctx));
memcpy((unsigned char *)EVP_CIPHER_CTX_original_iv(ctx), iv,
EVP_CIPHER_CTX_iv_length(ctx));
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx),
EVP_CIPHER_CTX_original_iv(ctx),
EVP_CIPHER_CTX_iv_length(ctx));
return 1;
}
@@ -291,23 +297,24 @@ int gost_cipher_do_cfb(EVP_CIPHER_CTX *ctx, unsigned char *out,
{
const unsigned char *in_ptr = in;
unsigned char *out_ptr = out;
unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
size_t i = 0;
size_t j = 0;
/* process partial block if any */
if (ctx->num) {
for (j = ctx->num, i = 0; j < 8 && i < inl;
if (EVP_CIPHER_CTX_num(ctx)) {
for (j = EVP_CIPHER_CTX_num(ctx), i = 0; j < 8 && i < inl;
j++, i++, in_ptr++, out_ptr++) {
if (!ctx->encrypt)
ctx->buf[j + 8] = *in_ptr;
*out_ptr = ctx->buf[j] ^ (*in_ptr);
if (ctx->encrypt)
ctx->buf[j + 8] = *out_ptr;
if (!EVP_CIPHER_CTX_encrypting(ctx))
buf[j + 8] = *in_ptr;
*out_ptr = buf[j] ^ (*in_ptr);
if (EVP_CIPHER_CTX_encrypting(ctx))
buf[j + 8] = *out_ptr;
}
if (j == 8) {
memcpy(ctx->iv, ctx->buf + 8, 8);
ctx->num = 0;
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), buf + 8, 8);
EVP_CIPHER_CTX_set_num(ctx, 0);
} else {
ctx->num = j;
EVP_CIPHER_CTX_set_num(ctx, j);
return 1;
}
}
@@ -316,36 +323,38 @@ int gost_cipher_do_cfb(EVP_CIPHER_CTX *ctx, unsigned char *out,
/*
* block cipher current iv
*/
gost_crypt_mesh(ctx->cipher_data, ctx->iv, ctx->buf);
gost_crypt_mesh(EVP_CIPHER_CTX_cipher_data(ctx),
EVP_CIPHER_CTX_iv_noconst(ctx), buf);
/*
* xor next block of input text with it and output it
*/
/*
* output this block
*/
if (!ctx->encrypt)
memcpy(ctx->iv, in_ptr, 8);
if (!EVP_CIPHER_CTX_encrypting(ctx))
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), in_ptr, 8);
for (j = 0; j < 8; j++) {
out_ptr[j] = ctx->buf[j] ^ in_ptr[j];
out_ptr[j] = buf[j] ^ in_ptr[j];
}
/* Encrypt */
/* Next iv is next block of cipher text */
if (ctx->encrypt)
memcpy(ctx->iv, out_ptr, 8);
if (EVP_CIPHER_CTX_encrypting(ctx))
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), out_ptr, 8);
}
/* Process rest of buffer */
if (i < inl) {
gost_crypt_mesh(ctx->cipher_data, ctx->iv, ctx->buf);
if (!ctx->encrypt)
memcpy(ctx->buf + 8, in_ptr, inl - i);
gost_crypt_mesh(EVP_CIPHER_CTX_cipher_data(ctx),
EVP_CIPHER_CTX_iv_noconst(ctx), buf);
if (!EVP_CIPHER_CTX_encrypting(ctx))
memcpy(buf + 8, in_ptr, inl - i);
for (j = 0; i < inl; j++, i++) {
out_ptr[j] = ctx->buf[j] ^ in_ptr[j];
out_ptr[j] = buf[j] ^ in_ptr[j];
}
ctx->num = j;
if (ctx->encrypt)
memcpy(ctx->buf + 8, out_ptr, j);
EVP_CIPHER_CTX_set_num(ctx, j);
if (EVP_CIPHER_CTX_encrypting(ctx))
memcpy(buf + 8, out_ptr, j);
} else {
ctx->num = 0;
EVP_CIPHER_CTX_set_num(ctx, 0);
}
return 1;
}
@@ -355,18 +364,19 @@ static int gost_cipher_do_cnt(EVP_CIPHER_CTX *ctx, unsigned char *out,
{
const unsigned char *in_ptr = in;
unsigned char *out_ptr = out;
unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
size_t i = 0;
size_t j;
/* process partial block if any */
if (ctx->num) {
for (j = ctx->num, i = 0; j < 8 && i < inl;
if (EVP_CIPHER_CTX_num(ctx)) {
for (j = EVP_CIPHER_CTX_num(ctx), i = 0; j < 8 && i < inl;
j++, i++, in_ptr++, out_ptr++) {
*out_ptr = ctx->buf[j] ^ (*in_ptr);
*out_ptr = buf[j] ^ (*in_ptr);
}
if (j == 8) {
ctx->num = 0;
EVP_CIPHER_CTX_set_num(ctx, 0);
} else {
ctx->num = j;
EVP_CIPHER_CTX_set_num(ctx, j);
return 1;
}
}
@@ -376,7 +386,8 @@ static int gost_cipher_do_cnt(EVP_CIPHER_CTX *ctx, unsigned char *out,
* block cipher current iv
*/
/* Encrypt */
gost_cnt_next(ctx->cipher_data, ctx->iv, ctx->buf);
gost_cnt_next(EVP_CIPHER_CTX_cipher_data(ctx),
EVP_CIPHER_CTX_iv_noconst(ctx), buf);
/*
* xor next block of input text with it and output it
*/
@@ -384,18 +395,19 @@ static int gost_cipher_do_cnt(EVP_CIPHER_CTX *ctx, unsigned char *out,
* output this block
*/
for (j = 0; j < 8; j++) {
out_ptr[j] = ctx->buf[j] ^ in_ptr[j];
out_ptr[j] = buf[j] ^ in_ptr[j];
}
}
/* Process rest of buffer */
if (i < inl) {
gost_cnt_next(ctx->cipher_data, ctx->iv, ctx->buf);
gost_cnt_next(EVP_CIPHER_CTX_cipher_data(ctx),
EVP_CIPHER_CTX_iv_noconst(ctx), buf);
for (j = 0; i < inl; j++, i++) {
out_ptr[j] = ctx->buf[j] ^ in_ptr[j];
out_ptr[j] = buf[j] ^ in_ptr[j];
}
ctx->num = j;
EVP_CIPHER_CTX_set_num(ctx, j);
} else {
ctx->num = 0;
EVP_CIPHER_CTX_set_num(ctx, 0);
}
return 1;
}
@@ -403,8 +415,9 @@ static int gost_cipher_do_cnt(EVP_CIPHER_CTX *ctx, unsigned char *out,
/* Cleaning up of EVP_CIPHER_CTX */
int gost_cipher_cleanup(EVP_CIPHER_CTX *ctx)
{
gost_destroy(&((struct ossl_gost_cipher_ctx *)ctx->cipher_data)->cctx);
ctx->app_data = NULL;
gost_destroy(&((struct ossl_gost_cipher_ctx *)
EVP_CIPHER_CTX_cipher_data(ctx))->cctx);
EVP_CIPHER_CTX_set_app_data(ctx, NULL);
return 1;
}
@@ -414,7 +427,8 @@ int gost_cipher_ctl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr)
switch (type) {
case EVP_CTRL_RAND_KEY:
{
if (RAND_bytes((unsigned char *)ptr, ctx->key_len) <= 0) {
if (RAND_bytes((unsigned char *)ptr,
EVP_CIPHER_CTX_key_length(ctx)) <= 0) {
GOSTerr(GOST_F_GOST_CIPHER_CTL,
GOST_R_RANDOM_GENERATOR_ERROR);
return -1;
@@ -443,14 +457,15 @@ int gost89_set_asn1_parameters(EVP_CIPHER_CTX *ctx, ASN1_TYPE *params)
int len = 0;
unsigned char *buf = NULL;
unsigned char *p = NULL;
struct ossl_gost_cipher_ctx *c = ctx->cipher_data;
struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_cipher_data(ctx);
GOST_CIPHER_PARAMS *gcp = GOST_CIPHER_PARAMS_new();
ASN1_OCTET_STRING *os = NULL;
if (!gcp) {
GOSTerr(GOST_F_GOST89_SET_ASN1_PARAMETERS, ERR_R_MALLOC_FAILURE);
return 0;
}
if (!ASN1_OCTET_STRING_set(gcp->iv, ctx->iv, ctx->cipher->iv_len)) {
if (!ASN1_OCTET_STRING_set(gcp->iv, EVP_CIPHER_CTX_iv(ctx),
EVP_CIPHER_CTX_iv_length(ctx))) {
GOST_CIPHER_PARAMS_free(gcp);
GOSTerr(GOST_F_GOST89_SET_ASN1_PARAMETERS, ERR_R_MALLOC_FAILURE);
return 0;
@@ -488,7 +503,7 @@ int gost89_get_asn1_parameters(EVP_CIPHER_CTX *ctx, ASN1_TYPE *params)
int len;
GOST_CIPHER_PARAMS *gcp = NULL;
unsigned char *p;
struct ossl_gost_cipher_ctx *c = ctx->cipher_data;
struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_cipher_data(ctx);
if (ASN1_TYPE_get(params) != V_ASN1_SEQUENCE) {
return ret;
}
@@ -499,7 +514,7 @@ int gost89_get_asn1_parameters(EVP_CIPHER_CTX *ctx, ASN1_TYPE *params)
params->value.sequence->length);
len = gcp->iv->length;
if (len != ctx->cipher->iv_len) {
if (len != EVP_CIPHER_CTX_iv_length(ctx)) {
GOST_CIPHER_PARAMS_free(gcp);
GOSTerr(GOST_F_GOST89_GET_ASN1_PARAMETERS, GOST_R_INVALID_IV_LENGTH);
return -1;
@@ -508,7 +523,8 @@ int gost89_get_asn1_parameters(EVP_CIPHER_CTX *ctx, ASN1_TYPE *params)
GOST_CIPHER_PARAMS_free(gcp);
return -1;
}
memcpy(ctx->oiv, gcp->iv->data, len);
memcpy((unsigned char *)EVP_CIPHER_CTX_original_iv(ctx), gcp->iv->data,
EVP_CIPHER_CTX_iv_length(ctx));
GOST_CIPHER_PARAMS_free(gcp);

View file

@@ -287,7 +287,7 @@ static const EVP_CIPHER ossltest_aes_128_cbc = { \
EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CBC_MODE,
ossltest_aes128_init_key,
ossltest_aes128_cbc_cipher,
NULL,
NULL, /* FIXME: when EVP_CIPHER goes opaque, this should be set to EVP_aes_128_cbc()->ctx_size */
0, /* We don't know the size of cipher_data at compile time */
NULL,NULL,NULL,NULL
};
@@ -569,14 +569,15 @@ static int digest_sha512_final(EVP_MD_CTX *ctx, unsigned char *md)
int ossltest_aes128_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
if (ctx->cipher_data == NULL) {
if (EVP_CIPHER_CTX_cipher_data(ctx) == NULL) {
/*
* Normally cipher_data is allocated automatically for an engine but
* we don't know the ctx_size as compile time so we have to do it at
* run time
*/
ctx->cipher_data = OPENSSL_zalloc(EVP_aes_128_cbc()->ctx_size);
if (ctx->cipher_data == NULL) {
/* FIXME: when EVP_CIPHER goes opaque, we won't need this trickery any more */
EVP_CIPHER_CTX_new_cipher_data(ctx, EVP_aes_128_cbc()->ctx_size);
if (EVP_CIPHER_CTX_cipher_data(ctx) == NULL) {
OSSLTESTerr(OSSLTEST_F_OSSLTEST_AES128_INIT_KEY,
ERR_R_MALLOC_FAILURE);
return 0;

View file

@@ -361,7 +361,7 @@ static int padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
# define NEAREST_ALIGNED(ptr) ( (unsigned char *)(ptr) + \
( (0x10 - ((size_t)(ptr) & 0x0F)) & 0x0F ) )
# define ALIGNED_CIPHER_DATA(ctx) ((struct padlock_cipher_data *)\
NEAREST_ALIGNED(ctx->cipher_data))
NEAREST_ALIGNED(EVP_CIPHER_CTX_cipher_data(ctx)))
static int
padlock_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
@@ -378,9 +378,9 @@ padlock_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
int ret;
memcpy(cdata->iv, ctx->iv, AES_BLOCK_SIZE);
memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
if ((ret = padlock_cbc_encrypt(out_arg, in_arg, cdata, nbytes)))
memcpy(ctx->iv, cdata->iv, AES_BLOCK_SIZE);
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
return ret;
}
@@ -391,13 +391,13 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
size_t chunk;
if ((chunk = ctx->num)) { /* borrow chunk variable */
unsigned char *ivp = ctx->iv;
if ((chunk = EVP_CIPHER_CTX_num(ctx))) { /* borrow chunk variable */
unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);
if (chunk >= AES_BLOCK_SIZE)
return 0; /* bogus value */
if (ctx->encrypt)
if (EVP_CIPHER_CTX_encrypting(ctx))
while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
ivp[chunk] = *(out_arg++) = *(in_arg++) ^ ivp[chunk];
chunk++, nbytes--;
@@ -408,13 +408,13 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
ivp[chunk++] = c, nbytes--;
}
ctx->num = chunk % AES_BLOCK_SIZE;
EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
}
if (nbytes == 0)
return 1;
memcpy(cdata->iv, ctx->iv, AES_BLOCK_SIZE);
memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
if (!padlock_cfb_encrypt(out_arg, in_arg, cdata, chunk))
@@ -427,7 +427,7 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
out_arg += chunk;
in_arg += chunk;
ctx->num = nbytes;
EVP_CIPHER_CTX_set_num(ctx, nbytes);
if (cdata->cword.b.encdec) {
cdata->cword.b.encdec = 0;
padlock_reload_key();
@@ -450,7 +450,7 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
}
}
memcpy(ctx->iv, cdata->iv, AES_BLOCK_SIZE);
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
return 1;
}
@@ -465,8 +465,8 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
/*
* ctx->num is maintained in byte-oriented modes, such as CFB and OFB...
*/
if ((chunk = ctx->num)) { /* borrow chunk variable */
unsigned char *ivp = ctx->iv;
if ((chunk = EVP_CIPHER_CTX_num(ctx))) { /* borrow chunk variable */
unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);
if (chunk >= AES_BLOCK_SIZE)
return 0; /* bogus value */
@@ -476,13 +476,13 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
chunk++, nbytes--;
}
ctx->num = chunk % AES_BLOCK_SIZE;
EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
}
if (nbytes == 0)
return 1;
memcpy(cdata->iv, ctx->iv, AES_BLOCK_SIZE);
memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
if (!padlock_ofb_encrypt(out_arg, in_arg, cdata, chunk))
@@ -495,7 +495,7 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
out_arg += chunk;
in_arg += chunk;
ctx->num = nbytes;
EVP_CIPHER_CTX_set_num(ctx, nbytes);
padlock_reload_key(); /* empirically found */
padlock_aes_block(ivp, ivp, cdata);
padlock_reload_key(); /* empirically found */
@@ -505,7 +505,7 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
}
}
memcpy(ctx->iv, cdata->iv, AES_BLOCK_SIZE);
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
return 1;
}
@@ -524,13 +524,14 @@ padlock_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
const unsigned char *in_arg, size_t nbytes)
{
struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
unsigned int num = ctx->num;
unsigned int num = EVP_CIPHER_CTX_num(ctx);
CRYPTO_ctr128_encrypt_ctr32(in_arg, out_arg, nbytes,
cdata, ctx->iv, ctx->buf, &num,
cdata, EVP_CIPHER_CTX_iv_noconst(ctx),
EVP_CIPHER_CTX_buf_noconst(ctx), &num,
(ctr128_f) padlock_ctr32_encrypt_glue);
ctx->num = (size_t)num;
EVP_CIPHER_CTX_set_num(ctx, (size_t)num);
return 1;
}
@@ -667,7 +668,7 @@ padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
if (mode == EVP_CIPH_OFB_MODE || mode == EVP_CIPH_CTR_MODE)
cdata->cword.b.encdec = 0;
else
cdata->cword.b.encdec = (ctx->encrypt == 0);
cdata->cword.b.encdec = (EVP_CIPHER_CTX_encrypting(ctx) == 0);
cdata->cword.b.rounds = 10 + (key_len - 128) / 32;
cdata->cword.b.ksize = (key_len - 128) / 64;