|| !EVP_MD_meth_set_input_blocksize(md, 8)
|| !EVP_MD_meth_set_app_datasize(md,
sizeof(struct ossl_gost_imit_ctx))
- || !EVP_MD_meth_set_flags(md, 0)
+ || !EVP_MD_meth_set_flags(md, EVP_MD_FLAG_XOF)
|| !EVP_MD_meth_set_init(md, gost_imit_init_cpa)
|| !EVP_MD_meth_set_update(md, gost_imit_update)
|| !EVP_MD_meth_set_final(md, gost_imit_final)
|| !EVP_MD_meth_set_input_blocksize(md, 8)
|| !EVP_MD_meth_set_app_datasize(md,
sizeof(struct ossl_gost_imit_ctx))
- || !EVP_MD_meth_set_flags(md, 0)
+ || !EVP_MD_meth_set_flags(md, EVP_MD_FLAG_XOF)
|| !EVP_MD_meth_set_init(md, gost_imit_init_cp_12)
|| !EVP_MD_meth_set_update(md, gost_imit_update)
|| !EVP_MD_meth_set_final(md, gost_imit_final)
}
memcpy(EVP_CIPHER_CTX_iv_noconst(ctx),
EVP_CIPHER_CTX_original_iv(ctx), EVP_CIPHER_CTX_iv_length(ctx));
+ if (EVP_CIPHER_CTX_nid(ctx) == NID_id_tc26_cipher_gostr3412_2015_magma_ctracpkm
+ || EVP_CIPHER_CTX_nid(ctx) == NID_id_tc26_cipher_gostr3412_2015_magma_ctracpkm_omac) {
+ c->key_meshing = 1024;
+ } else {
+ c->key_meshing = 0;
+ }
+
return 1;
}
}
} else {
while (inl > 0) {
+ unsigned char tmpiv[8];
gostdecrypt(&(c->cctx), in_ptr, b);
+ memcpy(tmpiv, in_ptr, 8);
for (i = 0; i < 8; i++) {
out_ptr[i] = iv[i] ^ b[i];
}
- memcpy(iv, in_ptr, 8);
+ memcpy(iv, tmpiv, 8);
out_ptr += 8;
in_ptr += 8;
inl -= 8;
inc_counter(counter, 8);
}
+#define MAGMA_BLOCK_SIZE 8
+#define MAGMA_BLOCK_MASK (MAGMA_BLOCK_SIZE - 1)
+static inline void apply_acpkm_magma(struct ossl_gost_cipher_ctx *
+ ctx, unsigned int *num)
+{
+ if (!ctx->key_meshing || (*num < ctx->key_meshing))
+ return;
+ acpkm_magma_key_meshing(&ctx->cctx);
+ *num &= MAGMA_BLOCK_MASK;
+}
+
/* MAGMA encryption in CTR mode */
static int magma_cipher_do_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t inl)
{
const unsigned char *in_ptr = in;
unsigned char *out_ptr = out;
- size_t i = 0;
size_t j;
struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
+ unsigned int num = EVP_CIPHER_CTX_num(ctx);
+ size_t blocks, i, lasted = inl;
unsigned char b[8];
/* Process partial blocks */
- if (EVP_CIPHER_CTX_num(ctx)) {
- for (j = EVP_CIPHER_CTX_num(ctx), i = 0; j < 8 && i < inl;
- j++, i++, in_ptr++, out_ptr++) {
- *out_ptr = buf[7 - j] ^ (*in_ptr);
- }
- if (j == 8) {
- EVP_CIPHER_CTX_set_num(ctx, 0);
- } else {
- EVP_CIPHER_CTX_set_num(ctx, j);
- return 1;
- }
+ while ((num & MAGMA_BLOCK_MASK) && lasted) {
+ *out_ptr++ = *in_ptr++ ^ buf[7 - (num & MAGMA_BLOCK_MASK)];
+ --lasted;
+ num++;
}
+ blocks = lasted / MAGMA_BLOCK_SIZE;
/* Process full blocks */
- for (; i + 8 <= inl; i += 8, in_ptr += 8, out_ptr += 8) {
+ for (i = 0; i < blocks; i++) {
+ apply_acpkm_magma(c, &num);
        for (j = 0; j < 8; j++) {
            b[7 - j] = iv[j];
        }
        gostcrypt(&(c->cctx), b, buf);
        for (j = 0; j < 8; j++) {
            out_ptr[j] = buf[7 - j] ^ in_ptr[j];
}
ctr64_inc(iv);
+ c->count += MAGMA_BLOCK_SIZE;
+ in_ptr += MAGMA_BLOCK_SIZE;
+ out_ptr += MAGMA_BLOCK_SIZE;
+ num += MAGMA_BLOCK_SIZE;
+ lasted -= MAGMA_BLOCK_SIZE;
}
/* Process the rest of plaintext */
- if (i < inl) {
+ if (lasted > 0) {
+ apply_acpkm_magma(c, &num);
for (j = 0; j < 8; j++) {
b[7 - j] = iv[j];
}
- gostcrypt(&(c->cctx), iv, buf);
- ctr64_inc(iv);
- for (j = 0; i < inl; j++, i++) {
- out_ptr[j] = buf[7 - j] ^ in_ptr[j];
- }
+ gostcrypt(&(c->cctx), b, buf);
- EVP_CIPHER_CTX_set_num(ctx, j);
- } else {
- EVP_CIPHER_CTX_set_num(ctx, 0);
+ for (i = 0; i < lasted; i++)
+ out_ptr[i] = buf[7 - i] ^ in_ptr[i];
+ ctr64_inc(iv);
+ c->count += j;
+ num += lasted;
}
+ EVP_CIPHER_CTX_set_num(ctx, num);
return 1;
}
}
}
- for (; i + 8 < inl; i += 8, in_ptr += 8, out_ptr += 8) {
+ for (; (inl - i) >= 8; i += 8, in_ptr += 8, out_ptr += 8) {
/*
* block cipher current iv
*/
}
}
- for (; i + 8 < inl; i += 8, in_ptr += 8, out_ptr += 8) {
+ for (; (inl - i) >= 8; i += 8, in_ptr += 8, out_ptr += 8) {
/*
* block cipher current iv
*/
#endif
case EVP_CTRL_RAND_KEY:
{
- if (RAND_bytes
+ if (RAND_priv_bytes
((unsigned char *)ptr, EVP_CIPHER_CTX_key_length(ctx)) <= 0) {
GOSTerr(GOST_F_GOST_CIPHER_CTL, GOST_R_RNG_ERROR);
return -1;
/* Store parameters into ASN1 structure */
int gost89_get_asn1_parameters(EVP_CIPHER_CTX *ctx, ASN1_TYPE *params)
{
- int ret = -1;
int len;
GOST_CIPHER_PARAMS *gcp = NULL;
unsigned char *p;
int nid;
if (ASN1_TYPE_get(params) != V_ASN1_SEQUENCE) {
- return ret;
+ return -1;
}
p = params->value.sequence->data;
static void mac_block_mesh(struct ossl_gost_imit_ctx *c,
const unsigned char *data)
{
- unsigned char buffer[8];
/*
- * We are using local buffer for iv because CryptoPro doesn't interpret
+ * We are using NULL for iv because CryptoPro doesn't interpret
* internal state of MAC algorithm as iv during keymeshing (but does
* initialize internal state from iv in key transport
*/
assert(c->count % 8 == 0 && c->count <= 1024);
if (c->key_meshing && c->count == 1024) {
- cryptopro_key_meshing(&(c->cctx), buffer);
+ cryptopro_key_meshing(&(c->cctx), NULL);
}
mac_block(&(c->cctx), c->buffer, data);
c->count = c->count % 1024 + 8;
{
struct ossl_gost_imit_ctx *c = EVP_MD_CTX_md_data(ctx);
const unsigned char *p = data;
- size_t bytes = count, i;
+ size_t bytes = count;
if (!(c->key_set)) {
GOSTerr(GOST_F_GOST_IMIT_UPDATE, GOST_R_MAC_KEY_NOT_SET);
return 0;
}
if (c->bytes_left) {
+ size_t i;
for (i = c->bytes_left; i < 8 && bytes > 0; bytes--, i++, p++) {
c->partial_block[i] = *p;
}
GOSTerr(GOST_F_GOST_IMIT_CTRL, GOST_R_INVALID_MAC_KEY_SIZE);
return 0;
}
- case EVP_MD_CTRL_MAC_LEN:
+ case EVP_MD_CTRL_XOF_LEN:
{
struct ossl_gost_imit_ctx *c = EVP_MD_CTX_md_data(ctx);
if (arg < 1 || arg > 8) {