X-Git-Url: http://wagner.pp.ru/gitweb/?a=blobdiff_plain;f=gost_crypt.c;h=0aa2ecf277b8066dfa9fdd5aef0c74ec1dd11d1e;hb=bd6c4f8c5c8a087ce0ea65c3dc5ee38b38b8802c;hp=feca0b6e2ec030361b6aed90ce8de25f4b67c873;hpb=410e6c831e2fc37ac9c81c5c6e2e32f564720e86;p=openssl-gost%2Fengine.git diff --git a/gost_crypt.c b/gost_crypt.c index feca0b6..0aa2ecf 100644 --- a/gost_crypt.c +++ b/gost_crypt.c @@ -54,6 +54,8 @@ static int magma_cipher_init(EVP_CIPHER_CTX *ctx, const unsigned char *key, static int magma_cipher_init_ctr_acpkm_omac(EVP_CIPHER_CTX *ctx, const unsigned char *key, const unsigned char *iv, int enc); /* Handles block of data in CBC mode */ +static int magma_cipher_do_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out, + const unsigned char *in, size_t inl); static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, const unsigned char *in, size_t inl); static int magma_cipher_do_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out, @@ -74,7 +76,7 @@ static int magma_cipher_ctl_acpkm_omac(EVP_CIPHER_CTX *ctx, int type, int arg, v * Note: that you cannot template 0 value. */ #define TPL(st,field) ( \ - ((st)->field) ?: TPL_VAL(st,field) \ + ((st)->field) ? ((st)->field) : TPL_VAL(st,field) \ ) #define TPL_VAL(st,field) ( \ @@ -187,8 +189,7 @@ GOST_cipher Gost28147_89_cnt_12_cipher = { static GOST_cipher magma_template_cipher = { .block_size = 8, .key_len = 32, - .iv_len = 8, - .flags = EVP_CIPH_CUSTOM_IV | + .flags = EVP_CIPH_RAND_KEY | EVP_CIPH_ALWAYS_CALL_INIT, .cleanup = gost_cipher_cleanup, @@ -205,6 +206,7 @@ GOST_cipher magma_ctr_cipher = { .block_size = 1, .iv_len = 4, .flags = EVP_CIPH_CTR_MODE | + EVP_CIPH_CUSTOM_IV | EVP_CIPH_NO_PADDING, .init = magma_cipher_init, }; @@ -215,6 +217,7 @@ GOST_cipher magma_ctr_acpkm_cipher = { .block_size = 1, .iv_len = 4, .flags = EVP_CIPH_CTR_MODE | + EVP_CIPH_CUSTOM_IV | EVP_CIPH_NO_PADDING, .init = magma_cipher_init, }; @@ -225,6 +228,7 @@ GOST_cipher magma_ctr_acpkm_omac_cipher = { .block_size = 1, .iv_len = 4, .flags = EVP_CIPH_CTR_MODE | + EVP_CIPH_CUSTOM_IV | EVP_CIPH_NO_PADDING | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_CUSTOM_CIPHER | @@ -234,10 +238,20 @@ GOST_cipher magma_ctr_acpkm_omac_cipher = { .ctrl = magma_cipher_ctl_acpkm_omac, }; +GOST_cipher magma_ecb_cipher = { + .nid = NID_magma_ecb, + .template = &magma_template_cipher, + .flags = EVP_CIPH_ECB_MODE, + .init = magma_cipher_init, + .do_cipher = magma_cipher_do_ecb, +}; + GOST_cipher magma_cbc_cipher = { .nid = NID_magma_cbc, .template = &gost_template_cipher, - .flags = EVP_CIPH_CBC_MODE, + .iv_len = 8, + .flags = EVP_CIPH_CBC_MODE | + EVP_CIPH_CUSTOM_IV, .init = magma_cipher_init, .do_cipher = magma_cipher_do_cbc, }; @@ -456,8 +470,10 @@ static int magma_cipher_init(EVP_CIPHER_CTX *ctx, const unsigned char *key, } } - if (key) + if (key) { magma_key(&(c->cctx), key); + magma_master_key(&(c->cctx), key); + } if (iv) { memcpy((unsigned char *)EVP_CIPHER_CTX_original_iv(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx)); @@ -575,11 +591,13 @@ static int gost_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, } } else { while (inl > 0) { + unsigned char tmpiv[8]; gostdecrypt(&(c->cctx), in_ptr, b); + memcpy(tmpiv, in_ptr, 8); for (i = 0; i < 8; i++) { out_ptr[i] = iv[i] ^ b[i]; } - memcpy(iv, in_ptr, 8); + memcpy(iv, tmpiv, 8); out_ptr += 8; in_ptr += 8; inl -= 8; @@ -588,6 +606,29 @@ static int gost_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, return 1; } +/* MAGMA encryption in ECB mode */ +static int magma_cipher_do_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out, + const unsigned char *in, 
size_t inl) +{ + struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx); + if (EVP_CIPHER_CTX_encrypting(ctx)) { + while (inl > 0) { + magmacrypt(&(c->cctx), in, out); + out += 8; + in += 8; + inl -= 8; + } + } else { + while (inl > 0) { + magmadecrypt(&(c->cctx), in, out); + out += 8; + in += 8; + inl -= 8; + } + } + return 1; +} + /* MAGMA encryption in CBC mode */ static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, const unsigned char *in, size_t inl) @@ -603,13 +644,9 @@ static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, while (inl > 0) { for (i = 0; i < 8; i++) { - b[7 - i] = iv[i] ^ in_ptr[i]; - } - gostcrypt(&(c->cctx), b, d); - - for (i = 0; i < 8; i++) { - out_ptr[7 - i] = d[i]; + out_ptr[i] = iv[i] ^ in_ptr[i]; } + magmacrypt(&(c->cctx), out_ptr, out_ptr); memcpy(iv, out_ptr, 8); out_ptr += 8; in_ptr += 8; @@ -617,13 +654,10 @@ static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out, } } else { while (inl > 0) { - for (i = 0; i < 8; i++) { - d[7 - i] = in_ptr[i]; - } - gostdecrypt(&(c->cctx), d, b); + magmadecrypt(&(c->cctx), in_ptr, b); memcpy(d, in_ptr, 8); for (i = 0; i < 8; i++) { - out_ptr[i] = iv[i] ^ b[7 - i]; + out_ptr[i] = iv[i] ^ b[i]; } memcpy(iv, d, 8); out_ptr += 8; @@ -640,67 +674,64 @@ static void ctr64_inc(unsigned char *counter) inc_counter(counter, 8); } +#define MAGMA_BLOCK_SIZE 8 +#define MAGMA_BLOCK_MASK (MAGMA_BLOCK_SIZE - 1) +static inline void apply_acpkm_magma(struct ossl_gost_cipher_ctx * + ctx, unsigned int *num) +{ + if (!ctx->key_meshing || (*num < (unsigned int)ctx->key_meshing)) + return; + acpkm_magma_key_meshing(&ctx->cctx); + *num &= MAGMA_BLOCK_MASK; +} + /* MAGMA encryption in CTR mode */ static int magma_cipher_do_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out, const unsigned char *in, size_t inl) { const unsigned char *in_ptr = in; unsigned char *out_ptr = out; - size_t i = 0; size_t j; struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx); unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx); unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx); - unsigned char b[8]; + unsigned int num = EVP_CIPHER_CTX_num(ctx); + size_t blocks, i, lasted = inl; /* Process partial blocks */ - if (EVP_CIPHER_CTX_num(ctx)) { - for (j = EVP_CIPHER_CTX_num(ctx), i = 0; j < 8 && i < inl; - j++, i++, in_ptr++, out_ptr++) { - *out_ptr = buf[7 - j] ^ (*in_ptr); - } - if (j == 8) { - EVP_CIPHER_CTX_set_num(ctx, 0); - } else { - EVP_CIPHER_CTX_set_num(ctx, j); - return inl; - } + while ((num & MAGMA_BLOCK_MASK) && lasted) { + *out_ptr++ = *in_ptr++ ^ buf[num & MAGMA_BLOCK_MASK]; + --lasted; + num++; } + blocks = lasted / MAGMA_BLOCK_SIZE; /* Process full blocks */ - for (; i + 8 <= inl; i += 8, in_ptr += 8, out_ptr += 8) { - for (j = 0; j < 8; j++) { - b[7 - j] = iv[j]; - } - gostcrypt(&(c->cctx), b, buf); + for (i = 0; i < blocks; i++) { + apply_acpkm_magma(c, &num); + magmacrypt(&(c->cctx), iv, buf); for (j = 0; j < 8; j++) { - out_ptr[j] = buf[7 - j] ^ in_ptr[j]; + out_ptr[j] = buf[j] ^ in_ptr[j]; } ctr64_inc(iv); - c->count += 8; - if (c->key_meshing && (c->count % c->key_meshing == 0)) - acpkm_magma_key_meshing(&(c->cctx)); + c->count += MAGMA_BLOCK_SIZE; + in_ptr += MAGMA_BLOCK_SIZE; + out_ptr += MAGMA_BLOCK_SIZE; + num += MAGMA_BLOCK_SIZE; + lasted -= MAGMA_BLOCK_SIZE; } /* Process the rest of plaintext */ - if (i < inl) { - for (j = 0; j < 8; j++) { - b[7 - j] = iv[j]; - } - gostcrypt(&(c->cctx), b, buf); - - for (j = 0; i < inl; j++, i++) { - out_ptr[j] = buf[7 - j] ^ in_ptr[j]; - 
} + if (lasted > 0) { + apply_acpkm_magma(c, &num); + magmacrypt(&(c->cctx), iv, buf); + for (i = 0; i < lasted; i++) + out_ptr[i] = buf[i] ^ in_ptr[i]; ctr64_inc(iv); c->count += 8; - if (c->key_meshing && (c->count % c->key_meshing == 0)) - acpkm_magma_key_meshing(&(c->cctx)); - - EVP_CIPHER_CTX_set_num(ctx, j); - } else { - EVP_CIPHER_CTX_set_num(ctx, 0); + num += lasted; } + EVP_CIPHER_CTX_set_num(ctx, num); return inl; } @@ -759,7 +790,7 @@ static int gost_cipher_do_cfb(EVP_CIPHER_CTX *ctx, unsigned char *out, } } - for (; i + 8 < inl; i += 8, in_ptr += 8, out_ptr += 8) { + for (; (inl - i) >= 8; i += 8, in_ptr += 8, out_ptr += 8) { /* * block cipher current iv */ @@ -820,7 +851,7 @@ static int gost_cipher_do_cnt(EVP_CIPHER_CTX *ctx, unsigned char *out, } } - for (; i + 8 < inl; i += 8, in_ptr += 8, out_ptr += 8) { + for (; (inl - i) >= 8; i += 8, in_ptr += 8, out_ptr += 8) { /* * block cipher current iv */ @@ -942,6 +973,27 @@ static int gost_cipher_ctl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr) return 1; } +/* Decrement 8-byte sequence if needed */ +int decrement_sequence(unsigned char *seq, int decrement) { + if (decrement < 0 || decrement > 1) + return 0; + + int j; + if (decrement) { + for (j = 7; j >= 0; j--) + { + if (seq[j] != 0) + { + seq[j]--; + break; + } + else + seq[j] = 0xFF; + } + } + return 1; +} + /* Control function for gost cipher */ static int magma_cipher_ctl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr) { @@ -969,6 +1021,54 @@ static int magma_cipher_ctl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr) c->key_meshing = arg; return 1; } + case EVP_CTRL_TLSTREE: + { + unsigned char newkey[32]; + int mode = EVP_CIPHER_CTX_mode(ctx); + struct ossl_gost_cipher_ctx *ctr_ctx = NULL; + gost_ctx *c = NULL; + + unsigned char adjusted_iv[8]; + unsigned char seq[8]; + int j, carry, decrement_arg; + if (mode != EVP_CIPH_CTR_MODE) + return -1; + + ctr_ctx = (struct ossl_gost_cipher_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx); + c = &(ctr_ctx->cctx); + + /* + * 'arg' parameter indicates what we should do with sequence value. + * + * When function called, seq is incremented after MAC calculation. + * In ETM mode, we use seq 'as is' in the ctrl-function (arg = 0) + * Otherwise we have to decrease it in the implementation (arg = 1). + */ + memcpy(seq, ptr, 8); + decrement_arg = arg; + if(!decrement_sequence(seq, decrement_arg)) { + GOSTerr(GOST_F_MAGMA_CIPHER_CTL, GOST_R_CTRL_CALL_FAILED); + return -1; + } + + if (gost_tlstree(NID_magma_cbc, (const unsigned char *)c->master_key, newkey, + (const unsigned char *)seq) > 0) { + memset(adjusted_iv, 0, 8); + memcpy(adjusted_iv, EVP_CIPHER_CTX_original_iv(ctx), 4); + for (j = 3, carry = 0; j >= 0; j--) + { + int adj_byte = adjusted_iv[j] + seq[j+4] + carry; + carry = (adj_byte > 255) ? 1 : 0; + adjusted_iv[j] = adj_byte & 0xFF; + } + EVP_CIPHER_CTX_set_num(ctx, 0); + memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), adjusted_iv, 8); + + magma_key(c, newkey); + return 1; + } + } + return -1; default: GOSTerr(GOST_F_MAGMA_CIPHER_CTL, GOST_R_UNSUPPORTED_CIPHER_CTL_COMMAND); return -1;
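
For reference: the EVP_CTRL_TLSTREE handler added above derives a fresh per-record key with gost_tlstree() and rebuilds the CTR IV by adding the low four bytes of the (possibly decremented) TLS sequence number to the 4-byte original IV with carry. A minimal standalone sketch of that IV arithmetic follows; the helper name tlstree_adjust_iv, the main() driver and the sample values are illustrative assumptions only, not code taken from the engine.

#include <stdio.h>
#include <string.h>

/* Restatement of the adjusted_iv computation from magma_cipher_ctl():
 * zero-extend the 4-byte original IV to 8 bytes, then add seq[4..7]
 * (the low 32 bits of the big-endian record sequence number) with carry. */
static void tlstree_adjust_iv(unsigned char adjusted_iv[8],
                              const unsigned char orig_iv[4],
                              const unsigned char seq[8])
{
    int j, carry = 0;

    memset(adjusted_iv, 0, 8);
    memcpy(adjusted_iv, orig_iv, 4);
    for (j = 3; j >= 0; j--) {
        int b = adjusted_iv[j] + seq[j + 4] + carry;
        carry = (b > 255) ? 1 : 0;
        adjusted_iv[j] = b & 0xFF;
    }
}

int main(void)
{
    /* Made-up inputs, only to show the carry propagation. */
    unsigned char orig_iv[4] = { 0x00, 0x00, 0x00, 0xFF };
    unsigned char seq[8]     = { 0, 0, 0, 0, 0x00, 0x00, 0x00, 0x01 };
    unsigned char iv[8];
    int i;

    tlstree_adjust_iv(iv, orig_iv, seq);
    for (i = 0; i < 8; i++)
        printf("%02X", iv[i]);
    printf("\n");           /* 0000010000000000 for these inputs */
    return 0;
}

In the patch itself the adjusted IV is then installed with EVP_CIPHER_CTX_set_num(ctx, 0) and a memcpy() into the context IV, and the gost_tlstree()-derived key is loaded with magma_key(), so each TLS record is processed under its own key/IV pair.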