+/*
+ * MAGMA (GOST R 34.12-2015, 64-bit block) encryption/decryption in ECB mode.
+ *
+ * Processes |inl| bytes from |in| to |out| one 8-byte block at a time;
+ * the direction is taken from the context's encrypt flag.  Returns 1.
+ *
+ * NOTE(review): the loop previously ran "while (inl > 0)"; for an |inl|
+ * that is not a multiple of 8 the final "inl -= 8" would wrap the
+ * size_t and over-read both buffers.  The EVP layer normally hands this
+ * routine block-aligned input, but the ">= 8" guard makes it safe
+ * regardless (any trailing partial block is ignored, as before no valid
+ * output could be produced for it).
+ */
+static int magma_cipher_do_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out,
+                               const unsigned char *in, size_t inl)
+{
+    struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
+
+    if (EVP_CIPHER_CTX_encrypting(ctx)) {
+        while (inl >= 8) {
+            magmacrypt(&(c->cctx), in, out);
+            out += 8;
+            in += 8;
+            inl -= 8;
+        }
+    } else {
+        while (inl >= 8) {
+            magmadecrypt(&(c->cctx), in, out);
+            out += 8;
+            in += 8;
+            inl -= 8;
+        }
+    }
+    return 1;
+}
+
+/* MAGMA encryption in CBC mode */
+/*
+ * Processes |inl| bytes in 8-byte blocks using the IV stored in the EVP
+ * context.  Assumes |inl| is a multiple of the block size (the EVP
+ * layer guarantees this for block ciphers); a partial block would wrap
+ * the size_t counter.  Supports in-place operation (in == out).
+ * Returns 1.
+ */
+static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out,
+ const unsigned char *in, size_t inl)
+{
+ unsigned char b[8];
+ unsigned char d[8];
+ const unsigned char *in_ptr = in;
+ unsigned char *out_ptr = out;
+ int i;
+ struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
+ unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
+ if (EVP_CIPHER_CTX_encrypting(ctx)) {
+ while (inl > 0) {
+
+ /* C_i = E(P_i XOR IV); the ciphertext block becomes the next IV. */
+ for (i = 0; i < 8; i++) {
+ out_ptr[i] = iv[i] ^ in_ptr[i];
+ }
+ magmacrypt(&(c->cctx), out_ptr, out_ptr);
+ memcpy(iv, out_ptr, 8);
+ out_ptr += 8;
+ in_ptr += 8;
+ inl -= 8;
+ }
+ } else {
+ while (inl > 0) {
+ /* P_i = D(C_i) XOR IV.  The ciphertext block is copied to |d|
+ * BEFORE the XOR writes |out_ptr|, so in-place operation
+ * (in == out) still yields the correct next IV. */
+ magmadecrypt(&(c->cctx), in_ptr, b);
+ memcpy(d, in_ptr, 8);
+ for (i = 0; i < 8; i++) {
+ out_ptr[i] = iv[i] ^ b[i];
+ }
+ memcpy(iv, d, 8);
+ out_ptr += 8;
+ in_ptr += 8;
+ inl -= 8;
+ }
+ }
+ return 1;
+}
+
+/* Bump the 8-byte CTR counter by one (thin wrapper over inc_counter). */
+static void ctr64_inc(unsigned char *ctr)
+{
+    inc_counter(ctr, 8);
+}
+
+#define MAGMA_BLOCK_SIZE 8
+#define MAGMA_BLOCK_MASK (MAGMA_BLOCK_SIZE - 1)
+/*
+ * ACPKM key meshing for MAGMA CTR mode.
+ *
+ * A no-op unless key meshing is enabled (ctx->key_meshing != 0) and
+ * *num has reached the meshing threshold; otherwise derives the next
+ * section key via acpkm_magma_key_meshing() and folds *num back to its
+ * within-block offset (low 3 bits).
+ * NOTE(review): *num appears to count bytes processed within the
+ * current key section — confirm against the CTR caller that maintains
+ * it.
+ */
+static inline void apply_acpkm_magma(struct ossl_gost_cipher_ctx *
+ ctx, unsigned int *num)
+{
+ if (!ctx->key_meshing || (*num < (unsigned int)ctx->key_meshing))
+ return;
+ acpkm_magma_key_meshing(&ctx->cctx);
+ *num &= MAGMA_BLOCK_MASK;
+}
+
+/* MAGMA encryption in CTR mode */
+/*
+ * Generates keystream blocks E(IV) and XORs them into the output; the
+ * IV is advanced as a 64-bit counter after each block.  The EVP context
+ * buffer |buf| caches the most recent keystream block so a partial tail
+ * can be resumed on the next call, with |num| (the EVP ctx "num") as
+ * the running byte offset.  Returns (int)inl.
+ */
+static int magma_cipher_do_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out,
+ const unsigned char *in, size_t inl)
+{
+ const unsigned char *in_ptr = in;
+ unsigned char *out_ptr = out;
+ size_t j;
+ struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
+ unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
+ unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
+ unsigned int num = EVP_CIPHER_CTX_num(ctx);
+ size_t blocks, i, lasted = inl;
+/* Process partial blocks */
+ /* First consume any keystream bytes left in |buf| from the previous
+ * call (num not block-aligned). */
+ while ((num & MAGMA_BLOCK_MASK) && lasted) {
+ *out_ptr++ = *in_ptr++ ^ buf[num & MAGMA_BLOCK_MASK];
+ --lasted;
+ num++;
+ }
+ blocks = lasted / MAGMA_BLOCK_SIZE;
+
+/* Process full blocks */
+ for (i = 0; i < blocks; i++) {
+ /* Re-key first if the ACPKM section boundary has been reached. */
+ apply_acpkm_magma(c, &num);
+ magmacrypt(&(c->cctx), iv, buf);
+ for (j = 0; j < 8; j++) {
+ out_ptr[j] = buf[j] ^ in_ptr[j];
+ }
+ ctr64_inc(iv);
+ c->count += MAGMA_BLOCK_SIZE;
+ in_ptr += MAGMA_BLOCK_SIZE;
+ out_ptr += MAGMA_BLOCK_SIZE;
+ num += MAGMA_BLOCK_SIZE;
+ lasted -= MAGMA_BLOCK_SIZE;
+ }
+
+/* Process the rest of plaintext */
+ if (lasted > 0) {
+ apply_acpkm_magma(c, &num);
+ magmacrypt(&(c->cctx), iv, buf);
+
+ for (i = 0; i < lasted; i++)
+ out_ptr[i] = buf[i] ^ in_ptr[i];
+ /* A full keystream block was generated (and its tail kept in
+ * |buf| for the next call), so the IV and c->count advance by a
+ * whole block even though only |lasted| bytes were output. */
+ ctr64_inc(iv);
+ c->count += 8;
+ num += lasted;
+ }
+ EVP_CIPHER_CTX_set_num(ctx, num);
+
+ return inl;
+}
+
+/* MAGMA encryption in CTR-ACPKM mode with OMAC authentication */
+/*
+ * Wraps magma_cipher_do_ctr() and feeds the plaintext through the OMAC
+ * context to produce/verify the authentication tag (GOST 2015 scheme,
+ * finalized via gost2015_final_call).
+ *
+ * - in == NULL && inl == 0 is the final call: emit/verify the tag.
+ * - in == NULL otherwise is invalid.
+ * - The MAC covers the plaintext, so when encrypting the OMAC update
+ *   happens BEFORE encryption and when decrypting AFTER decryption;
+ *   this ordering also keeps in-place operation (in == out) correct.
+ *
+ * Returns (int)inl on success, -1 on error.
+ */
+static int magma_cipher_do_ctr_acpkm_omac(EVP_CIPHER_CTX *ctx, unsigned char *out,
+                                          const unsigned char *in, size_t inl)
+{
+    struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
+
+    if (in == NULL && inl == 0) /* Final call */
+        return gost2015_final_call(ctx, c->omac_ctx, MAGMA_MAC_MAX_SIZE, c->tag, magma_cipher_do_ctr);
+
+    if (in == NULL)
+        return -1;
+
+    /* As in and out can be the same pointer, process unencrypted here.
+     * The EVP_DigestSignUpdate result was previously ignored; a failed
+     * MAC update must fail the whole operation. */
+    if (EVP_CIPHER_CTX_encrypting(ctx)
+        && EVP_DigestSignUpdate(c->omac_ctx, in, inl) <= 0)
+        return -1;
+
+    if (magma_cipher_do_ctr(ctx, out, in, inl) != inl)
+        return -1;
+
+    /* As in and out can be the same pointer, process decrypted here */
+    if (!EVP_CIPHER_CTX_encrypting(ctx)
+        && EVP_DigestSignUpdate(c->omac_ctx, out, inl) <= 0)
+        return -1;
+
+    return inl;
+}