static int aes_aarch64_encrypt(void *_ctx, const void *src, size_t src_size,
			       void *dst, size_t dst_size)
{
	struct aes_ctx *ctx = _ctx;

	/*
	 * aes_v8_cbc_encrypt(in, out, length, key, iv, enc) follows the
	 * OpenSSL aesv8 convention: enc == 1 selects encryption, and the
	 * expanded key schedule must be 16-byte aligned for the Crypto
	 * Extensions loads, hence ALIGN16(). dst_size is assumed to be
	 * at least src_size and is not checked here.
	 */
	aes_v8_cbc_encrypt(src, dst, src_size, ALIGN16(&ctx->expanded_key),
			   ctx->iv, 1);
	return 0;
}
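For orientation, here is one way the wrapper above might be driven end to end. This is a minimal sketch, not the source's actual API: the key-expansion helper (aes_expand_key) and the struct aes_ctx layout are assumptions made only so the example is self-contained.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Assumed context layout: extra slack so ALIGN16() can align the schedule. */
struct aes_ctx {
	uint8_t expanded_key[240 + 16];
	uint8_t iv[16];
};

/* Defined in the snippet above; declared here for a self-contained sketch. */
int aes_aarch64_encrypt(void *_ctx, const void *src, size_t src_size,
			void *dst, size_t dst_size);
/* Hypothetical key-expansion helper, not part of the shown code. */
void aes_expand_key(struct aes_ctx *ctx, const uint8_t *key, size_t key_len);

int encrypt_buffer(const uint8_t key[16], const uint8_t iv[16],
		   const uint8_t *pt, uint8_t *ct, size_t len)
{
	struct aes_ctx ctx;

	if (len % 16)			/* CBC handles whole 16-byte blocks only */
		return -1;
	aes_expand_key(&ctx, key, 16);	/* assumed helper */
	memcpy(ctx.iv, iv, sizeof(ctx.iv));
	return aes_aarch64_encrypt(&ctx, pt, len, ct, len);
}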
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;	/* 10/12/14 for AES-128/192/256 */
	struct blkcipher_walk walk;
	unsigned int blocks;

	/* The NEON path must not sleep while vector state is live. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();	/* save/restore NEON state around Crypto Extensions use */
	for (first = 1; (blocks = walk.nbytes / AES_BLOCK_SIZE); first = 0) {
		/* out, in, round keys, rounds, whole blocks, IV, first-call flag */
		aes_v8_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, rounds, blocks,
				   walk.iv, first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}
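For completeness, here is a sketch of how cbc_encrypt() would be exposed through the pre-4.x blkcipher interface it is written against. The driver name, priority, and the cbc_decrypt() counterpart are assumptions; the overall shape follows what the arm64 Crypto Extensions glue in mainline registers, but this is not the source's exact registration code.

#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/module.h>

/* Assumed counterpart to cbc_encrypt(); not shown in the snippet above. */
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);

static struct crypto_alg aes_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-ce",		/* assumed name */
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u.blkcipher	= {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= crypto_aes_set_key,	/* generic expander from crypto/aes.h */
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,		/* assumed */
	},
};

static int __init aes_cbc_mod_init(void)
{
	return crypto_register_alg(&aes_cbc_alg);
}

static void __exit aes_cbc_mod_exit(void)
{
	crypto_unregister_alg(&aes_cbc_alg);
}

module_init(aes_cbc_mod_init);
module_exit(aes_cbc_mod_exit);
MODULE_LICENSE("GPL");

Registering with a priority above the generic C implementation (100) lets the crypto API pick this accelerated transform automatically whenever a caller asks for "cbc(aes)".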