static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),
		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),
		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static struct crypto_alg srp_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx2",
	.cra_driver_name	= "__driver-ecb-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[0].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx2",
	.cra_driver_name	= "__driver-cbc-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[1].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx2",
	.cra_driver_name	= "__driver-ctr-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[2].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx2",
	.cra_driver_name	= "__driver-lrw-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[3].cra_list),
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx2",
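/*
 * Not shown in this excerpt: the module init/exit path that registers and
 * unregisters srp_algs[]. What follows is a minimal sketch of the usual
 * pattern for these glue files, not a copy of the file's actual routines;
 * the function names and the exact feature checks here are assumptions.
 */
#if 0	/* illustrative sketch only */
static int __init serpent_avx2_init(void)
{
	const char *feature_name;

	/* The AVX2 vector code needs AVX2 and OS-enabled YMM state. */
	if (!boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 instructions are not detected.\n");
		return -ENODEV;
	}
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs));
}

static void __exit serpent_avx2_fini(void)
{
	crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs));
}
#endif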
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),
		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),
		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

void lrw_serpent_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
EXPORT_SYMBOL_GPL(lrw_serpent_exit_tfm);

int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
EXPORT_SYMBOL_GPL(xts_serpent_setkey);

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static struct crypto_alg serpent_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx",
	.cra_driver_name	= "__driver-ecb-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx",
	.cra_driver_name	= "__driver-cbc-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx",
	.cra_driver_name	= "__driver-ctr-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx",
	.cra_driver_name	= "__driver-lrw-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx",
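/*
 * encrypt_callback()/decrypt_callback(), installed as .crypt_fn by the LRW
 * helpers above, are defined earlier in the file and not shown in this
 * excerpt. The sketch below illustrates the shape such a callback takes;
 * serpent_ecb_enc_8way_avx() and the exact batching are assumptions for
 * illustration, not the file's actual implementation.
 */
#if 0	/* illustrative sketch only */
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;

	/* Take the FPU lazily; lrw_encrypt() ends the section via
	 * serpent_fpu_end() once the whole walk is done.
	 */
	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	/* Full batches go through the vectorized ECB helper... */
	while (nbytes >= bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * SERPENT_PARALLEL_BLOCKS;
		nbytes -= bsize * SERPENT_PARALLEL_BLOCKS;
	}

	/* ...and any leftover blocks fall back to the scalar cipher. */
	for (; nbytes >= bsize; srcdst += bsize, nbytes -= bsize)
		__serpent_encrypt(ctx->ctx, srcdst, srcdst);
}
#endif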