Example #1
static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
    struct scatterlist *dst, struct scatterlist *src,
    unsigned int nbytes)
{
    int ret;
    struct blkcipher_walk walk;
    struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(
            crypto_blkcipher_tfm(desc->tfm));
    struct blkcipher_desc fallback_desc = {
        .tfm = ctx->fallback,
        .info = desc->info,
        .flags = desc->flags
    };

    if (in_interrupt()) {
        ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
    } else {
        blkcipher_walk_init(&walk, dst, src, nbytes);
        ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
            pagefault_disable();
            enable_kernel_altivec();
            aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr, walk.dst.virt.addr,
                (nbytes & AES_BLOCK_MASK)/AES_BLOCK_SIZE, &ctx->enc_key, walk.iv);
            pagefault_enable();

            crypto_inc(walk.iv, AES_BLOCK_SIZE);
            nbytes &= AES_BLOCK_SIZE - 1;
            ret = blkcipher_walk_done(desc, &walk, nbytes);
        }
        if (walk.nbytes) {
            p8_aes_ctr_final(ctx, &walk);
            ret = blkcipher_walk_done(desc, &walk, 0);
        }
    }

    return ret;
}

struct crypto_alg p8_aes_ctr_alg = {
    .cra_name = "ctr(aes)",
    .cra_driver_name = "p8_aes_ctr",
    .cra_module = THIS_MODULE,
    .cra_priority = 1000,
    .cra_type = &crypto_blkcipher_type,
    .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
    .cra_alignmask = 0,
    .cra_blocksize = 1,
    .cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
    .cra_init = p8_aes_ctr_init,
    .cra_exit = p8_aes_ctr_exit,
    .cra_blkcipher = {
        .ivsize = AES_BLOCK_SIZE,
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .setkey = p8_aes_ctr_setkey,
        .encrypt = p8_aes_ctr_crypt,
        .decrypt = p8_aes_ctr_crypt,
    },
};
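
The partial-block path above calls p8_aes_ctr_final(), which is not part of the snippet. Below is a minimal sketch of what such a helper has to do, assuming the same struct p8_aes_ctr_ctx, walk layout, and the aes_p8_encrypt primitive used elsewhere in this driver: encrypt the current counter block into a keystream buffer, XOR it over the remaining tail bytes, and advance the counter.

/* Sketch only: finish a CTR walk with fewer than AES_BLOCK_SIZE bytes left.
 * Field and helper names (enc_key, aes_p8_encrypt) are assumptions taken
 * from the surrounding examples, not the driver's actual implementation. */
static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
			     struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	pagefault_disable();
	enable_kernel_altivec();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);	/* one keystream block */
	pagefault_enable();

	crypto_xor(keystream, src, nbytes);	/* keystream ^= plaintext tail */
	memcpy(dst, keystream, nbytes);		/* emit the partial block      */
	crypto_inc(ctrblk, AES_BLOCK_SIZE);	/* advance the counter         */
}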
Example #2
static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	u64 inc;
	struct blkcipher_walk walk;
	struct p8_aes_ctr_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_sync_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
						    walk.dst.virt.addr,
						    (nbytes &
						     AES_BLOCK_MASK) /
						    AES_BLOCK_SIZE,
						    &ctx->enc_key,
						    walk.iv);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			/* We need to update IV mostly for last bytes/round */
			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
			if (inc > 0)
				while (inc--)
					crypto_inc(walk.iv, AES_BLOCK_SIZE);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
		if (walk.nbytes) {
			p8_aes_ctr_final(ctx, &walk);
			ret = blkcipher_walk_done(desc, &walk, 0);
		}
	}

	return ret;
}
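
Unlike Example #1, the interrupt-context fallback here is a synchronous skcipher, so the transform context and its init/exit paths look different. The following is a rough sketch under that assumption; the field and type names (fallback, enc_key, struct aes_key) follow the way the example dereferences them and are not taken from the snippet itself.

/* Sketch: allocate a synchronous software fallback for the same algorithm
 * name at transform-init time; assumed context layout. */
struct p8_aes_ctr_ctx {
	struct crypto_sync_skcipher *fallback;	/* software ctr(aes) fallback */
	struct aes_key enc_key;			/* expanded key for aes_p8_*  */
};

static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_sync_skcipher *fallback;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_sync_skcipher(alg, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback))
		return PTR_ERR(fallback);

	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_sync_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}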
static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src,
			    unsigned int nbytes, int enc)
{
	int ret;
	u8 tweak[AES_BLOCK_SIZE];
	u8 *iv;
	struct blkcipher_walk walk;
	struct p8_aes_xts_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		blkcipher_walk_init(&walk, dst, src, nbytes);

		ret = blkcipher_walk_virt(desc, &walk);
		iv = walk.iv;
		memset(tweak, 0, AES_BLOCK_SIZE);
		aes_p8_encrypt(iv, tweak, &ctx->tweak_key);

		while ((nbytes = walk.nbytes)) {
			if (enc)
				aes_p8_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
						nbytes & AES_BLOCK_MASK, &ctx->enc_key, NULL, tweak);
			else
				aes_p8_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
						nbytes & AES_BLOCK_MASK, &ctx->dec_key, NULL, tweak);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	}
	return ret;
}
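
The XTS path derives its initial tweak by encrypting the IV with a dedicated tweak key before walking the data, so the context needs three expanded keys plus the fallback. A hypothetical layout consistent with the fields the example dereferences (the real definition lives in the driver and is not shown here):

/* Assumed context layout for the XTS example; struct aes_key is taken to be
 * the expanded-key type consumed by the aes_p8_* primitives. */
struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;	/* software xts(aes) fallback   */
	struct aes_key enc_key;			/* key 1, data encryption       */
	struct aes_key dec_key;			/* key 1, data decryption       */
	struct aes_key tweak_key;		/* key 2, encrypts IV -> tweak  */
};

Note that key 2 is only ever used in the encrypt direction, which is why the decrypt branch above still computes the tweak with aes_p8_encrypt() and only switches to &ctx->dec_key for the data blocks.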
Example #4
static int tf_self_test_perform_blkcipher(
	const char *alg_name,
	const struct blkcipher_test_vector *tv,
	bool decrypt)
{
	struct blkcipher_desc desc = {0};
	struct scatterlist sg_in, sg_out;
	unsigned char *in = NULL;
	unsigned char *out = NULL;
	unsigned in_size, out_size;
	int error;

	desc.tfm = crypto_alloc_blkcipher(alg_name, 0, 0);
	if (IS_ERR_OR_NULL(desc.tfm)) {
		ERROR("crypto_alloc_blkcipher(%s) failed", alg_name);
		error = (desc.tfm == NULL ? -ENOMEM : (int)PTR_ERR(desc.tfm));
		goto abort;
	}
	INFO("%s alg_name=%s driver_name=%s key_size=%u block_size=%u",
	     decrypt ? "decrypt" : "encrypt", alg_name,
	     crypto_tfm_alg_driver_name(crypto_blkcipher_tfm(desc.tfm)),
	     tv->key_length * 8,
	     crypto_blkcipher_blocksize(desc.tfm));

	in_size = tv->length;
	in = kmalloc(in_size, GFP_KERNEL);
	if (in == NULL) {
		ERROR("kmalloc(%u) failed", in_size);
		error = -ENOMEM;
		goto abort;
	}
	memcpy(in, decrypt ? tv->ciphertext : tv->plaintext,
	       tv->length);

	out_size = tv->length + crypto_blkcipher_blocksize(desc.tfm);
	out = kmalloc(out_size, GFP_KERNEL);
	if (out == NULL) {
		ERROR("kmalloc(%u) failed", out_size);
		error = -ENOMEM;
		goto abort;
	}

	error = crypto_blkcipher_setkey(desc.tfm, tv->key, tv->key_length);
	if (error) {
		ERROR("crypto_alloc_setkey(%s) failed", alg_name);
		goto abort;
	}
	TF_TRACE_ARRAY(tv->key, tv->key_length);
	if (tv->iv != NULL) {
		unsigned iv_length = crypto_blkcipher_ivsize(desc.tfm);
		crypto_blkcipher_set_iv(desc.tfm, tv->iv, iv_length);
		TF_TRACE_ARRAY(tv->iv, iv_length);
	}

	sg_init_one(&sg_in, in, tv->length);
	sg_init_one(&sg_out, out, tv->length);
	TF_TRACE_ARRAY(in, tv->length);
	error = (decrypt ? crypto_blkcipher_decrypt : crypto_blkcipher_encrypt)
		(&desc, &sg_out, &sg_in, tv->length);
	if (error) {
		ERROR("crypto_blkcipher_%s(%s) failed",
		      decrypt ? "decrypt" : "encrypt", alg_name);
		goto abort;
	}
	TF_TRACE_ARRAY(out, tv->length);

	crypto_free_blkcipher(desc.tfm);

	if (memcmp((decrypt ? tv->plaintext : tv->ciphertext),
		   out, tv->length)) {
		ERROR("Wrong %s/%u %s result", alg_name, tv->key_length * 8,
		      decrypt ? "decryption" : "encryption");
		error = -EINVAL;
	} else {
		INFO("%s/%u: %s successful", alg_name, tv->key_length * 8,
		     decrypt ? "decryption" : "encryption");
		error = 0;
	}
	kfree(in);
	kfree(out);
	return error;

abort:
	if (!IS_ERR_OR_NULL(desc.tfm))
		crypto_free_blkcipher(desc.tfm);
	kfree(out);
	kfree(in);
	return error;
}
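
Driving this self-test only requires a filled-in test vector. The sketch below is a hypothetical usage example: the struct blkcipher_test_vector layout is inferred from the fields the function reads (key/key_length, optional iv, plaintext/ciphertext, length), the helper name is invented, and the buffers are placeholders rather than real known-answer values.

/* Field names inferred from the example above; the real definition may differ. */
struct blkcipher_test_vector {
	const unsigned char *key;
	unsigned int key_length;
	const unsigned char *iv;	/* NULL when the mode takes no IV */
	const unsigned char *plaintext;
	const unsigned char *ciphertext;
	unsigned int length;
};

/* Hypothetical caller: run one CBC-AES vector in both directions. */
static int tf_self_test_run_cbc_aes(void)
{
	/* Placeholder buffers; substitute a real known-answer vector. */
	static const unsigned char key[16], iv[16], pt[16], ct[16];
	struct blkcipher_test_vector tv = {
		.key = key,
		.key_length = sizeof(key),
		.iv = iv,
		.plaintext = pt,
		.ciphertext = ct,
		.length = sizeof(pt),
	};
	int error;

	error = tf_self_test_perform_blkcipher("cbc(aes)", &tv, false);
	if (!error)
		error = tf_self_test_perform_blkcipher("cbc(aes)", &tv, true);
	return error;
}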