/*
 * Module entry point: register the SHA-256 shash only when the CPACF
 * KIMD instruction reports SHA-256 support on this machine.
 */
static int sha256_s390_init(void)
{
	if (crypt_s390_func_available(KIMD_SHA_256))
		return crypto_register_shash(&alg);

	return -EOPNOTSUPP;
}
Example #2
0
/*
 * Module entry point: register the SHA-1 algorithm only when the CPACF
 * KIMD instruction reports SHA-1 support on this machine.
 */
static int __init init(void)
{
	if (crypt_s390_func_available(KIMD_SHA_1))
		return crypto_register_alg(&alg);

	return -EOPNOTSUPP;
}
Example #3
0
/*
 * Module entry point: probe which AES key lengths the CPU's KM
 * instruction accelerates, clamp the advertised key sizes on machines
 * that only do AES-128, and register the plain/ECB/CBC AES algorithms.
 *
 * Returns 0 on success, -EOPNOTSUPP when no AES facility is present,
 * or the error from the first failed registration (earlier successful
 * registrations are rolled back).
 */
static int __init aes_init(void)
{
	int ret;

	/* Detect hardware support per key length. */
	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128) {
		aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
		ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		/*
		 * Fix: the two adjacent string literals previously joined
		 * without a space, logging "...available for128 bit keys".
		 */
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for "
		       "128 bit keys\n");
	}

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

	/* Unwind in reverse registration order. */
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
Example #4
0
/*
 * Module entry point: register the DES and triple-DES algorithms when
 * the CPACF KM instruction supports all three variants.
 *
 * Fixes two defects in the original:
 *  - on a partial failure it called crypto_unregister_alg() on
 *    algorithms that were never successfully registered, which
 *    corrupts the crypto algorithm list;
 *  - it discarded the real registration error and always returned
 *    -EEXIST.
 * Registration is now sequential with goto-based rollback in reverse
 * order, propagating the first failure's error code.
 */
static int init(void)
{
	int ret;

	if (!crypt_s390_func_available(KM_DEA_ENCRYPT) ||
	    !crypt_s390_func_available(KM_TDEA_128_ENCRYPT) ||
	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT))
		return -ENOSYS;

	ret = crypto_register_alg(&des_alg);
	if (ret)
		goto des_err;
	ret = crypto_register_alg(&des3_128_alg);
	if (ret)
		goto des3_128_err;
	ret = crypto_register_alg(&des3_192_alg);
	if (ret)
		goto des3_192_err;
	return 0;

	/* Unwind only what was actually registered, in reverse order. */
des3_192_err:
	crypto_unregister_alg(&des3_128_alg);
des3_128_err:
	crypto_unregister_alg(&des_alg);
des_err:
	return ret;
}
/*
 * Module entry point: probe which AES key lengths the CPU's KM
 * instruction accelerates and register the plain/ECB/CBC AES
 * algorithms. Returns 0 on success, -EOPNOTSUPP when no AES facility
 * exists, or the error from the first failed registration (previously
 * registered algorithms are unregistered before returning).
 */
static int __init aes_s390_init(void)
{
	int rc;

	/* Record each hardware-supported key length. */
	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	rc = crypto_register_alg(&aes_alg);
	if (rc)
		return rc;

	rc = crypto_register_alg(&ecb_aes_alg);
	if (rc) {
		crypto_unregister_alg(&aes_alg);
		return rc;
	}

	rc = crypto_register_alg(&cbc_aes_alg);
	if (rc) {
		crypto_unregister_alg(&ecb_aes_alg);
		crypto_unregister_alg(&aes_alg);
		return rc;
	}

	return 0;
}
/*
 * Module entry point: register the SHA-256 algorithm when the CPACF
 * KIMD instruction supports it. Logs a message if registration fails.
 *
 * Fix: the failure printk lacked a trailing newline, so the message
 * would not terminate its kernel log line.
 */
static int init(void)
{
	int ret;

	if (!crypt_s390_func_available(KIMD_SHA_256))
		return -ENOSYS;

	ret = crypto_register_alg(&alg);
	if (ret != 0)
		printk(KERN_INFO "crypt_s390: sha256_s390 couldn't be loaded.\n");
	return ret;
}
Example #7
0
/*
 * Module entry point: register the SHA-512 and SHA-384 shash
 * algorithms when the CPACF KIMD instruction supports SHA-512
 * (SHA-384 uses the same engine). If SHA-384 registration fails,
 * SHA-512 is unregistered again and the error is returned.
 */
static int __init init(void)
{
	int ret;

	if (!crypt_s390_func_available(KIMD_SHA_512))
		return -EOPNOTSUPP;

	ret = crypto_register_shash(&sha512_alg);
	if (ret < 0)
		return ret;

	ret = crypto_register_shash(&sha384_alg);
	if (ret < 0)
		crypto_unregister_shash(&sha512_alg);
	return ret;
}
Example #8
0
/*
 * Module entry point: register the SHA-1 algorithm when the CPACF
 * KIMD instruction supports it; log a message on success.
 * Returns -ENOSYS when the facility is absent.
 */
static int
init(void)
{
	int rc;

	if (!crypt_s390_func_available(KIMD_SHA_1))
		return -ENOSYS;

	rc = crypto_register_alg(&alg);
	if (rc == 0)
		printk(KERN_INFO "crypt_s390: sha1_s390 loaded.\n");
	return rc;
}
Example #9
0
/*
 * Module entry point: register the SHA-256 and SHA-224 shash
 * algorithms when the CPACF KIMD SHA-256 function is available
 * (SHA-224 shares the SHA-256 engine). If SHA-224 registration
 * fails, SHA-256 is rolled back and the error returned.
 */
static int __init sha256_s390_init(void)
{
	int rc;

	if (!crypt_s390_func_available(KIMD_SHA_256, CRYPT_S390_MSA))
		return -EOPNOTSUPP;

	rc = crypto_register_shash(&sha256_alg);
	if (rc < 0)
		return rc;

	rc = crypto_register_shash(&sha224_alg);
	if (rc < 0)
		crypto_unregister_shash(&sha256_alg);
	return rc;
}
Example #10
0
/*
 * Module entry point: probe hardware AES support and register the
 * plain/ECB/CBC AES algorithms; additionally register XTS and CTR
 * modes when the corresponding KM/KMCTR facility functions are
 * available. Returns 0 on success, -EOPNOTSUPP when no AES facility
 * exists, -ENOMEM if the CTR bounce page cannot be allocated, or the
 * error from the first failed registration. On failure, previously
 * registered algorithms are unregistered via the goto chain below.
 */
static int __init aes_s390_init(void)
{
	int ret;

	/* Record each hardware-supported key length. */
	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	/* XTS needs both the 128- and 256-bit KM XTS functions. */
	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		/* Remembered so module exit knows to unregister XTS. */
		xts_aes_alg_reg = 1;
	}

	/* CTR needs KMCTR support for all three key lengths. */
	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		/* Scratch page used to build CTR counter blocks. */
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		/* Remembered so module exit knows to unregister CTR. */
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

	/*
	 * Error unwind: each label undoes the registrations that
	 * succeeded before the corresponding failure point, in
	 * reverse order.
	 */
ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
Example #11
0
/*
 * Module entry point: register the SHA-1 shash only when the CPACF
 * KIMD instruction reports SHA-1 support on this machine.
 */
static int __init sha1_s390_init(void)
{
	if (crypt_s390_func_available(KIMD_SHA_1, CRYPT_S390_MSA))
		return crypto_register_shash(&alg);

	return -EOPNOTSUPP;
}
/*
 * Debug/diagnostic helper: print the availability (0 or 1) of every
 * known CPACF function of the KM, KMC, KIMD, KLMD and KMAC
 * instructions to the kernel log. Purely informational; has no
 * side effects beyond printk.
 */
static void query_available_functions(void)
{
	printk(KERN_INFO "#####################\n");

	/* query available KM functions */
	printk(KERN_INFO "KM_QUERY: %d\n",
		crypt_s390_func_available(KM_QUERY));
	printk(KERN_INFO "KM_DEA: %d\n",
		crypt_s390_func_available(KM_DEA_ENCRYPT));
	printk(KERN_INFO "KM_TDEA_128: %d\n",
		crypt_s390_func_available(KM_TDEA_128_ENCRYPT));
	printk(KERN_INFO "KM_TDEA_192: %d\n",
		crypt_s390_func_available(KM_TDEA_192_ENCRYPT));
	printk(KERN_INFO "KM_AES_128: %d\n",
		crypt_s390_func_available(KM_AES_128_ENCRYPT));
	printk(KERN_INFO "KM_AES_192: %d\n",
		crypt_s390_func_available(KM_AES_192_ENCRYPT));
	printk(KERN_INFO "KM_AES_256: %d\n",
		crypt_s390_func_available(KM_AES_256_ENCRYPT));

	/* query available KMC functions */
	printk(KERN_INFO "KMC_QUERY: %d\n",
		crypt_s390_func_available(KMC_QUERY));
	printk(KERN_INFO "KMC_DEA: %d\n",
		crypt_s390_func_available(KMC_DEA_ENCRYPT));
	printk(KERN_INFO "KMC_TDEA_128: %d\n",
		crypt_s390_func_available(KMC_TDEA_128_ENCRYPT));
	printk(KERN_INFO "KMC_TDEA_192: %d\n",
		crypt_s390_func_available(KMC_TDEA_192_ENCRYPT));
	printk(KERN_INFO "KMC_AES_128: %d\n",
		crypt_s390_func_available(KMC_AES_128_ENCRYPT));
	printk(KERN_INFO "KMC_AES_192: %d\n",
		crypt_s390_func_available(KMC_AES_192_ENCRYPT));
	printk(KERN_INFO "KMC_AES_256: %d\n",
		crypt_s390_func_available(KMC_AES_256_ENCRYPT));

	/* query available KIMD functions */
	printk(KERN_INFO "KIMD_QUERY: %d\n",
		crypt_s390_func_available(KIMD_QUERY));
	printk(KERN_INFO "KIMD_SHA_1: %d\n",
		crypt_s390_func_available(KIMD_SHA_1));
	printk(KERN_INFO "KIMD_SHA_256: %d\n",
		crypt_s390_func_available(KIMD_SHA_256));

	/* query available KLMD functions */
	printk(KERN_INFO "KLMD_QUERY: %d\n",
		crypt_s390_func_available(KLMD_QUERY));
	printk(KERN_INFO "KLMD_SHA_1: %d\n",
		crypt_s390_func_available(KLMD_SHA_1));
	printk(KERN_INFO "KLMD_SHA_256: %d\n",
		crypt_s390_func_available(KLMD_SHA_256));

	/* query available KMAC functions */
	printk(KERN_INFO "KMAC_QUERY: %d\n",
		crypt_s390_func_available(KMAC_QUERY));
	printk(KERN_INFO "KMAC_DEA: %d\n",
		crypt_s390_func_available(KMAC_DEA));
	printk(KERN_INFO "KMAC_TDEA_128: %d\n",
		crypt_s390_func_available(KMAC_TDEA_128));
	printk(KERN_INFO "KMAC_TDEA_192: %d\n",
		crypt_s390_func_available(KMAC_TDEA_192));
}
Example #13
0
/*
 * Module entry point: register the DES and triple-DES algorithms
 * (plain, ECB and CBC modes) when the CPACF KM instruction supports
 * DEA and TDEA-192; additionally register the CTR variants and
 * allocate their counter-block scratch page when the KMCTR facility
 * functions are present. Returns 0 on success, -EOPNOTSUPP when the
 * base facilities are absent, -ENOMEM if the scratch page cannot be
 * allocated, or the error from the first failed registration.
 * On failure, everything registered so far is unwound via the goto
 * chain below.
 */
static int __init des_s390_init(void)
{
	int ret;

	if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
		return -EOPNOTSUPP;

	ret = crypto_register_alg(&des_alg);
	if (ret)
		goto des_err;
	ret = crypto_register_alg(&ecb_des_alg);
	if (ret)
		goto ecb_des_err;
	ret = crypto_register_alg(&cbc_des_alg);
	if (ret)
		goto cbc_des_err;
	ret = crypto_register_alg(&des3_alg);
	if (ret)
		goto des3_err;
	ret = crypto_register_alg(&ecb_des3_alg);
	if (ret)
		goto ecb_des3_err;
	ret = crypto_register_alg(&cbc_des3_alg);
	if (ret)
		goto cbc_des3_err;

	/* CTR mode is optional: needs KMCTR for both DEA and TDEA-192. */
	if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&ctr_des_alg);
		if (ret)
			goto ctr_des_err;
		ret = crypto_register_alg(&ctr_des3_alg);
		if (ret)
			goto ctr_des3_err;
		/* Scratch page used to build CTR counter blocks. */
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_mem_err;
		}
	}
out:
	return ret;

	/*
	 * Error unwind: each label undoes the registrations that
	 * succeeded before the corresponding failure point, in
	 * reverse order.
	 */
ctr_mem_err:
	crypto_unregister_alg(&ctr_des3_alg);
ctr_des3_err:
	crypto_unregister_alg(&ctr_des_alg);
ctr_des_err:
	crypto_unregister_alg(&cbc_des3_alg);
cbc_des3_err:
	crypto_unregister_alg(&ecb_des3_alg);
ecb_des3_err:
	crypto_unregister_alg(&des3_alg);
des3_err:
	crypto_unregister_alg(&cbc_des_alg);
cbc_des_err:
	crypto_unregister_alg(&ecb_des_alg);
ecb_des_err:
	crypto_unregister_alg(&des_alg);
des_err:
	goto out;
}