Example #1
static int __init padlock_init(void)
{
	int rc = -ENODEV;

	if (!cpu_has_phe) {
		printk(KERN_NOTICE PFX "VIA PadLock Hash Engine not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_phe_enabled) {
		printk(KERN_NOTICE PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
		return -ENODEV;
	}

	rc = crypto_register_alg(&sha1_alg);
	if (rc)
		goto out;

	rc = crypto_register_alg(&sha256_alg);
	if (rc)
		goto out_unreg1;

	printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

	return 0;

out_unreg1:
	crypto_unregister_alg(&sha1_alg);
out:
	printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
	return rc;
}
Example #2
static int __init crypto_null_mod_init(void)
{
	int ret = 0;

	ret = crypto_register_alg(&cipher_null);
	if (ret < 0)
		goto out;

	ret = crypto_register_alg(&skcipher_null);
	if (ret < 0)
		goto out_unregister_cipher;

	ret = crypto_register_shash(&digest_null);
	if (ret < 0)
		goto out_unregister_skcipher;

	ret = crypto_register_alg(&compress_null);
	if (ret < 0)
		goto out_unregister_digest;

out:
	return ret;

out_unregister_digest:
	crypto_unregister_shash(&digest_null);
out_unregister_skcipher:
	crypto_unregister_alg(&skcipher_null);
out_unregister_cipher:
	crypto_unregister_alg(&cipher_null);
	goto out;
}
Example #3
static int __devinit
geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
	int ret;
	ret = pci_enable_device(dev);
	if (ret)
		return ret;

	ret = pci_request_regions(dev, "geode-aes");
	if (ret)
		goto eenable;

	_iobase = pci_iomap(dev, 0, 0);

	if (_iobase == NULL) {
		ret = -ENOMEM;
		goto erequest;
	}

	spin_lock_init(&lock);

	/* Clear any pending activity */
	iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);

	ret = crypto_register_alg(&geode_alg);
	if (ret)
		goto eiomap;

	ret = crypto_register_alg(&geode_ecb_alg);
	if (ret)
		goto ealg;

	ret = crypto_register_alg(&geode_cbc_alg);
	if (ret)
		goto eecb;

	printk(KERN_NOTICE "geode-aes: GEODE AES engine enabled.\n");
	return 0;

 eecb:
	crypto_unregister_alg(&geode_ecb_alg);

 ealg:
	crypto_unregister_alg(&geode_alg);

 eiomap:
	pci_iounmap(dev, _iobase);

 erequest:
	pci_release_regions(dev);

 eenable:
	pci_disable_device(dev);

	printk(KERN_ERR "geode-aes:  GEODE AES initialization failed.\n");
	return ret;
}
Example #4
static int __init xlr_crypt_alg_init(void)
{
	int ret;

	if ((ret = crypto_register_alg(&xlr_aes_alg)))
		goto err_out;

	if ((ret = crypto_register_alg(&xlr_ecb_aes_alg)))
		goto err1;

	if ((ret = crypto_register_alg(&xlr_cbc_aes_alg)))
		goto err2;

	if ((ret = crypto_register_alg(&xlr_des_alg)))
		goto err3;

	if ((ret = crypto_register_alg(&xlr_ecb_des_alg)))
		goto err4;

	if ((ret = crypto_register_alg(&xlr_cbc_des_alg)))
		goto err5;

	if ((ret = crypto_register_alg(&xlr_des3_alg)))
		goto err6;

	if ((ret = crypto_register_alg(&xlr_ecb_des3_alg)))
		goto err7;

	if ((ret = crypto_register_alg(&xlr_cbc_des3_alg)))
		goto err8;
	
//	if ((ret = crypto_register_alg(&xlr_ctr_aes_alg)))
//		goto err9;

	printk(KERN_NOTICE "Using XLR hardware for AES/DES/3DES algorithm.\n");
	return 0;

//err9:
//	crypto_unregister_alg(&xlr_cbc_des3_alg);
err8:
	crypto_unregister_alg(&xlr_ecb_des3_alg);
err7:
	crypto_unregister_alg(&xlr_des3_alg);
err6:
	crypto_unregister_alg(&xlr_cbc_des_alg);
err5:
	crypto_unregister_alg(&xlr_ecb_des_alg);
err4:
	crypto_unregister_alg(&xlr_des_alg);
err3:
	crypto_unregister_alg(&xlr_cbc_aes_alg);
err2:
	crypto_unregister_alg(&xlr_ecb_aes_alg);
err1:
	crypto_unregister_alg(&xlr_aes_alg);
err_out:
	printk(KERN_ERR "XLR hardware AES/DES/3DES initialization failed.\n");
	return ret;
}
Example #5
static int __init AesEngineInit(void)
{
	int err;

	spin_lock_init(&AES_Entry.page_lock);

	aes_engine_reset();

	printk("MTK AES Engine Module, HW verson: %02X\n", sysRegRead(AES_INFO) >> 28);

	err = aes_engine_desc_init();
	if (err != 0) {
		printk(KERN_WARNING "%s: ring_alloc FAILED!\n", AES_MODNAME);
		return err;
	}

#if defined (CONFIG_CRYPTO_DEV_MTK_AES_INT)
	init_completion(&AES_Entry.op_complete);

	err = request_irq(SURFBOARDINT_AESENGINE, AesEngineIrqHandler, IRQF_DISABLED, "aes_engine", NULL);
	if (err) {
		printk("%s: IRQ %d is not free!\n", AES_MODNAME, SURFBOARDINT_AESENGINE);
		aes_engine_desc_free();
		return err;
	}
#endif

	aes_engine_start();

	printk("%s: register %s crypto api\n", AES_MODNAME, mcrypto_aes_cbc_alg.cra_name);
	err = crypto_register_alg(&mcrypto_aes_cbc_alg);
	if (err) {
		printk("%s: register %s crypto api failed!\n", AES_MODNAME, mcrypto_aes_cbc_alg.cra_name);
		goto init_failed;
	}

	printk("%s: register %s crypto api\n", AES_MODNAME, mcrypto_aes_ecb_alg.cra_name);
	err = crypto_register_alg(&mcrypto_aes_ecb_alg);
	if (err) {
		printk("%s: register %s crypto api failed!\n", AES_MODNAME, mcrypto_aes_ecb_alg.cra_name);
		crypto_unregister_alg(&mcrypto_aes_cbc_alg);
		goto init_failed;
	}

	return 0;

init_failed:

	aes_engine_uninit();

	return err;
}
Example #6
static int __init init(void)
{
	int ret;

	if (!crypt_s390_func_available(KIMD_SHA_512))
		return -EOPNOTSUPP;
	if ((ret = crypto_register_alg(&sha512_alg)) < 0)
		goto out;
	if ((ret = crypto_register_alg(&sha384_alg)) < 0)
		crypto_unregister_alg(&sha512_alg);
out:
	return ret;
}
Example #7
File: crc32c.c Project: E-LLP/n900
static int __init crc32c_mod_init(void)
{
	int err;

	err = crypto_register_alg(&old_alg);
	if (err)
		return err;

	err = crypto_register_alg(&alg);
	if (err)
		crypto_unregister_alg(&old_alg);

	return err;
}
Example #8
static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}
Example #9
static int __init serpent_mod_init(void)
{
	int ret = crypto_register_alg(&serpent_alg);

	if (ret)
		return ret;

	ret = crypto_register_alg(&tnepres_alg);

	if (ret)
		crypto_unregister_alg(&serpent_alg);

	return ret;
}
Example #10
/* Module initialization */
static int __init prng_mod_init(void)
{
	int rc = 0;

	rc = crypto_register_alg(&rng_alg);
#ifdef CONFIG_CRYPTO_FIPS
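	/* with FIPS support built in, also register the FIPS-approved RNG variant */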
	if (rc)
		goto out;

	rc = crypto_register_alg(&fips_rng_alg);

out:
#endif
	return rc;
}
Example #11
static int rk_crypto_register(struct rk_crypto_info *crypto_info)
{
	unsigned int i, k;
	int err = 0;

	for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
		rk_cipher_algs[i]->dev = crypto_info;
		if (rk_cipher_algs[i]->type == ALG_TYPE_CIPHER)
			err = crypto_register_alg(
					&rk_cipher_algs[i]->alg.crypto);
		else
			err = crypto_register_ahash(
					&rk_cipher_algs[i]->alg.hash);
		if (err)
			goto err_cipher_algs;
	}
	return 0;

err_cipher_algs:
	for (k = 0; k < i; k++) {
		if (rk_cipher_algs[i]->type == ALG_TYPE_CIPHER)
			crypto_unregister_alg(&rk_cipher_algs[k]->alg.crypto);
		else
			crypto_unregister_ahash(&rk_cipher_algs[i]->alg.hash);
	}
	return err;
}
Example #12
static int __init chacha20_simd_mod_init(void)
{
	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	return crypto_register_alg(&alg);
}
Example #13
int __init p8_init(void)
{
	int ret = 0;
	struct crypto_alg **alg_it;

	for (alg_it = algs; *alg_it; alg_it++) {
		ret = crypto_register_alg(*alg_it);
		printk(KERN_INFO "crypto_register_alg '%s' = %d\n",
		       (*alg_it)->cra_name, ret);
		if (ret) {
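			/* unwind: unregister everything registered before the failure, in reverse order */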
			for (alg_it--; alg_it >= algs; alg_it--)
				crypto_unregister_alg(*alg_it);
			break;
		}
	}
	if (ret)
		return ret;

	ret = crypto_register_shash(&p8_ghash_alg);
	if (ret) {
		for (alg_it = algs; *alg_it; alg_it++)
			crypto_unregister_alg(*alg_it);
	}
	return ret;
}
Example #14
static int __init init(void)
{
	if (!crypt_s390_func_available(KIMD_SHA_1))
		return -EOPNOTSUPP;

	return crypto_register_alg(&alg);
}
Example #15
static int mv_cesa_add_algs(struct mv_cesa_dev *cesa)
{
	int ret;
	int i, j;

	for (i = 0; i < cesa->caps->ncipher_algs; i++) {
		ret = crypto_register_alg(cesa->caps->cipher_algs[i]);
		if (ret)
			goto err_unregister_crypto;
	}

	for (i = 0; i < cesa->caps->nahash_algs; i++) {
		ret = crypto_register_ahash(cesa->caps->ahash_algs[i]);
		if (ret)
			goto err_unregister_ahash;
	}

	return 0;

err_unregister_ahash:
	for (j = 0; j < i; j++)
		crypto_unregister_ahash(cesa->caps->ahash_algs[j]);
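	/* all ahash algs are unregistered; fall through and unregister every cipher alg too */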
	i = cesa->caps->ncipher_algs;

err_unregister_crypto:
	for (j = 0; j < i; j++)
		crypto_unregister_alg(cesa->caps->cipher_algs[j]);

	return ret;
}
Example #16
static int __init aes_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128) {
		aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
		ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for"
		       "128 bit keys\n");
	}

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
Example #17
int cfs_crypto_crc32_register(void)
{
#ifdef HAVE_STRUCT_SHASH_ALG
    return crypto_register_shash(&alg);
#else
    return crypto_register_alg(&alg);
#endif
}
Example #18
int crypto_register_akcipher(struct akcipher_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	base->cra_type = &crypto_akcipher_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AKCIPHER;
	return crypto_register_alg(base);
}
Example #19
static int des_s390_register_alg(struct crypto_alg *alg)
{
	int ret;

	ret = crypto_register_alg(alg);
	if (!ret)
		des_s390_algs_ptr[des_s390_algs_num++] = alg;
	return ret;
}
Example #20
int crypto_register_pcomp(struct pcomp_alg *alg)
{
    struct crypto_alg *base = &alg->base;

    base->cra_type = &crypto_pcomp_type;
    base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
    base->cra_flags |= CRYPTO_ALG_TYPE_PCOMPRESS;

    return crypto_register_alg(base);
}
Example #21
static int __init padlock_init(void)
{
	int ret;
	struct cpuinfo_x86 *c = &cpu_data(0);

	if (!cpu_has_xcrypt) {
		printk(KERN_NOTICE PFX "VIA PadLock not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xcrypt_enabled) {
		printk(KERN_NOTICE PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
		return -ENODEV;
	}

	if ((ret = crypto_register_alg(&aes_alg)))
		goto aes_err;

	if ((ret = crypto_register_alg(&ecb_aes_alg)))
		goto ecb_aes_err;

	if ((ret = crypto_register_alg(&cbc_aes_alg)))
		goto cbc_aes_err;

	printk(KERN_NOTICE PFX "Using VIA PadLock ACE for AES algorithm.\n");

	if (c->x86 == 6 && c->x86_model == 15 && c->x86_mask == 2) {
		ecb_fetch_blocks = MAX_ECB_FETCH_BLOCKS;
		cbc_fetch_blocks = MAX_CBC_FETCH_BLOCKS;
		printk(KERN_NOTICE PFX "VIA Nano stepping 2 detected: enabling workaround.\n");
	}

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	printk(KERN_ERR PFX "VIA PadLock AES initialization failed.\n");
	goto out;
}
Example #22
int __init ifxdeu_init_md5(void)
{
	printk
	(
		KERN_NOTICE "Using Infineon DEU for MD5 algorithm%s.\n",
		disable_multiblock ? "" : " (multiblock)",
		disable_deudma ? "" : " (DMA)"
	);

	return crypto_register_alg(&alg);
}
Example #23
static int __init chacha20_simd_mod_init(void)
{
	if (!cpu_has_ssse3)
		return -ENODEV;

#ifdef CONFIG_AS_AVX2
	chacha20_use_avx2 = cpu_has_avx && cpu_has_avx2 &&
			    cpu_has_xfeatures(XSTATE_SSE | XSTATE_YMM, NULL);
#endif
	return crypto_register_alg(&alg);
}
Example #24
static int init(void)
{
	int ret = 0;

	if (!crypt_s390_func_available(KM_DEA_ENCRYPT) ||
	    !crypt_s390_func_available(KM_TDEA_128_ENCRYPT) ||
	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT))
		return -ENOSYS;

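	/* each bit records one failed registration: 1 = DES, 2 = 3DES-128, 4 = 3DES-192 */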
	ret |= (crypto_register_alg(&des_alg) == 0) ? 0:1;
	ret |= (crypto_register_alg(&des3_128_alg) == 0) ? 0:2;
	ret |= (crypto_register_alg(&des3_192_alg) == 0) ? 0:4;
	if (ret) {
		crypto_unregister_alg(&des3_192_alg);
		crypto_unregister_alg(&des3_128_alg);
		crypto_unregister_alg(&des_alg);
		return -EEXIST;
	}
	return 0;
}
Example #25
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
Example #26
static int init(void)
{
	int ret;

	if (!crypt_s390_func_available(KIMD_SHA_256))
		return -ENOSYS;

	ret = crypto_register_alg(&alg);
	if (ret != 0)
		printk(KERN_INFO "crypt_s390: sha256_s390 couldn't be loaded.");
	return ret;
}
Example #27
static int __init chacha20_simd_mod_init(void)
{
    if (!boot_cpu_has(X86_FEATURE_SSSE3))
        return -ENODEV;

#ifdef CONFIG_AS_AVX2
    chacha20_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
                        boot_cpu_has(X86_FEATURE_AVX2) &&
                        cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
#endif
    return crypto_register_alg(&alg);
}
Example #28
int crypto_register_rng(struct rng_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->seedsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_rng_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_RNG;

	return crypto_register_alg(base);
}
Example #29
static int
init(void)
{
	int ret = -ENOSYS;

	if (crypt_s390_func_available(KIMD_SHA_1)){
		ret = crypto_register_alg(&alg);
		if (ret == 0){
			printk(KERN_INFO "crypt_s390: sha1_s390 loaded.\n");
		}
	}
	return ret;
}
Example #30
/* Module initialization */
static int __init prng_mod_init(void)
{
	int ret = 0;

	if (fips_enabled)
		rng_alg.cra_priority += 200;

	ret = crypto_register_alg(&rng_alg);

	return ret;
}