/*
 * crypto_null_mod_init - register the null cipher, skcipher, digest and
 * compression algorithms.  On any failure, the registrations that already
 * succeeded are rolled back in reverse order and the error is returned.
 */
static int __init crypto_null_mod_init(void)
{
	int ret;

	ret = crypto_register_alg(&cipher_null);
	if (ret < 0)
		return ret;

	ret = crypto_register_alg(&skcipher_null);
	if (ret < 0)
		goto err_cipher;

	ret = crypto_register_shash(&digest_null);
	if (ret < 0)
		goto err_skcipher;

	ret = crypto_register_alg(&compress_null);
	if (ret < 0)
		goto err_digest;

	return ret;

err_digest:
	crypto_unregister_shash(&digest_null);
err_skcipher:
	crypto_unregister_alg(&skcipher_null);
err_cipher:
	crypto_unregister_alg(&cipher_null);
	return ret;
}
/* Module exit: unregister the null algorithms in reverse registration order. */
static void __exit crypto_null_mod_fini(void)
{
	crypto_unregister_alg(&compress_null);
	crypto_unregister_shash(&digest_null);
	crypto_unregister_alg(&skcipher_null);
	crypto_unregister_alg(&cipher_null);
}
/*
 * cfs_crypto_crc32_unregister - remove the crc32 algorithm from the
 * crypto API.
 *
 * When the kernel provides struct shash_alg (HAVE_STRUCT_SHASH_ALG),
 * the shash interface is used; otherwise the legacy crypto_alg-based
 * unregistration path is taken.
 */
void cfs_crypto_crc32_unregister(void)
{
#ifdef HAVE_STRUCT_SHASH_ALG
	crypto_unregister_shash(&alg);
#else
	crypto_unregister_alg(&alg);
#endif
}
/*
 * skein_generic_init - register the Skein-256/512/1024 shash algorithms.
 *
 * Registers the three digest variants in order.  On failure, the
 * registrations that already succeeded are unwound in reverse order.
 *
 * Fix: the original returned a bare -1 (which aliases -EPERM) on any
 * failure; propagate the actual error code reported by
 * crypto_register_shash() instead.
 */
static int __init skein_generic_init(void)
{
	int ret;

	ret = crypto_register_shash(&alg256);
	if (ret)
		goto out;
	ret = crypto_register_shash(&alg512);
	if (ret)
		goto unreg256;
	ret = crypto_register_shash(&alg1024);
	if (ret)
		goto unreg512;
	return 0;

unreg512:
	crypto_unregister_shash(&alg512);
unreg256:
	crypto_unregister_shash(&alg256);
out:
	return ret;
}
/*
 * p8_exit - module teardown: unregister every entry in the NULL-terminated
 * algs[] table, then the GHASH shash that was registered separately.
 */
void __exit p8_exit(void)
{
	struct crypto_alg **p;

	for (p = algs; *p != NULL; p++) {
		printk(KERN_INFO "Removing '%s'\n", (*p)->cra_name);
		crypto_unregister_alg(*p);
	}
	crypto_unregister_shash(&p8_ghash_alg);
}
/*
 * Module init: register the SHA-512 and SHA-384 shash algorithms when the
 * s390 KIMD SHA-512 facility is present.  If SHA-384 registration fails,
 * the already-registered SHA-512 is removed again.
 */
static int __init init(void)
{
	int ret;

	if (!crypt_s390_func_available(KIMD_SHA_512))
		return -EOPNOTSUPP;

	ret = crypto_register_shash(&sha512_alg);
	if (ret < 0)
		goto out;

	ret = crypto_register_shash(&sha384_alg);
	if (ret < 0)
		crypto_unregister_shash(&sha512_alg);
out:
	return ret;
}
static int nx_remove(struct vio_dev *viodev) { dev_dbg(&viodev->dev, "entering nx_remove for UA 0x%x\n", viodev->unit_address); if (nx_driver.of.status == NX_OKAY) { NX_DEBUGFS_FINI(&nx_driver); crypto_unregister_alg(&nx_ccm_aes_alg); crypto_unregister_alg(&nx_ccm4309_aes_alg); crypto_unregister_alg(&nx_gcm_aes_alg); crypto_unregister_alg(&nx_gcm4106_aes_alg); crypto_unregister_alg(&nx_ctr_aes_alg); crypto_unregister_alg(&nx_ctr3686_aes_alg); crypto_unregister_alg(&nx_cbc_aes_alg); crypto_unregister_alg(&nx_ecb_aes_alg); crypto_unregister_shash(&nx_shash_sha256_alg); crypto_unregister_shash(&nx_shash_sha512_alg); crypto_unregister_shash(&nx_shash_aes_xcbc_alg); } return 0; }
/*
 * Module init: register the SHA-256 and SHA-224 shash algorithms when the
 * s390 KIMD SHA-256 facility is available.  A SHA-224 failure rolls back
 * the SHA-256 registration.
 */
static int __init sha256_s390_init(void)
{
	int ret;

	if (!crypt_s390_func_available(KIMD_SHA_256, CRYPT_S390_MSA))
		return -EOPNOTSUPP;

	ret = crypto_register_shash(&sha256_alg);
	if (ret < 0)
		return ret;

	ret = crypto_register_shash(&sha224_alg);
	if (ret < 0)
		crypto_unregister_shash(&sha256_alg);

	return ret;
}
/*
 * Module init: register the generic SHA-224 and SHA-256 shash algorithms.
 * If SHA-256 fails to register, the SHA-224 registration is undone.
 */
static int __init sha256_generic_mod_init(void)
{
	int ret;

	ret = crypto_register_shash(&sha224);
	if (ret < 0)
		return ret;

	ret = crypto_register_shash(&sha256);
	if (ret < 0)
		crypto_unregister_shash(&sha224);

	return ret;
}
/*
 * Module init: register the crc32 and crc32c shash algorithms; a crc32c
 * failure unwinds the crc32 registration.
 */
static int __init crc32_mod_init(void)
{
	int ret;

	ret = crypto_register_shash(&crc32_alg);
	if (ret)
		return ret;

	ret = crypto_register_shash(&crc32c_alg);
	if (ret)
		crypto_unregister_shash(&crc32_alg);

	return ret;
}
/*
 * Module init: register the PCLMULQDQ GHASH shash and its async ahash
 * wrapper, provided the CPU advertises the required instructions.  An
 * ahash failure rolls back the shash registration.
 */
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int ret;

	if (!x86_match_cpu(pcmul_cpu_id))
		return -ENODEV;

	ret = crypto_register_shash(&ghash_alg);
	if (ret)
		return ret;

	ret = crypto_register_ahash(&ghash_async_alg);
	if (ret)
		crypto_unregister_shash(&ghash_alg);

	return ret;
}
/*
 * Module init: register the PCLMULQDQ GHASH shash and its async ahash
 * wrapper, after checking for PCLMULQDQ CPU support.  An ahash failure
 * rolls back the shash registration.
 */
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int ret;

	if (!cpu_has_pclmulqdq) {
		printk(KERN_INFO "Intel PCLMULQDQ-NI instructions are not"
		       " detected.\n");
		return -ENODEV;
	}

	ret = crypto_register_shash(&ghash_alg);
	if (ret)
		return ret;

	ret = crypto_register_ahash(&ghash_async_alg);
	if (ret)
		crypto_unregister_shash(&ghash_alg);

	return ret;
}
/* Module exit: remove the SHA-1 CE shash from the crypto API. */
static void __exit sha1_ce_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}
/* Module exit: remove the GHASH shash from the crypto API. */
static void __exit ghash_mod_exit(void)
{
	crypto_unregister_shash(&ghash_alg);
}
/** * nx_register_algs - register algorithms with the crypto API * * Called from nx_probe() * * If all OF properties are in an acceptable state, the driver flags will * indicate that we're ready and we'll create our debugfs files and register * out crypto algorithms. */ static int nx_register_algs(void) { int rc = -1; if (nx_driver.of.flags != NX_OF_FLAG_MASK_READY) goto out; memset(&nx_driver.stats, 0, sizeof(struct nx_stats)); rc = NX_DEBUGFS_INIT(&nx_driver); if (rc) goto out; nx_driver.of.status = NX_OKAY; rc = crypto_register_alg(&nx_ecb_aes_alg); if (rc) goto out; rc = crypto_register_alg(&nx_cbc_aes_alg); if (rc) goto out_unreg_ecb; rc = crypto_register_alg(&nx_ctr_aes_alg); if (rc) goto out_unreg_cbc; rc = crypto_register_alg(&nx_ctr3686_aes_alg); if (rc) goto out_unreg_ctr; rc = crypto_register_alg(&nx_gcm_aes_alg); if (rc) goto out_unreg_ctr3686; rc = crypto_register_alg(&nx_gcm4106_aes_alg); if (rc) goto out_unreg_gcm; rc = crypto_register_alg(&nx_ccm_aes_alg); if (rc) goto out_unreg_gcm4106; rc = crypto_register_alg(&nx_ccm4309_aes_alg); if (rc) goto out_unreg_ccm; rc = crypto_register_shash(&nx_shash_sha256_alg); if (rc) goto out_unreg_ccm4309; rc = crypto_register_shash(&nx_shash_sha512_alg); if (rc) goto out_unreg_s256; rc = crypto_register_shash(&nx_shash_aes_xcbc_alg); if (rc) goto out_unreg_s512; goto out; out_unreg_s512: crypto_unregister_shash(&nx_shash_sha512_alg); out_unreg_s256: crypto_unregister_shash(&nx_shash_sha256_alg); out_unreg_ccm4309: crypto_unregister_alg(&nx_ccm4309_aes_alg); out_unreg_ccm: crypto_unregister_alg(&nx_ccm_aes_alg); out_unreg_gcm4106: crypto_unregister_alg(&nx_gcm4106_aes_alg); out_unreg_gcm: crypto_unregister_alg(&nx_gcm_aes_alg); out_unreg_ctr3686: crypto_unregister_alg(&nx_ctr3686_aes_alg); out_unreg_ctr: crypto_unregister_alg(&nx_ctr_aes_alg); out_unreg_cbc: crypto_unregister_alg(&nx_cbc_aes_alg); out_unreg_ecb: crypto_unregister_alg(&nx_ecb_aes_alg); out: return rc; }
/* Module exit: remove the RIPEMD-320 shash from the crypto API. */
static void __exit rmd320_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}
/* Module exit: unregister all three Skein digest variants. */
static void __exit skein_generic_fini(void)
{
	crypto_unregister_shash(&alg256);
	crypto_unregister_shash(&alg512);
	crypto_unregister_shash(&alg1024);
}
/* Module exit: remove the generic SM3 shash from the crypto API. */
static void __exit sm3_generic_mod_fini(void)
{
	crypto_unregister_shash(&sm3_alg);
}
/* Unregister the SHA-NI SHA-1 shash, but only if it was registered
 * (registration is gated on the same X86_FEATURE_SHA_NI check). */
static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}
/* Module exit: unregister the SHA-512 and SHA-384 shash algorithms. */
static void __exit fini(void)
{
	crypto_unregister_shash(&sha512_alg);
	crypto_unregister_shash(&sha384_alg);
}
/* Unregister the AVX2 SHA-1 shash, but only when the AVX2 path was
 * usable (mirrors the registration-time avx2_usable() gate). */
static void unregister_sha1_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shash(&sha1_avx2_alg);
}
/* Module exit: unregister the SHA-224 and SHA-256 shash algorithms. */
static void __exit sha256_s390_fini(void)
{
	crypto_unregister_shash(&sha224_alg);
	crypto_unregister_shash(&sha256_alg);
}
/* Module exit: remove the async GHASH wrapper first, then the
 * underlying shash, in reverse order of registration. */
static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}
/* Module exit: remove the CRC-T10DIF shash from the crypto API. */
static void __exit crct10dif_intel_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}
/* Remove the adler32 shash from the crypto API. */
void cfs_crypto_adler32_unregister(void)
{
	crypto_unregister_shash(&alg);
}
/* Module exit: unregister the generic SHA-224 and SHA-256 algorithms. */
static void __exit sha256_generic_mod_fini(void)
{
	crypto_unregister_shash(&sha224);
	crypto_unregister_shash(&sha256);
}
/* Remove the PCLMUL crc32c shash from the crypto API. */
void cfs_crypto_crc32c_pclmul_unregister(void)
{
	crypto_unregister_shash(&alg);
}
/* Module exit: remove the DEU MD5 shash from the crypto API. */
void __exit ifxdeu_fini_md5 (void)
{
	crypto_unregister_shash(&ifxdeu_md5_alg);
}
/* Unregister the SSSE3 SHA-1 shash, but only when the CPU supports
 * SSSE3 (matching the registration-time gate). */
static void unregister_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shash(&sha1_ssse3_alg);
}
/* Module exit: remove the crc32c shash from the crypto API. */
static void __exit crc32c_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}