/*
 * sahara_unregister_algs() - tear down everything sahara_register_algs()
 * registered: the AES algs, the SHA v3 hashes, and (on version > 3
 * hardware only) the SHA v4 hashes.
 *
 * Fix: the sha_v3 loop was bounded by ARRAY_SIZE(sha_v4_algs) while
 * indexing sha_v3_algs; if the tables differ in size this either leaves
 * v3 hashes registered or reads past the end of sha_v3_algs. Each loop
 * must be bounded by the size of the table it actually walks.
 */
static void sahara_unregister_algs(struct sahara_dev *dev)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_alg(&aes_algs[i]);

	for (i = 0; i < ARRAY_SIZE(sha_v3_algs); i++)
		crypto_unregister_ahash(&sha_v3_algs[i]);

	/* v4 hashes are only registered on newer silicon; mirror that here. */
	if (dev->version > SAHARA_VERSION_3)
		for (i = 0; i < ARRAY_SIZE(sha_v4_algs); i++)
			crypto_unregister_ahash(&sha_v4_algs[i]);
}
/*
 * rk_crypto_register() - bind every entry of rk_cipher_algs[] to the
 * device and register it with the crypto API (ciphers via
 * crypto_register_alg(), everything else via crypto_register_ahash()).
 *
 * Returns 0 on success or the first registration error, after
 * unregistering every entry that was already registered.
 *
 * Fix: the rollback loop iterated with k but indexed rk_cipher_algs[i]
 * in both the type test and the ahash branch, so it poked the *failing*
 * entry k times instead of unwinding entries 0..k-1. All three accesses
 * in the unwind loop must use k.
 */
static int rk_crypto_register(struct rk_crypto_info *crypto_info)
{
	unsigned int i, k;
	int err = 0;

	for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
		rk_cipher_algs[i]->dev = crypto_info;
		if (rk_cipher_algs[i]->type == ALG_TYPE_CIPHER)
			err = crypto_register_alg(
					&rk_cipher_algs[i]->alg.crypto);
		else
			err = crypto_register_ahash(
					&rk_cipher_algs[i]->alg.hash);
		if (err)
			goto err_cipher_algs;
	}
	return 0;

err_cipher_algs:
	for (k = 0; k < i; k++) {
		if (rk_cipher_algs[k]->type == ALG_TYPE_CIPHER)
			crypto_unregister_alg(&rk_cipher_algs[k]->alg.crypto);
		else
			crypto_unregister_ahash(&rk_cipher_algs[k]->alg.hash);
	}
	return err;
}
/*
 * mv_cesa_add_algs() - register all cipher algs, then all ahash algs,
 * advertised in cesa->caps.
 *
 * Returns 0 on success. On failure every algorithm registered so far is
 * unregistered again (hashes first, then all ciphers) and the first
 * error code is returned.
 */
static int mv_cesa_add_algs(struct mv_cesa_dev *cesa)
{
	int idx, undo;
	int ret;

	for (idx = 0; idx < cesa->caps->ncipher_algs; idx++) {
		ret = crypto_register_alg(cesa->caps->cipher_algs[idx]);
		if (ret)
			goto rollback_ciphers;
	}

	for (idx = 0; idx < cesa->caps->nahash_algs; idx++) {
		ret = crypto_register_ahash(cesa->caps->ahash_algs[idx]);
		if (ret)
			goto rollback_hashes;
	}

	return 0;

rollback_hashes:
	/* Undo the hashes registered so far, then fall through to undo
	 * the full cipher table (idx is re-pointed at its count). */
	for (undo = 0; undo < idx; undo++)
		crypto_unregister_ahash(cesa->caps->ahash_algs[undo]);
	idx = cesa->caps->ncipher_algs;
rollback_ciphers:
	for (undo = 0; undo < idx; undo++)
		crypto_unregister_alg(cesa->caps->cipher_algs[undo]);

	return ret;
}
/*
 * mv_cesa_remove_algs() - unregister everything mv_cesa_add_algs()
 * registered: the ahash algorithms first, then the cipher algorithms,
 * i.e. the reverse of the registration order of the two groups.
 */
static void mv_cesa_remove_algs(struct mv_cesa_dev *cesa)
{
	int idx;

	for (idx = 0; idx < cesa->caps->nahash_algs; idx++)
		crypto_unregister_ahash(cesa->caps->ahash_algs[idx]);

	for (idx = 0; idx < cesa->caps->ncipher_algs; idx++)
		crypto_unregister_alg(cesa->caps->cipher_algs[idx]);
}
/*
 * sunxi_ss_alg_unregister() - drop every algorithm this driver owns:
 * the cipher table first, then the hash table.
 */
static void sunxi_ss_alg_unregister(void)
{
	int k;

	for (k = 0; k < ARRAY_SIZE(sunxi_ss_algs); k++)
		crypto_unregister_alg(&sunxi_ss_algs[k]);

	for (k = 0; k < ARRAY_SIZE(sunxi_ss_algs_hash); k++)
		crypto_unregister_ahash(&sunxi_ss_algs_hash[k]);
}
/*
 * rk_crypto_unregister() - walk the whole rk_cipher_algs[] table and
 * unregister each entry through the API matching its type (ahash for
 * hash entries, plain alg for ciphers).
 */
static void rk_crypto_unregister(void)
{
	unsigned int n;

	for (n = 0; n < ARRAY_SIZE(rk_cipher_algs); n++) {
		if (rk_cipher_algs[n]->type != ALG_TYPE_CIPHER)
			crypto_unregister_ahash(&rk_cipher_algs[n]->alg.hash);
		else
			crypto_unregister_alg(&rk_cipher_algs[n]->alg.crypto);
	}
}
static int sahara_register_algs(struct sahara_dev *dev) { int err; unsigned int i, j, k, l; for (i = 0; i < ARRAY_SIZE(aes_algs); i++) { INIT_LIST_HEAD(&aes_algs[i].cra_list); err = crypto_register_alg(&aes_algs[i]); if (err) goto err_aes_algs; } for (k = 0; k < ARRAY_SIZE(sha_v3_algs); k++) { err = crypto_register_ahash(&sha_v3_algs[k]); if (err) goto err_sha_v3_algs; } if (dev->version > SAHARA_VERSION_3) for (l = 0; l < ARRAY_SIZE(sha_v4_algs); l++) { err = crypto_register_ahash(&sha_v4_algs[l]); if (err) goto err_sha_v4_algs; } return 0; err_sha_v4_algs: for (j = 0; j < l; j++) crypto_unregister_ahash(&sha_v4_algs[j]); err_sha_v3_algs: for (j = 0; j < k; j++) crypto_unregister_ahash(&sha_v4_algs[j]); err_aes_algs: for (j = 0; j < i; j++) crypto_unregister_alg(&aes_algs[j]); return err; }
/*
 * Module exit: unregister the async ahash wrapper first, then the
 * underlying synchronous shash implementation. The order is
 * deliberate — the wrapper is removed before the alg it builds on.
 */
static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}