static int rk_crypto_register(struct rk_crypto_info *crypto_info)
{
	unsigned int i, k;
	int err = 0;

	for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
		rk_cipher_algs[i]->dev = crypto_info;
		if (rk_cipher_algs[i]->type == ALG_TYPE_CIPHER)
			err = crypto_register_alg(&rk_cipher_algs[i]->alg.crypto);
		else
			err = crypto_register_ahash(&rk_cipher_algs[i]->alg.hash);
		if (err)
			goto err_cipher_algs;
	}
	return 0;

err_cipher_algs:
	/*
	 * Unwind only the entries registered before the failure; each
	 * entry's own type selects the matching unregister helper.
	 */
	for (k = 0; k < i; k++) {
		if (rk_cipher_algs[k]->type == ALG_TYPE_CIPHER)
			crypto_unregister_alg(&rk_cipher_algs[k]->alg.crypto);
		else
			crypto_unregister_ahash(&rk_cipher_algs[k]->alg.hash);
	}
	return err;
}
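/*
 * Hedged sketch, not taken from the function above: the module teardown
 * implied by rk_crypto_register(). It assumes the same global
 * rk_cipher_algs[] table and ALG_TYPE_CIPHER discriminator, and walks the
 * whole table, since a successful register covered every entry.
 */
static void rk_crypto_unregister(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
		if (rk_cipher_algs[i]->type == ALG_TYPE_CIPHER)
			crypto_unregister_alg(&rk_cipher_algs[i]->alg.crypto);
		else
			crypto_unregister_ahash(&rk_cipher_algs[i]->alg.hash);
	}
}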
static int mv_cesa_add_algs(struct mv_cesa_dev *cesa)
{
	int ret;
	int i, j;

	for (i = 0; i < cesa->caps->ncipher_algs; i++) {
		ret = crypto_register_alg(cesa->caps->cipher_algs[i]);
		if (ret)
			goto err_unregister_crypto;
	}

	for (i = 0; i < cesa->caps->nahash_algs; i++) {
		ret = crypto_register_ahash(cesa->caps->ahash_algs[i]);
		if (ret)
			goto err_unregister_ahash;
	}

	return 0;

err_unregister_ahash:
	for (j = 0; j < i; j++)
		crypto_unregister_ahash(cesa->caps->ahash_algs[j]);
	/* Every cipher alg was registered; fall through to unwind them all. */
	i = cesa->caps->ncipher_algs;

err_unregister_crypto:
	for (j = 0; j < i; j++)
		crypto_unregister_alg(cesa->caps->cipher_algs[j]);

	return ret;
}
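/*
 * Hedged sketch of the complementary teardown for mv_cesa_add_algs():
 * unregister in reverse category order (hashes, then ciphers). The name
 * mv_cesa_remove_algs and its void return are assumptions, not taken
 * from the function above.
 */
static void mv_cesa_remove_algs(struct mv_cesa_dev *cesa)
{
	int i;

	for (i = 0; i < cesa->caps->nahash_algs; i++)
		crypto_unregister_ahash(cesa->caps->ahash_algs[i]);

	for (i = 0; i < cesa->caps->ncipher_algs; i++)
		crypto_unregister_alg(cesa->caps->cipher_algs[i]);
}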
static int sahara_register_algs(struct sahara_dev *dev)
{
	int err;
	unsigned int i, j, k, l;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		INIT_LIST_HEAD(&aes_algs[i].cra_list);
		err = crypto_register_alg(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	for (k = 0; k < ARRAY_SIZE(sha_v3_algs); k++) {
		err = crypto_register_ahash(&sha_v3_algs[k]);
		if (err)
			goto err_sha_v3_algs;
	}

	if (dev->version > SAHARA_VERSION_3)
		for (l = 0; l < ARRAY_SIZE(sha_v4_algs); l++) {
			err = crypto_register_ahash(&sha_v4_algs[l]);
			if (err)
				goto err_sha_v4_algs;
		}

	return 0;

err_sha_v4_algs:
	for (j = 0; j < l; j++)
		crypto_unregister_ahash(&sha_v4_algs[j]);

err_sha_v3_algs:
	/* Unwind the v3 hashes registered so far, not the v4 table. */
	for (j = 0; j < k; j++)
		crypto_unregister_ahash(&sha_v3_algs[j]);

err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_alg(&aes_algs[j]);

	return err;
}
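/*
 * Hedged sketch of the matching teardown for sahara_register_algs(); the
 * name sahara_unregister_algs is an assumption. It mirrors the
 * registration order, including the version gate on the v4 hash table.
 */
static void sahara_unregister_algs(struct sahara_dev *dev)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_alg(&aes_algs[i]);

	for (i = 0; i < ARRAY_SIZE(sha_v3_algs); i++)
		crypto_unregister_ahash(&sha_v3_algs[i]);

	if (dev->version > SAHARA_VERSION_3)
		for (i = 0; i < ARRAY_SIZE(sha_v4_algs); i++)
			crypto_unregister_ahash(&sha_v4_algs[i]);
}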
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!x86_match_cpu(pcmul_cpu_id))
		return -ENODEV;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;

	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}
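/*
 * Hedged sketch of the module exit path paired with
 * ghash_pclmulqdqni_mod_init(): unregister in reverse registration order,
 * so the async wrapper goes away before the shash it wraps.
 */
static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}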
static int sunxi_ss_alg_register(void)
{
	int i, j;
	int ret = 0;

	for (i = 0; i < ARRAY_SIZE(sunxi_ss_algs); i++) {
		INIT_LIST_HEAD(&sunxi_ss_algs[i].cra_list);
		sunxi_ss_algs[i].cra_priority = 300;
		sunxi_ss_algs[i].cra_ctxsize = sizeof(ss_aes_ctx_t);
		sunxi_ss_algs[i].cra_module = THIS_MODULE;
		sunxi_ss_algs[i].cra_exit = sunxi_ss_cra_exit;
		if (strncmp(sunxi_ss_algs[i].cra_name, "prng", 4) == 0)
			sunxi_ss_algs[i].cra_init = sunxi_ss_cra_rng_init;
		else
			sunxi_ss_algs[i].cra_init = sunxi_ss_cra_init;

		ret = crypto_register_alg(&sunxi_ss_algs[i]);
		if (ret != 0) {
			SS_ERR("crypto_register_alg(%s) failed! return %d\n",
			       sunxi_ss_algs[i].cra_name, ret);
			goto err_algs;
		}
	}

	for (i = 0; i < ARRAY_SIZE(sunxi_ss_algs_hash); i++) {
		sunxi_ss_algs_hash[i].halg.base.cra_priority = 300;
		ret = crypto_register_ahash(&sunxi_ss_algs_hash[i]);
		if (ret != 0) {
			SS_ERR("crypto_register_ahash(%s) failed! return %d\n",
			       sunxi_ss_algs_hash[i].halg.base.cra_name, ret);
			goto err_algs_hash;
		}
	}

	return 0;

err_algs_hash:
	for (j = 0; j < i; j++)
		crypto_unregister_ahash(&sunxi_ss_algs_hash[j]);
	/* Every cipher alg was registered; fall through to unwind them all. */
	i = ARRAY_SIZE(sunxi_ss_algs);

err_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_alg(&sunxi_ss_algs[j]);

	return ret;
}
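/*
 * Hedged sketch of the matching unregister path for
 * sunxi_ss_alg_register(); the name sunxi_ss_alg_unregister is an
 * assumption. Both tables are walked in full, since a successful register
 * covered every entry.
 */
static void sunxi_ss_alg_unregister(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(sunxi_ss_algs); i++)
		crypto_unregister_alg(&sunxi_ss_algs[i]);

	for (i = 0; i < ARRAY_SIZE(sunxi_ss_algs_hash); i++)
		crypto_unregister_ahash(&sunxi_ss_algs_hash[i]);
}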
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!cpu_has_pclmulqdq) {
		printk(KERN_INFO
		       "Intel PCLMULQDQ-NI instructions are not detected.\n");
		return -ENODEV;
	}

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;

	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}