/*
 * Release handler for the tegra crypto device node: free every transform
 * that the matching open() allocated, then the per-file context itself.
 *
 * The chip-id checks must mirror the conditional allocations done at open
 * time: OFB/CTR ciphers and the DRBG-backed RNG only exist on some chips,
 * so freeing them unconditionally would hand garbage to the crypto API.
 */
static int tegra_crypto_dev_release(struct inode *inode, struct file *filp)
{
	struct tegra_crypto_ctx *ctx = filp->private_data;

	crypto_free_ablkcipher(ctx->ecb_tfm);
	crypto_free_ablkcipher(ctx->cbc_tfm);

	/* OFB/CTR transforms are only allocated on non-Tegra2 chips */
	if (tegra_get_chipid() != TEGRA_CHIPID_TEGRA2) {
		crypto_free_ablkcipher(ctx->ofb_tfm);
		crypto_free_ablkcipher(ctx->ctr_tfm);
	}

	/* open() picks rng_drbg on chips newer than Tegra2/Tegra3, rng otherwise */
	if (tegra_get_chipid() != TEGRA_CHIPID_TEGRA2 &&
	    tegra_get_chipid() != TEGRA_CHIPID_TEGRA3)
		crypto_free_rng(ctx->rng_drbg);
	else
		crypto_free_rng(ctx->rng);

	crypto_free_ahash(ctx->rsa2048_tfm);
	crypto_free_ahash(ctx->rsa1536_tfm);
	crypto_free_ahash(ctx->rsa1024_tfm);
	crypto_free_ahash(ctx->rsa512_tfm);

	kfree(ctx);
	/* clear the stale pointer so a late access faults cleanly */
	filp->private_data = NULL;
	return 0;
}
int crypto_get_default_rng(void) { struct crypto_rng *rng; int err; mutex_lock(&crypto_default_rng_lock); if (!crypto_default_rng) { rng = crypto_alloc_rng("stdrng", 0, 0); err = PTR_ERR(rng); if (IS_ERR(rng)) goto unlock; err = crypto_rng_reset(rng, NULL, crypto_rng_seedsize(rng)); if (err) { crypto_free_rng(rng); goto unlock; } crypto_default_rng = rng; } crypto_default_rng_refcnt++; err = 0; unlock: mutex_unlock(&crypto_default_rng_lock); return err; }
/*
 * Allocate an RNG transform for @alg_name and seed it with the
 * user-supplied @key of @keylen bytes.
 *
 * On success rdata->s holds the transform, rdata->seedsize is cached and
 * rdata->init is set.  On any failure the transform is released and
 * -EINVAL is returned (allocation and seeding errors are both collapsed
 * to -EINVAL for the caller).
 */
int cryptodev_rng_init(struct rng_data *rdata, const char *alg_name,
		       void *key, size_t keylen)
{
	int rc;

	rdata->s = crypto_alloc_rng(alg_name, 0, 0);
	if (unlikely(IS_ERR(rdata->s))) {
		ddebug(1, "Failed to load transform for %s", alg_name);
		return -EINVAL;
	}

	/* Copy the key from user and set to TFM. */
	rc = crypto_rng_reset(rdata->s, key, keylen);
	if (unlikely(rc)) {
		ddebug(1, "Seeding failed for %s-%zu.", alg_name, keylen*8);
		crypto_free_rng(rdata->s);
		return -EINVAL;
	}

	rdata->seedsize = crypto_rng_seedsize(rdata->s);
	rdata->init = 1;
	return 0;
}
/*
 * Initialize big_key crypto and RNG algorithms
 *
 * Allocates and seeds the big_key RNG, then allocates the block cipher.
 * On any failure both module-level pointers are left NULL and the real
 * error code is returned.
 */
static int __init big_key_crypto_init(void)
{
	int ret;

	/* init RNG */
	big_key_rng = crypto_alloc_rng(big_key_rng_name, 0, 0);
	if (IS_ERR(big_key_rng)) {
		/*
		 * Propagate the real error instead of clobbering it with
		 * -EFAULT, matching big_key_init() elsewhere in this file.
		 */
		ret = PTR_ERR(big_key_rng);
		big_key_rng = NULL;
		return ret;
	}

	/* seed RNG */
	ret = crypto_rng_reset(big_key_rng, NULL,
			       crypto_rng_seedsize(big_key_rng));
	if (ret)
		goto error;

	/* init block cipher */
	big_key_skcipher = crypto_alloc_skcipher(big_key_alg_name, 0,
						 CRYPTO_ALG_ASYNC);
	if (IS_ERR(big_key_skcipher)) {
		ret = PTR_ERR(big_key_skcipher);
		big_key_skcipher = NULL;
		goto error;
	}

	return 0;

error:
	/* release the RNG allocated above; cipher is already NULL here */
	crypto_free_rng(big_key_rng);
	big_key_rng = NULL;
	return ret;
}
/*
 * Tear down an RNG context set up by cryptodev_rng_init().
 * Safe to call on an uninitialized context; the init flag prevents a
 * second teardown from freeing the transform twice.
 */
void cryptodev_rng_deinit(struct rng_data *rdata)
{
	if (!rdata->init)
		return;

	if (rdata->s)
		crypto_free_rng(rdata->s);

	rdata->init = 0;
}
/*
 * Drop a reference on the system default RNG taken via
 * crypto_get_default_rng(); the transform is freed when the last
 * reference goes away.  Serialized by crypto_default_rng_lock.
 */
void crypto_put_default_rng(void)
{
	mutex_lock(&crypto_default_rng_lock);

	crypto_default_rng_refcnt--;
	if (crypto_default_rng_refcnt == 0) {
		crypto_free_rng(crypto_default_rng);
		crypto_default_rng = NULL;
	}

	mutex_unlock(&crypto_default_rng_lock);
}
/*
 * Release handler for the tegra crypto device node: detach the per-file
 * context from the file, free its transforms, then free the context.
 */
static int tegra_crypto_dev_release(struct inode *inode, struct file *filp)
{
	struct tegra_crypto_ctx *crypto_ctx = filp->private_data;

	/* detach first so nothing can see the context while it is torn down */
	filp->private_data = NULL;

	crypto_free_ablkcipher(crypto_ctx->ecb_tfm);
	crypto_free_ablkcipher(crypto_ctx->cbc_tfm);
	crypto_free_rng(crypto_ctx->rng);

	kfree(crypto_ctx);
	return 0;
}
/*
 * Register key type
 *
 * Module init: allocate and seed the big_key RNG, allocate the block
 * cipher, then register the key type.  Uses goto-based unwinding so each
 * failure path releases exactly what was acquired before it.
 */
static int __init big_key_init(void)
{
	struct crypto_skcipher *cipher;
	struct crypto_rng *rng;
	int ret;

	rng = crypto_alloc_rng(big_key_rng_name, 0, 0);
	if (IS_ERR(rng)) {
		pr_err("Can't alloc rng: %ld\n", PTR_ERR(rng));
		return PTR_ERR(rng);
	}

	big_key_rng = rng;

	/* seed RNG */
	ret = crypto_rng_reset(rng, NULL, crypto_rng_seedsize(rng));
	if (ret) {
		pr_err("Can't reset rng: %d\n", ret);
		goto error_rng;
	}

	/* init block cipher */
	cipher = crypto_alloc_skcipher(big_key_alg_name, 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(cipher)) {
		ret = PTR_ERR(cipher);
		pr_err("Can't alloc crypto: %d\n", ret);
		goto error_rng;
	}

	big_key_skcipher = cipher;

	/* register last: once registered, keys may use the tfms above */
	ret = register_key_type(&key_type_big_key);
	if (ret < 0) {
		pr_err("Can't register type: %d\n", ret);
		goto error_cipher;
	}

	return 0;

error_cipher:
	crypto_free_skcipher(big_key_skcipher);
error_rng:
	crypto_free_rng(big_key_rng);
	return ret;
}
/*
 * Destroy the system default RNG if nobody holds a reference.
 * Returns -EBUSY while references remain, 0 once the RNG is freed.
 */
int crypto_del_default_rng(void)
{
	int ret;

	mutex_lock(&crypto_default_rng_lock);
	if (crypto_default_rng_refcnt) {
		ret = -EBUSY;
	} else {
		crypto_free_rng(crypto_default_rng);
		crypto_default_rng = NULL;
		ret = 0;
	}
	mutex_unlock(&crypto_default_rng_lock);

	return ret;
}
static int tegra_crypto_dev_open(struct inode *inode, struct file *filp) { struct tegra_crypto_ctx *ctx; int ret = 0; ctx = kzalloc(sizeof(struct tegra_crypto_ctx), GFP_KERNEL); if (!ctx) { pr_err("no memory for context\n"); return -ENOMEM; } ctx->ecb_tfm = crypto_alloc_ablkcipher("ecb-aes-tegra", CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0); if (IS_ERR(ctx->ecb_tfm)) { pr_err("Failed to load transform for ecb-aes-tegra: %ld\n", PTR_ERR(ctx->ecb_tfm)); ret = PTR_ERR(ctx->ecb_tfm); goto fail_ecb; } ctx->cbc_tfm = crypto_alloc_ablkcipher("cbc-aes-tegra", CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0); if (IS_ERR(ctx->cbc_tfm)) { pr_err("Failed to load transform for cbc-aes-tegra: %ld\n", PTR_ERR(ctx->cbc_tfm)); ret = PTR_ERR(ctx->cbc_tfm); goto fail_cbc; } if (tegra_get_chipid() != TEGRA_CHIPID_TEGRA2) { ctx->ofb_tfm = crypto_alloc_ablkcipher("ofb-aes-tegra", CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0); if (IS_ERR(ctx->ofb_tfm)) { pr_err("Failed to load transform for ofb-aes-tegra: %ld\n", PTR_ERR(ctx->ofb_tfm)); ret = PTR_ERR(ctx->ofb_tfm); goto fail_ofb; } ctx->ctr_tfm = crypto_alloc_ablkcipher("ctr-aes-tegra", CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0); if (IS_ERR(ctx->ctr_tfm)) { pr_err("Failed to load transform for ctr-aes-tegra: %ld\n", PTR_ERR(ctx->ctr_tfm)); ret = PTR_ERR(ctx->ctr_tfm); goto fail_ctr; } } if (tegra_get_chipid() != TEGRA_CHIPID_TEGRA2 && tegra_get_chipid() != TEGRA_CHIPID_TEGRA3) { ctx->rng_drbg = crypto_alloc_rng("rng_drbg-aes-tegra", CRYPTO_ALG_TYPE_RNG, 0); if (IS_ERR(ctx->rng_drbg)) { pr_err("Failed to load transform for rng_drbg tegra: %ld\n", PTR_ERR(ctx->rng_drbg)); ret = PTR_ERR(ctx->rng_drbg); goto fail_rng; } } else { ctx->rng = crypto_alloc_rng("rng-aes-tegra", CRYPTO_ALG_TYPE_RNG, 0); if (IS_ERR(ctx->rng)) { pr_err("Failed to load transform for tegra rng: %ld\n", PTR_ERR(ctx->rng)); ret = PTR_ERR(ctx->rng); goto fail_rng; } } ctx->rsa512_tfm = crypto_alloc_ahash("tegra-se-rsa512", CRYPTO_ALG_TYPE_AHASH, 0); if 
(IS_ERR(ctx->rsa512_tfm)) { pr_err("Failed to load transform for rsa512: %ld\n", PTR_ERR(ctx->rsa512_tfm)); goto fail_rsa512; } ctx->rsa1024_tfm = crypto_alloc_ahash("tegra-se-rsa1024", CRYPTO_ALG_TYPE_AHASH, 0); if (IS_ERR(ctx->rsa1024_tfm)) { pr_err("Failed to load transform for rsa1024: %ld\n", PTR_ERR(ctx->rsa1024_tfm)); goto fail_rsa1024; } ctx->rsa1536_tfm = crypto_alloc_ahash("tegra-se-rsa1536", CRYPTO_ALG_TYPE_AHASH, 0); if (IS_ERR(ctx->rsa1536_tfm)) { pr_err("Failed to load transform for rsa1536: %ld\n", PTR_ERR(ctx->rsa1536_tfm)); goto fail_rsa1536; } ctx->rsa2048_tfm = crypto_alloc_ahash("tegra-se-rsa2048", CRYPTO_ALG_TYPE_AHASH, 0); if (IS_ERR(ctx->rsa2048_tfm)) { pr_err("Failed to load transform for rsa2048: %ld\n", PTR_ERR(ctx->rsa2048_tfm)); goto fail_rsa2048; } filp->private_data = ctx; return ret; fail_rsa2048: crypto_free_ahash(ctx->rsa1536_tfm); fail_rsa1536: crypto_free_ahash(ctx->rsa1024_tfm); fail_rsa1024: crypto_free_ahash(ctx->rsa512_tfm); fail_rsa512: if (tegra_get_chipid() != TEGRA_CHIPID_TEGRA2 && tegra_get_chipid() != TEGRA_CHIPID_TEGRA3) crypto_free_rng(ctx->rng_drbg); else crypto_free_rng(ctx->rng); fail_rng: if (tegra_get_chipid() != TEGRA_CHIPID_TEGRA2) crypto_free_ablkcipher(ctx->ctr_tfm); fail_ctr: if (tegra_get_chipid() != TEGRA_CHIPID_TEGRA2) crypto_free_ablkcipher(ctx->ofb_tfm); fail_ofb: crypto_free_ablkcipher(ctx->cbc_tfm); fail_cbc: crypto_free_ablkcipher(ctx->ecb_tfm); fail_ecb: kfree(ctx); return ret; }