static int n2_hash_cra_init(struct crypto_tfm *tfm)
{
	const char *fallback_driver_name = tfm->__crt_alg->cra_name;
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct n2_hash_ctx *ctx = crypto_ahash_ctx(ahash);
	struct crypto_ahash *fallback_tfm;
	int err;

	fallback_tfm = crypto_alloc_ahash(fallback_driver_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm)) {
		pr_warn("Fallback driver '%s' could not be loaded!\n",
			fallback_driver_name);
		err = PTR_ERR(fallback_tfm);
		goto out;
	}

	crypto_ahash_set_reqsize(ahash, (sizeof(struct n2_hash_req_ctx) +
					 crypto_ahash_reqsize(fallback_tfm)));

	ctx->fallback_tfm = fallback_tfm;
	return 0;

out:
	return err;
}
static void n2_hash_cra_exit(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct n2_hash_ctx *ctx = crypto_ahash_ctx(ahash);

	crypto_free_ahash(ctx->fallback_tfm);
}
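/*
 * Hedged sketch, not taken from the n2 driver: how a cra_init/cra_exit
 * pair like the one above is typically wired into an ahash algorithm
 * template and registered with crypto_register_ahash(). The request ops
 * (example_hash_*), driver name, and priority are illustrative
 * assumptions; only n2_hash_cra_init/n2_hash_cra_exit and
 * struct n2_hash_ctx come from the code above. Requires
 * <crypto/internal/hash.h> and <crypto/sha.h>.
 */
static struct ahash_alg example_sha1_alg = {
	.init		= example_hash_init,	/* assumed request ops */
	.update		= example_hash_update,
	.final		= example_hash_final,
	.digest		= example_hash_digest,
	.halg = {
		.digestsize	= SHA1_DIGEST_SIZE,
		.base = {
			.cra_name	 = "sha1",
			.cra_driver_name = "sha1-example",
			.cra_priority	 = 300,
			/*
			 * NEED_FALLBACK makes crypto_alloc_ahash() in
			 * the init above skip this driver when it
			 * resolves the generic "sha1" name.
			 */
			.cra_flags	 = CRYPTO_ALG_ASYNC |
					   CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize	 = SHA1_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct n2_hash_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = n2_hash_cra_init,
			.cra_exit	 = n2_hash_cra_exit,
		},
	},
};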
static int rk_cra_hash_init(struct crypto_tfm *tfm)
{
	struct rk_ahash_ctx *tctx = crypto_tfm_ctx(tfm);
	struct rk_crypto_tmp *algt;
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	const char *alg_name = crypto_tfm_alg_name(tfm);

	algt = container_of(alg, struct rk_crypto_tmp, alg.hash);

	tctx->dev = algt->dev;
	tctx->dev->addr_vir = (void *)__get_free_page(GFP_KERNEL);
	if (!tctx->dev->addr_vir) {
		dev_err(tctx->dev->dev, "failed to allocate page for addr_vir\n");
		return -ENOMEM;
	}
	tctx->dev->start = rk_ahash_start;
	tctx->dev->update = rk_ahash_crypto_rx;
	tctx->dev->complete = rk_ahash_crypto_complete;

	/* for fallback */
	tctx->fallback_tfm = crypto_alloc_ahash(alg_name, 0,
						CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(tctx->fallback_tfm)) {
		dev_err(tctx->dev->dev, "Could not load fallback driver.\n");
		/* don't leak the page allocated above on the error path */
		free_page((unsigned long)tctx->dev->addr_vir);
		return PTR_ERR(tctx->fallback_tfm);
	}
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct rk_ahash_rctx) +
				 crypto_ahash_reqsize(tctx->fallback_tfm));

	return tctx->dev->enable_clk(tctx->dev);
}
static int qce_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct qce_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_ahash_set_reqsize(ahash, sizeof(struct qce_sha_reqctx));
	memset(ctx, 0, sizeof(*ctx));
	return 0;
}
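/*
 * Hedged sketch, not from the qce driver: the reqsize reserved via
 * crypto_ahash_set_reqsize() above is what makes ahash_request_ctx()
 * return usable per-request storage in the actual hash operations.
 * example_sha_init is an assumed handler name; struct qce_sha_reqctx
 * comes from the code above.
 */
static int example_sha_init(struct ahash_request *req)
{
	struct qce_sha_reqctx *rctx = ahash_request_ctx(req);

	/* start each request with clean per-request state */
	memset(rctx, 0, sizeof(*rctx));
	return 0;
}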
/**
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg = crypto_alg_to_crypto4xx_alg(alg);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	struct dynamic_sa_hash160 *sa_in;
	int rc;

	ctx->dev = my_alg->dev;
	ctx->is_hash = 1;
	ctx->hash_final = 0;

	/* Create SA */
	if (ctx->sa_in_dma_addr || ctx->sa_out_dma_addr)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	if (ctx->state_record_dma_addr == 0) {
		crypto4xx_alloc_state_record(ctx);
		if (!ctx->state_record_dma_addr) {
			crypto4xx_free_sa(ctx);
			return -ENOMEM;
		}
	}

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(sa, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB,
				 SA_EXTENDED_SN_OFF, SA_SEQ_MASK_OFF,
				 SA_MC_ENABLE, SA_NOT_COPY_PAD,
				 SA_NOT_COPY_PAYLOAD, SA_NOT_COPY_HDR);
	ctx->direction = DIR_INBOUND;
	sa->sa_contents = SA_HASH160_CONTENTS;
	sa_in = (struct dynamic_sa_hash160 *) ctx->sa_in;
	/* Need to zero hash digest in SA */
	memset(sa_in->inner_digest, 0, sizeof(sa_in->inner_digest));
	memset(sa_in->outer_digest, 0, sizeof(sa_in->outer_digest));
	sa_in->state_ptr = ctx->state_record_dma_addr;
	ctx->offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(ctx);

	return 0;
}
static inline int mv_cesa_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct mv_cesa_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->base.ops = &mv_cesa_ahash_req_ops;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct mv_cesa_ahash_req));
	return 0;
}
static int sunxi_ss_cra_hash_init(struct crypto_tfm *tfm)
{
	if (ss_flow_request(crypto_tfm_ctx(tfm)) < 0)
		return -1;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(ss_aes_req_ctx_t));
	/* sizeof yields a size_t, so %zu rather than %d */
	SS_DBG("reqsize = %zu\n", sizeof(ss_aes_req_ctx_t));
	return 0;
}
int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}
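/*
 * Hedged sketch of how the reqsize reserved above is typically used:
 * the per-request area holds a nested ahash_request that forwards the
 * operation to the cryptd-backed tfm. Simplified and illustrative
 * (example_ghash_async_init is an assumed name); the real glue code
 * also takes a synchronous fast path when the FPU is usable.
 */
static int example_ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);

	/* retarget the nested request at the cryptd tfm allocated above */
	ahash_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
	ahash_request_set_callback(cryptd_req, req->base.flags,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(cryptd_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_init(cryptd_req);
}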
static int sahara_sha_cra_init(struct crypto_tfm *tfm)
{
	const char *name = crypto_tfm_alg_name(tfm);
	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->shash_fallback = crypto_alloc_shash(name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shash_fallback)) {
		pr_err("Error allocating fallback algo %s\n", name);
		return PTR_ERR(ctx->shash_fallback);
	}

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct sahara_sha_reqctx) +
				 SHA_BUFFER_LEN + SHA256_BLOCK_SIZE);

	return 0;
}
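/*
 * Hedged sketch, not from the sahara driver itself: a driver that
 * allocates a shash fallback as above typically drives it through a
 * stack descriptor when the hardware cannot handle a request.
 * example_sha_fallback_digest is an assumed helper name; only
 * struct sahara_ctx and its shash_fallback field come from the code
 * above. SHASH_DESC_ON_STACK and crypto_shash_digest() live in
 * <crypto/hash.h>.
 */
static int example_sha_fallback_digest(struct sahara_ctx *ctx,
				       const u8 *data, unsigned int len,
				       u8 *out)
{
	SHASH_DESC_ON_STACK(desc, ctx->shash_fallback);

	desc->tfm = ctx->shash_fallback;
	/* one-shot: init + update + final on the software fallback */
	return crypto_shash_digest(desc, data, len, out);
}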
/**
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB,
				 SA_EXTENDED_SN_OFF, SA_SEQ_MASK_OFF,
				 SA_MC_ENABLE, SA_NOT_COPY_PAD,
				 SA_NOT_COPY_PAYLOAD, SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}