static int ccm_aes_nx_set_key(struct crypto_aead *tfm, const u8 *in_key, unsigned int key_len) { struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; nx_ctx_init(nx_ctx, HCOP_FC_AES); switch (key_len) { case AES_KEYSIZE_128: NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128); NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_128); nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; break; default: return -EINVAL; } csbcpb->cpb.hdr.mode = NX_MODE_AES_CCM; memcpy(csbcpb->cpb.aes_ccm.key, in_key, key_len); csbcpb_aead->cpb.hdr.mode = NX_MODE_AES_CCA; memcpy(csbcpb_aead->cpb.aes_cca.key, in_key, key_len); return 0; }
static int ctr_aes_nx_set_key(struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len) { struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; nx_ctx_init(nx_ctx, HCOP_FC_AES); switch (key_len) { case AES_KEYSIZE_128: NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128); nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; break; case AES_KEYSIZE_192: NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192); nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192]; break; case AES_KEYSIZE_256: NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256); nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256]; break; default: return -EINVAL; } csbcpb->cpb.hdr.mode = NX_MODE_AES_CTR; memcpy(csbcpb->cpb.aes_ctr.key, in_key, key_len); return 0; }
/*
 * nx_sha256_init() - initialize a SHA-256 state for the NX coprocessor.
 *
 * Zeroes the shash state, selects the SHA-256 property set and digest
 * size in the control block, maps the in-memory state buffer into the
 * output scatter/gather list, and seeds the state with the standard
 * SHA-256 initial hash values (stored big-endian).
 *
 * Return: 0 on success, -EINVAL if the state buffer could not be fully
 * mapped into the s/g list.
 */
static int nx_sha256_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
	struct nx_sg *out_sg;
	int len;
	u32 max_sg_len;

	nx_ctx_init(nx_ctx, HCOP_FC_SHA);

	memset(sctx, 0, sizeof *sctx);

	nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256];
	NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256);

	/* Cap the s/g list length by both the driver-wide maximum and the
	 * per-algorithm data byte limit. */
	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
			   nx_driver.of.max_sg_len/sizeof(struct nx_sg));
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);

	len = SHA256_DIGEST_SIZE;
	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
				  &len, max_sg_len);
	/* NOTE(review): (base - end) makes outlen NEGATIVE on purpose, it
	 * seems — presumably the sign tells the firmware the output is a
	 * scatterlist rather than a linear buffer. Do not "fix" the sign
	 * without confirming against the driver's other s/g builders. */
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	/* nx_build_sg_list() shrinks *len if it could not map everything. */
	if (len != SHA256_DIGEST_SIZE)
		return -EINVAL;

	/* Standard SHA-256 initial hash values, kept big-endian. */
	sctx->state[0] = __cpu_to_be32(SHA256_H0);
	sctx->state[1] = __cpu_to_be32(SHA256_H1);
	sctx->state[2] = __cpu_to_be32(SHA256_H2);
	sctx->state[3] = __cpu_to_be32(SHA256_H3);
	sctx->state[4] = __cpu_to_be32(SHA256_H4);
	sctx->state[5] = __cpu_to_be32(SHA256_H5);
	sctx->state[6] = __cpu_to_be32(SHA256_H6);
	sctx->state[7] = __cpu_to_be32(SHA256_H7);
	sctx->count = 0;

	return 0;
}
static int nx_sha512_init(struct shash_desc *desc) { struct sha512_state *sctx = shash_desc_ctx(desc); struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); int len; int rc; nx_ctx_init(nx_ctx, HCOP_FC_SHA); memset(sctx, 0, sizeof *sctx); nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA512]; NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA512); len = SHA512_DIGEST_SIZE; rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->out_sg, &nx_ctx->op.outlen, &len, (u8 *)sctx->state, NX_DS_SHA512); if (rc || len != SHA512_DIGEST_SIZE) goto out; sctx->state[0] = __cpu_to_be64(SHA512_H0); sctx->state[1] = __cpu_to_be64(SHA512_H1); sctx->state[2] = __cpu_to_be64(SHA512_H2); sctx->state[3] = __cpu_to_be64(SHA512_H3); sctx->state[4] = __cpu_to_be64(SHA512_H4); sctx->state[5] = __cpu_to_be64(SHA512_H5); sctx->state[6] = __cpu_to_be64(SHA512_H6); sctx->state[7] = __cpu_to_be64(SHA512_H7); sctx->count[0] = 0; out: return 0; }