/*
 * One-shot AES-CCM en-/decrypt via the crypto4xx packet engine.
 *
 * Works on a stack copy of the SA so the per-request digest length and
 * crypto mode can be patched in without touching the shared session SA.
 *
 * @req:     the AEAD request (src/dst sg lists, IV, assoclen, cryptlen)
 * @decrypt: true for decryption (cryptlen then includes the auth tag)
 *
 * Returns the result of crypto4xx_build_pd(), or the fallback cipher's
 * result when the request cannot be handled by the hardware.
 */
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int len = req->cryptlen;
	__le32 iv[16];
	/*
	 * ctx->sa_len counts 32-bit words, so the SA occupies exactly
	 * ctx->sa_len u32 slots (ctx->sa_len * 4 bytes).  The previous
	 * version declared u32 tmp_sa[ctx->sa_len * 4] -- four times too
	 * large -- and memcpy'd sizeof(tmp_sa) bytes, reading three
	 * SA-lengths past the end of ctx->sa_in/sa_out.
	 * NOTE(review): this is still a VLA, which the kernel forbids;
	 * replacing it with a fixed SA_AES*_CCM_LEN-sized buffer needs the
	 * project header and should follow up.
	 */
	u32 tmp_sa[ctx->sa_len];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;

	if (crypto4xx_aead_need_fallback(req, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	/* On decrypt, cryptlen includes the trailing auth tag. */
	if (decrypt)
		len -= crypto_aead_authsize(aead);

	/* Copy exactly the SA's byte length, not the buffer's sizeof. */
	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	/*
	 * CCM flags byte iv[0] encodes L - 1; the engine gets the first
	 * 16 - L bytes of the formatted nonce with the counter word zeroed.
	 * NOTE(review): iv[] beyond the copied prefix is left uninitialized
	 * as in the original -- presumably the engine ignores it; confirm.
	 */
	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen);
}
/*
 * One-shot ahash digest entry point (legacy style): flags the context for
 * a finalizing hash pass and submits an inbound packet descriptor.
 *
 * NOTE(review): req->result is a plain byte buffer but is cast to a
 * struct scatterlist * here -- presumably crypto4xx_build_pd() special-
 * cases hash destinations and never walks it as a real sg list; verify
 * against crypto4xx_build_pd().
 *
 * NOTE(review): hash_final/pd_ctl/direction live in the per-tfm ctx, not
 * in a per-request context, so concurrent requests on the same tfm would
 * race on this state -- confirm callers serialize.
 */
int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

	ctx->hash_final = 1;		/* single pass: init + update + final */
	ctx->pd_ctl = 0x11;		/* PD control word -- bit meanings not visible here */
	ctx->direction = DIR_INBOUND;	/* hashing uses the inbound path */

	return crypto4xx_build_pd(&req->base, ctx, req->src,
				  (struct scatterlist *) req->result,
				  req->nbytes, NULL, 0);
}
int crypto4xx_decrypt(struct ablkcipher_request *req) { struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); unsigned int ivlen = crypto_ablkcipher_ivsize( crypto_ablkcipher_reqtfm(req)); __le32 iv[ivlen]; if (ivlen) crypto4xx_memcpy_to_le32(iv, req->info, ivlen); return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len, 0); }
int crypto4xx_hash_digest(struct ahash_request *req) { struct crypto_ahash *ahash = crypto_ahash_reqtfm(req); struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); struct scatterlist dst; unsigned int ds = crypto_ahash_digestsize(ahash); sg_init_one(&dst, req->result, ds); return crypto4xx_build_pd(&req->base, ctx, req->src, &dst, req->nbytes, NULL, 0, ctx->sa_in, ctx->sa_len, 0); }
/*
 * RFC 3686 (CTR mode with nonce) decrypt: assembles the 16-byte counter
 * block as nonce || per-request IV || initial counter of 1, then submits
 * the packet descriptor.
 *
 * The *encrypt* SA (ctx->sa_out) is used on purpose: CTR decryption is
 * the same keystream-XOR operation as encryption.
 */
int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	__le32 ctr_block[AES_IV_SIZE / 4];

	ctr_block[0] = ctx->iv_nonce;
	ctr_block[1] = cpu_to_le32p((u32 *) req->info);
	ctr_block[2] = cpu_to_le32p((u32 *) (req->info + 4));
	ctr_block[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->nbytes, ctr_block, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0);
}
/*
 * ablkcipher decrypt entry point (legacy style): programs the per-tfm
 * context for a plain-cipher inbound pass and submits the descriptor,
 * passing the request IV through untranslated.
 *
 * NOTE(review): direction/hash_final/is_hash/pd_ctl are stored in the
 * per-tfm ctx rather than a per-request context, so concurrent requests
 * on the same tfm (or a racing encrypt) would clobber this state --
 * confirm callers serialize.
 */
int crypto4xx_decrypt(struct ablkcipher_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

	ctx->direction = DIR_INBOUND;	/* decrypt path */
	ctx->hash_final = 0;		/* plain cipher op, not a hash */
	ctx->is_hash = 0;
	ctx->pd_ctl = 1;		/* PD control word -- bit meanings not visible here */

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->nbytes, req->info,
				  get_dynamic_sa_iv_size(ctx));
}
/*
 * One-shot AES-GCM en-/decrypt via the crypto4xx packet engine.
 *
 * Falls back to the software implementation when the request cannot be
 * handled by the hardware; otherwise builds the initial counter block
 * (96-bit IV with the 32-bit counter word seeded to 1) and submits the
 * packet descriptor with the direction-appropriate SA.
 */
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	__le32 ctr_block[4];
	unsigned int datalen;

	if (crypto4xx_aead_need_fallback(req, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	/* on decrypt, cryptlen includes the trailing auth tag */
	datalen = req->cryptlen;
	if (decrypt)
		datalen -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	/* counter block = IV (12 bytes) || counter, counter starts at 1 */
	crypto4xx_memcpy_to_le32(ctr_block, req->iv, GCM_AES_IV_SIZE);
	ctr_block[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  datalen, ctr_block, sizeof(ctr_block),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen);
}