static int sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *dst = (__be32 *)out;
	__be64 bits;
	unsigned int index, pad_len;
	int i;
	static const u8 padding[64] = { 0x80, };

	/* Save number of bits */
	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64. */
	index = sctx->count & 0x3f;
	pad_len = (index < 56) ? (56 - index) : ((64 + 56) - index);
	crypto_sha256_update(desc, padding, pad_len);

	/* Append length (before padding) */
	crypto_sha256_update(desc, (const u8 *)&bits, sizeof(bits));

	/* Store state in digest */
	for (i = 0; i < 8; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Zeroize sensitive information. */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}
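/*
 * A final like the one above is exposed through the shash API: the
 * function is wired into a struct shash_alg and registered. A minimal
 * sketch of that registration, not taken from this file; sha256_init is
 * an assumed helper that seeds sctx->state with the SHA-256 IV.
 */
static struct shash_alg sha256_alg = {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_init,	/* assumed helper */
	.update		=	crypto_sha256_update,
	.final		=	sha256_final,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	 =	"sha256",
		.cra_driver_name =	"sha256-generic",
		.cra_priority	 =	100,
		.cra_blocksize	 =	SHA256_BLOCK_SIZE,
		.cra_module	 =	THIS_MODULE,
	}
};

static int __init sha256_mod_init(void)
{
	return crypto_register_shash(&sha256_alg);
}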
static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
	int res;

	/* Handle the fast case right here */
	if (partial + len < SHA256_BLOCK_SIZE) {
		sctx->count += len;
		memcpy(sctx->buf + partial, data, len);
		return 0;
	}

	if (!irq_fpu_usable()) {
		res = crypto_sha256_update(desc, data, len);
	} else {
		kernel_fpu_begin();
		res = __sha256_ssse3_update(desc, data, len, partial);
		kernel_fpu_end();
	}

	return res;
}
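/*
 * Both sha256_ssse3_update() and sha256_ssse3_final() lean on
 * __sha256_ssse3_update(), which is not shown in this section. A sketch
 * of its usual shape, assuming an assembly entry point
 * sha256_transform_asm(state, data, blocks) that hashes whole blocks:
 * drain any partially filled buffer first, feed the remaining full
 * blocks straight to the assembly routine, and stash the tail for the
 * next call. Callers must bracket it with kernel_fpu_begin()/_end().
 */
static int __sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
				 unsigned int len, unsigned int partial)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int done = 0;

	sctx->count += len;

	if (partial) {
		/* Top up and flush the buffered partial block. */
		done = SHA256_BLOCK_SIZE - partial;
		memcpy(sctx->buf + partial, data, done);
		sha256_transform_asm(sctx->state, sctx->buf, 1);
	}

	if (len - done >= SHA256_BLOCK_SIZE) {
		const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;

		/* Hash all remaining full blocks directly from @data. */
		sha256_transform_asm(sctx->state, data + done, rounds);
		done += rounds * SHA256_BLOCK_SIZE;
	}

	/* Keep the leftover bytes buffered for the next update. */
	memcpy(sctx->buf, data + done, len - done);
	return 0;
}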
/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int i, index, padlen;
	__be32 *dst = (__be32 *)out;
	__be64 bits;
	static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64 and append length */
	index = sctx->count % SHA256_BLOCK_SIZE;
	padlen = (index < 56) ? (56 - index) :
		 ((SHA256_BLOCK_SIZE + 56) - index);

	if (!irq_fpu_usable()) {
		crypto_sha256_update(desc, padding, padlen);
		crypto_sha256_update(desc, (const u8 *)&bits, sizeof(bits));
	} else {
		kernel_fpu_begin();
		/* We need to fill a whole block for __sha256_ssse3_update() */
		if (padlen <= 56) {
			sctx->count += padlen;
			memcpy(sctx->buf + index, padding, padlen);
		} else {
			__sha256_ssse3_update(desc, padding, padlen, index);
		}
		__sha256_ssse3_update(desc, (const u8 *)&bits,
				      sizeof(bits), 56);
		kernel_fpu_end();
	}

	/* Store state in digest */
	for (i = 0; i < 8; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Wipe context */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}
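/*
 * The "pad out to 56 mod 64" arithmetic above is easy to sanity-check in
 * isolation. A standalone userspace snippet (not kernel code) that walks
 * a few message lengths: index + padlen always lands on 56 mod 64, so
 * the 8-byte big-endian bit count that follows closes the block exactly.
 */
#include <stdio.h>

#define SHA256_BLOCK_SIZE 64

int main(void)
{
	unsigned long long counts[] = { 0, 36, 55, 56, 63, 64, 100 };
	unsigned int i;

	for (i = 0; i < sizeof(counts) / sizeof(counts[0]); i++) {
		unsigned int index = counts[i] % SHA256_BLOCK_SIZE;
		unsigned int padlen = (index < 56) ? (56 - index) :
				      ((SHA256_BLOCK_SIZE + 56) - index);

		/* e.g. count=56 -> padlen=64: a full extra pad block. */
		printf("count=%llu index=%u padlen=%u end=%u\n",
		       counts[i], index, padlen,
		       (index + padlen) % SHA256_BLOCK_SIZE);
	}
	return 0;
}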
static int sha256_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len, sha256_transform_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!irq_fpu_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/* make sure casting to sha256_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len,
			      (sha256_block_fn *)sha256_xform);
	kernel_fpu_end();

	return 0;
}
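/*
 * The sha256_base_*() helpers also cover finalization, so the companion
 * to sha256_update() above usually has the shape sketched below: fall
 * back to the generic code when the FPU is unavailable, otherwise hash
 * the trailing data and the padding with the accelerated transform. A
 * sketch of the common pattern, not necessarily this file's exact code.
 */
static int sha256_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out,
			sha256_transform_fn *sha256_xform)
{
	if (!irq_fpu_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len,
				      (sha256_block_fn *)sha256_xform);
	sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_xform);
	kernel_fpu_end();

	/* Byte-swap the state into the output digest and wipe the context. */
	return sha256_base_finish(desc, out);
}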