/*
 * Unlike the crc32_le() convention used by the plain crc32 driver, the
 * CRC32C result is inverted here (final XOR with 0xFFFFFFFF) before being
 * stored in little-endian byte order.
 */
static int __crc32c_pclmul_finup(u32 *crcp, const u8 *data, unsigned int len,
				u8 *out)
{
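	/*
	 * Unconditionally take the PCLMULQDQ-accelerated crc_pcl() path;
	 * kernel_fpu_begin()/end() save and restore the SIMD register state
	 * that crc_pcl() clobbers. Note there is no irq_fpu_usable() check
	 * here, unlike __crc32c_pcl_intel_finup() below.
	 */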
	kernel_fpu_begin();
	*(__le32 *)out = ~cpu_to_le32(crc_pcl(data, len, *crcp));
	kernel_fpu_end();
	return 0;
}
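
/*
 * Incremental update: the running CRC32C value lives in the shash
 * descriptor context and is fed back into crc_pcl() on each call.
 * As above, the PCLMULQDQ path is taken unconditionally.
 */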
static int crc32c_pclmul_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	u32 *crcp = shash_desc_ctx(desc);

	kernel_fpu_begin();
	*crcp = crc_pcl(data, len, *crcp);
	kernel_fpu_end();
	return 0;
}
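
/*
 * Finalization helper: for buffers of at least CRC32C_PCL_BREAKEVEN bytes,
 * and only when the FPU is usable in this context, use the PCLMULQDQ-based
 * crc_pcl(); otherwise fall back to crc32c_intel_le_hw(), which relies on
 * the SSE4.2 CRC32 instruction and needs no FPU state save/restore.
 */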
static int __crc32c_pcl_intel_finup(u32 *crcp, const u8 *data, unsigned int len,
				u8 *out)
{
	if (len >= CRC32C_PCL_BREAKEVEN && irq_fpu_usable()) {
		kernel_fpu_begin();
		*(__le32 *)out = ~cpu_to_le32(crc_pcl(data, len, *crcp));
		kernel_fpu_end();
	} else
		*(__le32 *)out =
			~cpu_to_le32(crc32c_intel_le_hw(*crcp, data, len));
	return 0;
}
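
/*
 * Incremental update with the same breakeven heuristic as the finup helper
 * above; the intermediate CRC32C value is kept in the shash descriptor
 * context between calls.
 */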
static int crc32c_pcl_intel_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	u32 *crcp = shash_desc_ctx(desc);

	/*
	 * use faster PCL version if datasize is large enough to
	 * overcome kernel fpu state save/restore overhead
	 */
	if (len >= CRC32C_PCL_BREAKEVEN && irq_fpu_usable()) {
		kernel_fpu_begin();
		*crcp = crc_pcl(data, len, *crcp);
		kernel_fpu_end();
	} else
		*crcp = crc32c_intel_le_hw(*crcp, data, len);
	return 0;
}
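
/*
 * A minimal sketch of how the update/finup helpers above could be wired
 * into the crypto shash framework. This is illustrative only: the init,
 * final and digest-size constants referenced here are assumed to be
 * provided by the surrounding driver (as they are in the in-tree
 * crc32c-intel glue code) and are not shown in this excerpt.
 */
static int crc32c_pcl_intel_finup(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	/* Thin wrapper: hand the per-request CRC state to the helper above */
	return __crc32c_pcl_intel_finup(shash_desc_ctx(desc), data, len, out);
}

static struct shash_alg alg = {
	.init		= crc32c_intel_init,	/* assumed helper: seeds the desc ctx */
	.update		= crc32c_pcl_intel_update,
	.finup		= crc32c_pcl_intel_finup,
	.final		= crc32c_intel_final,	/* assumed helper: inverts and stores the CRC */
	.descsize	= sizeof(u32),
	.digestsize	= CHKSUM_DIGEST_SIZE,	/* assumed constant: 4 bytes */
	.base		= {
		.cra_name		= "crc32c",
		.cra_driver_name	= "crc32c-intel",
		.cra_priority		= 200,
		.cra_blocksize		= CHKSUM_BLOCK_SIZE,	/* assumed constant: 1 */
		.cra_ctxsize		= sizeof(u32),
		.cra_module		= THIS_MODULE,
	},
};

static int __init crc32c_intel_mod_init(void)
{
	/* Register the accelerated shash with the crypto API */
	return crypto_register_shash(&alg);
}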