/*
 * Compute the Fletcher-4 checksum of `buf` in native byte order,
 * storing the result in *zcp.
 *
 * The largest 64-byte-aligned prefix is checksummed with the fastest
 * available implementation (fletcher_4_impl_get()); any remaining tail
 * is folded in incrementally with the scalar code.  Buffers shorter
 * than the 64-byte alignment unit go through the scalar ops entirely.
 *
 * size must be a multiple of sizeof (uint32_t) (asserted below).
 */
void
fletcher_4_native(const void *buf, uint64_t size, zio_cksum_t *zcp)
{
	uint64_t aligned = P2ALIGN(size, 64);

	ASSERT(IS_P2ALIGNED(size, sizeof (uint32_t)));

	if (size == 0) {
		ZIO_SET_CHECKSUM(zcp, 0, 0, 0, 0);
		return;
	}

	if (aligned == 0) {
		/* No 64-byte-aligned prefix: scalar ops cover everything. */
		fletcher_4_native_impl(&fletcher_4_scalar_ops, buf, size, zcp);
		return;
	}

	/* Fast implementation over the aligned prefix. */
	fletcher_4_native_impl(fletcher_4_impl_get(), buf, aligned, zcp);

	/* Finish the unaligned tail incrementally. */
	if (aligned < size)
		fletcher_4_incremental_native((char *)buf + aligned,
		    size - aligned, zcp);
}
/*
 * Compute the Fletcher-4 checksum of `buf` in native byte order,
 * storing the result in *zcp.
 *
 * ctx_template is not referenced here (hence ARGSUSED); the parameter
 * exists so this function matches the common checksum-function signature.
 *
 * The largest 64-byte-aligned prefix goes through fletcher_4_native_impl;
 * any unaligned tail — or a buffer too short to have an aligned prefix —
 * is handled by the scalar code, continuing from the state held in *zcp.
 *
 * size must be a multiple of sizeof (uint32_t) (asserted below).
 */
/*ARGSUSED*/
void
fletcher_4_native(const void *buf, uint64_t size,
    const void *ctx_template, zio_cksum_t *zcp)
{
	const uint64_t aligned = P2ALIGN(size, 64);

	ASSERT(IS_P2ALIGNED(size, sizeof (uint32_t)));

	if (aligned > 0) {
		/* Bulk pass over the aligned prefix. */
		fletcher_4_native_impl(buf, aligned, zcp);

		/* Scalar finish for any unaligned tail. */
		if (aligned < size)
			fletcher_4_scalar_native((fletcher_4_ctx_t *)zcp,
			    (char *)buf + aligned, size - aligned);
		return;
	}

	/* Empty or sub-64-byte buffer: zero the state, then scalar pass. */
	ZIO_SET_CHECKSUM(zcp, 0, 0, 0, 0);
	if (size > 0)
		fletcher_4_scalar_native((fletcher_4_ctx_t *)zcp, buf, size);
}
/*
 * Variable-size variant of the native Fletcher-4 checksum: always routes
 * through the scalar implementation, regardless of buffer size.
 */
void
fletcher_4_native_varsize(const void *buf, uint64_t size, zio_cksum_t *zcp)
{
	const fletcher_4_ops_t *scalar = &fletcher_4_scalar_ops;

	fletcher_4_native_impl(scalar, buf, size, zcp);
}