Example #1
/*
 * Initializes a skein MAC context. You may pass a ctx_template, in which
 * case the template will be reused to make initialization more efficient.
 * Otherwise a new context will be constructed. The mechanism cm_type must
 * be one of SKEIN_*_MAC_MECH_INFO_TYPE. As with skein_digest_init, you
 * may pass a skein_param_t in cm_param to configure the length of the
 * digest. The key must be in raw format.
 */
static int
skein_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
    crypto_req_handle_t req)
{
	int	error;

	SKEIN_CTX_LVALUE(ctx) = kmem_alloc(sizeof (*SKEIN_CTX(ctx)),
	    crypto_kmflag(req));
	if (SKEIN_CTX(ctx) == NULL)
		return (CRYPTO_HOST_MEMORY);

	if (ctx_template != NULL) {
		bcopy(ctx_template, SKEIN_CTX(ctx),
		    sizeof (*SKEIN_CTX(ctx)));
	} else {
		error = skein_mac_ctx_build(SKEIN_CTX(ctx), mechanism, key);
		if (error != CRYPTO_SUCCESS)
			goto errout;
	}

	return (CRYPTO_SUCCESS);
errout:
	bzero(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
	kmem_free(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
	SKEIN_CTX_LVALUE(ctx) = NULL;
	return (error);
}
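The errout path above wipes the context with bzero() before kmem_free() so that no key material lingers in freed memory. A minimal userland sketch of the same zero-before-free idiom; the function name and types here are invented for illustration only:

#include <stdlib.h>

/*
 * Illustrative only: clear a buffer that held key material before handing
 * it back to the allocator. The volatile pointer keeps the compiler from
 * optimizing the wipe away, which a plain memset() before free() may not.
 */
static void
secure_free(void *buf, size_t len)
{
	volatile unsigned char *p = buf;

	if (buf == NULL)
		return;
	while (len-- > 0)
		*p++ = 0;
	free(buf);
}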
Example #2
/*
 * Initializes a skein digest context to the configuration in `mechanism'.
 * The mechanism cm_type must be one of SKEIN_*_MECH_INFO_TYPE. The cm_param
 * field may contain a skein_param_t structure indicating the length of the
 * digest the algorithm should produce. Otherwise the default output lengths
 * are applied (32 bytes for Skein-256, 64 bytes for Skein-512 and 128 bytes
 * for Skein-1024).
 */
static int
skein_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_req_handle_t req)
{
	int	error = CRYPTO_SUCCESS;

	if (!VALID_SKEIN_DIGEST_MECH(mechanism->cm_type))
		return (CRYPTO_MECHANISM_INVALID);

	SKEIN_CTX_LVALUE(ctx) = kmem_alloc(sizeof (*SKEIN_CTX(ctx)),
	    crypto_kmflag(req));
	if (SKEIN_CTX(ctx) == NULL)
		return (CRYPTO_HOST_MEMORY);

	SKEIN_CTX(ctx)->sc_mech_type = mechanism->cm_type;
	error = skein_get_digest_bitlen(mechanism,
	    &SKEIN_CTX(ctx)->sc_digest_bitlen);
	if (error != CRYPTO_SUCCESS)
		goto errout;
	SKEIN_OP(SKEIN_CTX(ctx), Init, SKEIN_CTX(ctx)->sc_digest_bitlen);

	return (CRYPTO_SUCCESS);
errout:
	bzero(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
	kmem_free(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
	SKEIN_CTX_LVALUE(ctx) = NULL;
	return (error);
}
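When no skein_param_t is supplied, the defaults described in the block comment apply. A sketch of that mapping, using stand-in constants rather than the module's real SKEIN_*_MECH_INFO_TYPE values:

/*
 * Stand-ins for the Skein mechanism constants (illustrative only).
 */
typedef enum {
	STUB_SKEIN_256_MECH,
	STUB_SKEIN_512_MECH,
	STUB_SKEIN_1024_MECH
} stub_skein_mech_t;

/*
 * Default digest sizes from the comment above: 32, 64 and 128 bytes,
 * i.e. each variant's full state size, expressed here in bits.
 */
static unsigned int
skein_default_digest_bitlen(stub_skein_mech_t mech)
{
	switch (mech) {
	case STUB_SKEIN_256_MECH:
		return (256);
	case STUB_SKEIN_512_MECH:
		return (512);
	case STUB_SKEIN_1024_MECH:
		return (1024);
	default:
		return (0);	/* unknown mechanism */
	}
}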
Example #3
/* ARGSUSED */
static int
sha1_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
    crypto_req_handle_t req)
{
	sha1_hmac_ctx_t *sha1_hmac_ctx_tmpl;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	if ((mechanism->cm_type != SHA1_HMAC_MECH_INFO_TYPE) &&
	    (mechanism->cm_type != SHA1_HMAC_GEN_MECH_INFO_TYPE)) {
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Allocate and initialize SHA1 context.
	 */
	sha1_hmac_ctx_tmpl = kmem_alloc(sizeof (sha1_hmac_ctx_t),
	    crypto_kmflag(req));
	if (sha1_hmac_ctx_tmpl == NULL)
		return (CRYPTO_HOST_MEMORY);

	if (keylen_in_bytes > SHA1_HMAC_BLOCK_SIZE) {
		uchar_t digested_key[SHA1_DIGEST_LENGTH];

		/*
		 * Hash the passed-in key to get a smaller key.
		 * The inner context is used since it hasn't been
		 * initialized yet.
		 */
		PROV_SHA1_DIGEST_KEY(&sha1_hmac_ctx_tmpl->hc_icontext,
		    key->ck_data, keylen_in_bytes, digested_key);
		sha1_mac_init_ctx(sha1_hmac_ctx_tmpl, digested_key,
		    SHA1_DIGEST_LENGTH);
	} else {
		sha1_mac_init_ctx(sha1_hmac_ctx_tmpl, key->ck_data,
		    keylen_in_bytes);
	}

	sha1_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
	*ctx_template = (crypto_spi_ctx_template_t)sha1_hmac_ctx_tmpl;
	*ctx_template_size = sizeof (sha1_hmac_ctx_t);

	return (CRYPTO_SUCCESS);
}
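The branch above implements the standard HMAC key rule from RFC 2104: a key longer than the hash block size is replaced by its digest, while a shorter key is used as-is and zero-padded to one block by the init helper. A self-contained sketch of that normalization step; hash_fn is a placeholder, not this module's API:

#include <stddef.h>
#include <string.h>

typedef void (*hash_fn_t)(const void *data, size_t len, unsigned char *out);

/*
 * Normalize an HMAC key to one hash block: digest it if it is longer than
 * the block size, otherwise copy it and zero-pad the remainder. The caller
 * later XORs the block with ipad/opad to key the inner and outer contexts.
 */
static void
hmac_normalize_key(const unsigned char *key, size_t keylen,
    unsigned char *block, size_t block_size, hash_fn_t hash_fn)
{
	memset(block, 0, block_size);
	if (keylen > block_size) {
		/* The digest is assumed to be no longer than one block. */
		hash_fn(key, keylen, block);
	} else {
		memcpy(block, key, keylen);
	}
}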
Example #4
/*
 * Performs a Final on a context and writes to an mblk digest output.
 */
static int
skein_digest_final_mblk(skein_ctx_t *ctx, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	off_t	offset = digest->cd_offset;
	mblk_t	*mp;

	/* Jump to the first mblk_t that will be used to store the digest. */
	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
	    offset -= MBLKL(mp), mp = mp->b_cont)
		;
	if (mp == NULL) {
		/* caller specified offset is too large */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	if (offset + CRYPTO_BITS2BYTES(ctx->sc_digest_bitlen) <= MBLKL(mp)) {
		/* The digest will fit in the current mblk. */
		SKEIN_OP(ctx, Final, mp->b_rptr + offset);
	} else {
		/* Split the digest up between the individual buffers. */
		uint8_t *digest_tmp;
		off_t scratch_offset = 0;
		size_t length = CRYPTO_BITS2BYTES(ctx->sc_digest_bitlen);
		size_t cur_len;

		digest_tmp = kmem_alloc(CRYPTO_BITS2BYTES(
		    ctx->sc_digest_bitlen), crypto_kmflag(req));
		if (digest_tmp == NULL)
			return (CRYPTO_HOST_MEMORY);
		SKEIN_OP(ctx, Final, digest_tmp);
		while (mp != NULL && length > 0) {
			cur_len = MIN(MBLKL(mp) - offset, length);
			bcopy(digest_tmp + scratch_offset,
			    mp->b_rptr + offset, cur_len);
			length -= cur_len;
			mp = mp->b_cont;
			scratch_offset += cur_len;
			offset = 0;
		}
		kmem_free(digest_tmp, CRYPTO_BITS2BYTES(ctx->sc_digest_bitlen));
		if (mp == NULL && length > 0) {
			/* digest too long to fit in the mblk buffers */
			return (CRYPTO_DATA_LEN_RANGE);
		}
	}

	return (CRYPTO_SUCCESS);
}
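The offset walk at the top of the function is a generic chained-buffer seek. A userland sketch with a stand-in type (chunk_t plays the role of mblk_t here, and chunk_t.len that of MBLKL()):

#include <stddef.h>

typedef struct chunk {
	struct chunk	*next;	/* analogous to mblk_t's b_cont */
	unsigned char	*data;
	size_t		len;	/* analogous to MBLKL(mp) */
} chunk_t;

/*
 * Advance to the chunk that contains byte "offset" of the chain and return
 * it, storing the remaining in-chunk offset; return NULL if the chain is
 * shorter than "offset".
 */
static chunk_t *
chunk_seek(chunk_t *c, size_t offset, size_t *in_chunk_offset)
{
	while (c != NULL && offset >= c->len) {
		offset -= c->len;
		c = c->next;
	}
	if (c != NULL)
		*in_chunk_offset = offset;
	return (c);
}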
Example #5
static int
sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_req_handle_t req)
{

	/*
	 * Allocate and initialize SHA2 context.
	 */
	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
	    crypto_kmflag(req));
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);

	return (CRYPTO_SUCCESS);
}
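SHA2Init() selects the variant from cm_type, and the eventual output size follows from that choice. For reference, the SHA-2 digest lengths, shown with a stand-in enum rather than the real mech info types:

#include <stddef.h>

typedef enum { STUB_SHA256, STUB_SHA384, STUB_SHA512 } stub_sha2_t;

/* SHA-2 output sizes in bytes (SHA256/384/512_DIGEST_LENGTH). */
static size_t
stub_sha2_digest_len(stub_sha2_t mech)
{
	switch (mech) {
	case STUB_SHA256:
		return (32);
	case STUB_SHA384:
		return (48);
	case STUB_SHA512:
		return (64);
	default:
		return (0);
	}
}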
Example #6
static int
md5_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_req_handle_t req)
{
	if (mechanism->cm_type != MD5_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	/*
	 * Allocate and initialize MD5 context.
	 */
	ctx->cc_provider_private = kmem_alloc(sizeof (md5_ctx_t),
	    crypto_kmflag(req));
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	PROV_MD5_CTX(ctx)->mc_mech_type = MD5_MECH_INFO_TYPE;
	MD5Init(&PROV_MD5_CTX(ctx)->mc_md5_ctx);

	return (CRYPTO_SUCCESS);
}
Example #7
/*ARGSUSED*/
static int
skein_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
    crypto_req_handle_t req)
{
	int		error;
	skein_ctx_t	*ctx_tmpl;

	ctx_tmpl = kmem_alloc(sizeof (*ctx_tmpl), crypto_kmflag(req));
	if (ctx_tmpl == NULL)
		return (CRYPTO_HOST_MEMORY);
	error = skein_mac_ctx_build(ctx_tmpl, mechanism, key);
	if (error != CRYPTO_SUCCESS)
		goto errout;
	*ctx_template = ctx_tmpl;
	*ctx_template_size = sizeof (*ctx_tmpl);

	return (CRYPTO_SUCCESS);
errout:
	bzero(ctx_tmpl, sizeof (*ctx_tmpl));
	kmem_free(ctx_tmpl, sizeof (*ctx_tmpl));
	return (error);
}
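A context template produced here is what skein_mac_init() (Example #1) copies with bcopy() instead of calling skein_mac_ctx_build() again, so the keyed Skein state is derived once per key rather than once per MAC operation. A generic sketch of that reuse pattern; the type and the derivation step below are placeholders, not the module's code:

#include <stddef.h>
#include <string.h>

typedef struct {
	unsigned char keyed_state[128];	/* stand-in for the keyed skein_ctx_t */
} mac_template_t;

/* Expensive part, done once per key (placeholder for skein_mac_ctx_build). */
static int
template_build(mac_template_t *tmpl, const unsigned char *key, size_t keylen)
{
	memset(tmpl->keyed_state, 0, sizeof (tmpl->keyed_state));
	memcpy(tmpl->keyed_state, key,
	    keylen < sizeof (tmpl->keyed_state) ?
	    keylen : sizeof (tmpl->keyed_state));
	return (0);
}

/* Cheap part, done per MAC operation: start from a copy of the template. */
static void
per_op_init(mac_template_t *op_ctx, const mac_template_t *tmpl)
{
	memcpy(op_ctx, tmpl, sizeof (*op_ctx));
}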
Example #8
/*
 * Performs a Final on a context and writes to a uio digest output.
 */
static int
skein_digest_final_uio(skein_ctx_t *ctx, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	off_t	offset = digest->cd_offset;
	uint_t	vec_idx;
	uio_t	*uio = digest->cd_uio;

	/* We support only kernel buffers. */
	if (uio->uio_segflg != UIO_SYSSPACE)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Jump to the first iovec that will receive part of the digest.
	 */
	for (vec_idx = 0; vec_idx < uio->uio_iovcnt &&
	    offset >= uio->uio_iov[vec_idx].iov_len;
	    offset -= uio->uio_iov[vec_idx++].iov_len)
		;
	if (vec_idx == uio->uio_iovcnt) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}
	if (offset + CRYPTO_BITS2BYTES(ctx->sc_digest_bitlen) <=
	    uio->uio_iov[vec_idx].iov_len) {
		/* The computed digest will fit in the current iovec. */
		SKEIN_OP(ctx, Final,
		    (uchar_t *)uio->uio_iov[vec_idx].iov_base + offset);
	} else {
		uint8_t *digest_tmp;
		off_t scratch_offset = 0;
		size_t length = CRYPTO_BITS2BYTES(ctx->sc_digest_bitlen);
		size_t cur_len;

		digest_tmp = kmem_alloc(CRYPTO_BITS2BYTES(
		    ctx->sc_digest_bitlen), crypto_kmflag(req));
		if (digest_tmp == NULL)
			return (CRYPTO_HOST_MEMORY);
		SKEIN_OP(ctx, Final, digest_tmp);
		while (vec_idx < uio->uio_iovcnt && length > 0) {
			cur_len = MIN(uio->uio_iov[vec_idx].iov_len - offset,
			    length);
			bcopy(digest_tmp + scratch_offset,
			    uio->uio_iov[vec_idx].iov_base + offset, cur_len);

			length -= cur_len;
			vec_idx++;
			scratch_offset += cur_len;
			offset = 0;
		}
		kmem_free(digest_tmp, CRYPTO_BITS2BYTES(ctx->sc_digest_bitlen));

		if (vec_idx == uio->uio_iovcnt && length > 0) {
			/*
			 * The end of the iovecs was reached before the
			 * entire digest could be written, i.e. the
			 * buffers the caller provided are too small for
			 * the requested digest length.
			 */
			return (CRYPTO_DATA_LEN_RANGE);
		}
	}

	return (CRYPTO_SUCCESS);
}
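Both Final variants above share the same scatter step: when the digest does not fit in one buffer, it is computed into a scratch buffer and then copied out piecewise. A standalone userland version over an iovec array (illustrative, not the module's code):

#include <string.h>
#include <sys/uio.h>

/*
 * Copy "len" bytes from a contiguous buffer into an iovec array, starting
 * "offset" bytes into the vector. Returns 0 on success, -1 if the iovecs
 * cannot hold the data at that offset.
 */
static int
scatter_buf(const unsigned char *src, size_t len,
    const struct iovec *iov, int iovcnt, size_t offset)
{
	int i = 0;

	/* Skip whole iovecs that lie entirely before the offset. */
	while (i < iovcnt && offset >= iov[i].iov_len)
		offset -= iov[i++].iov_len;
	if (i == iovcnt)
		return (-1);	/* offset is past the end of the buffers */

	/* Fill successive iovecs until the source is exhausted. */
	while (i < iovcnt && len > 0) {
		size_t cur = iov[i].iov_len - offset;

		if (cur > len)
			cur = len;
		memcpy((unsigned char *)iov[i].iov_base + offset, src, cur);
		src += cur;
		len -= cur;
		offset = 0;
		i++;
	}
	return (len == 0 ? 0 : -1);	/* -1: ran out of iovecs */
}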
Example #9
/*
 * Initializes a multi-part MAC operation.
 */
static int
md5_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	if (mechanism->cm_type != MD5_HMAC_MECH_INFO_TYPE &&
	    mechanism->cm_type != MD5_HMAC_GEN_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	ctx->cc_provider_private = kmem_alloc(sizeof (md5_hmac_ctx_t),
	    crypto_kmflag(req));
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	if (ctx_template != NULL) {
		/* reuse context template */
		bcopy(ctx_template, PROV_MD5_HMAC_CTX(ctx),
		    sizeof (md5_hmac_ctx_t));
	} else {
		/* no context template, compute context */
		if (keylen_in_bytes > MD5_HMAC_BLOCK_SIZE) {
			uchar_t digested_key[MD5_DIGEST_LENGTH];
			md5_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;

			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_MD5_DIGEST_KEY(&hmac_ctx->hc_icontext,
			    key->ck_data, keylen_in_bytes, digested_key);
			md5_mac_init_ctx(PROV_MD5_HMAC_CTX(ctx),
			    digested_key, MD5_DIGEST_LENGTH);
		} else {
			md5_mac_init_ctx(PROV_MD5_HMAC_CTX(ctx),
			    key->ck_data, keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable.
	 */
	PROV_MD5_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
	if (mechanism->cm_type == MD5_HMAC_GEN_MECH_INFO_TYPE) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			/* Only read cm_param once it has been validated. */
			PROV_MD5_GET_DIGEST_LEN(mechanism,
			    PROV_MD5_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_MD5_HMAC_CTX(ctx)->hc_digest_len >
			    MD5_DIGEST_LENGTH)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
	}

	if (ret != CRYPTO_SUCCESS) {
		bzero(ctx->cc_provider_private, sizeof (md5_hmac_ctx_t));
		kmem_free(ctx->cc_provider_private, sizeof (md5_hmac_ctx_t));
		ctx->cc_provider_private = NULL;
	}

	return (ret);
}
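The MD5_HMAC_GEN mechanism carries its truncated MAC length as a single ulong_t parameter, and the checks above require the parameter to be present, correctly sized, and no larger than the full digest. A sketch of that validation using a stand-in struct (not the kernel's crypto_mechanism_t):

#include <stddef.h>

typedef struct {
	void	*cm_param;
	size_t	cm_param_len;
} mech_stub_t;

/*
 * Extract and validate the truncated MAC length of an *_HMAC_GENERAL
 * mechanism. Returns 0 on success, -1 on a bad parameter (the kernel
 * code maps this to CRYPTO_MECHANISM_PARAM_INVALID).
 */
static int
hmac_general_digest_len(const mech_stub_t *mech, size_t full_digest_len,
    size_t *out_len)
{
	if (mech->cm_param == NULL ||
	    mech->cm_param_len != sizeof (unsigned long))
		return (-1);
	*out_len = *(const unsigned long *)mech->cm_param;
	if (*out_len > full_digest_len)
		return (-1);	/* requested MAC longer than the digest */
	return (0);
}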
Example #10
static int
sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
	uint_t sha_digest_len, sha_hmac_block_size;

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
	    crypto_kmflag(req));
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
	if (ctx_template != NULL) {
		/* reuse context template */
		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
		    sizeof (sha2_hmac_ctx_t));
	} else {
		/* no context template, compute context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			uchar_t digested_key[SHA512_DIGEST_LENGTH];
			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;

			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &hmac_ctx->hc_icontext,
			    key->ck_data, keylen_in_bytes, digested_key);
			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
			    digested_key, sha_digest_len);
		} else {
			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
			    key->ck_data, keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable.
	 */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			/* Only read cm_param once it has been validated. */
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
	}

	if (ret != CRYPTO_SUCCESS) {
		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
		ctx->cc_provider_private = NULL;
	}

	return (ret);
}
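The cm_type arithmetic above depends on the layout of the SHA2 mechanism enum: each digest family contributes three consecutive entries (plain, HMAC, HMAC_GENERAL), so cm_type / 3 picks the family and cm_type % 3 == 2 matches the *_HMAC_GEN_* types. The stand-in enum below uses that assumed ordering, not the real constants, to make the mapping visible:

#include <stdio.h>

/*
 * Stand-in for the SHA2 mechanism enum in the ordering the arithmetic
 * above assumes: three consecutive slots per digest family.
 */
typedef enum {
	STUB_SHA256_MECH,		/* 0 */
	STUB_SHA256_HMAC_MECH,		/* 1 */
	STUB_SHA256_HMAC_GEN_MECH,	/* 2 */
	STUB_SHA384_MECH,		/* 3 */
	STUB_SHA384_HMAC_MECH,		/* 4 */
	STUB_SHA384_HMAC_GEN_MECH,	/* 5 */
	STUB_SHA512_MECH,		/* 6 */
	STUB_SHA512_HMAC_MECH,		/* 7 */
	STUB_SHA512_HMAC_GEN_MECH	/* 8 */
} stub_sha2_mech_t;

int
main(void)
{
	for (int m = STUB_SHA256_MECH; m <= STUB_SHA512_HMAC_GEN_MECH; m++) {
		printf("mech %d: family %d, %s\n", m, m / 3,
		    (m % 3 == 2) ? "HMAC_GENERAL" : "not general");
	}
	return (0);
}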
Example #11
/* ARGSUSED */
static int
sha2_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
    crypto_req_handle_t req)
{
	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
	uint32_t sha_digest_len, sha_hmac_block_size;

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Allocate and initialize SHA2 context.
	 */
	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
	    crypto_kmflag(req));
	if (sha2_hmac_ctx_tmpl == NULL)
		return (CRYPTO_HOST_MEMORY);

	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;

	if (keylen_in_bytes > sha_hmac_block_size) {
		uchar_t digested_key[SHA512_DIGEST_LENGTH];

		/*
		 * Hash the passed-in key to get a smaller key.
		 * The inner context is used since it hasn't been
		 * initialized yet.
		 */
		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
		    &sha2_hmac_ctx_tmpl->hc_icontext,
		    key->ck_data, keylen_in_bytes, digested_key);
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
		    sha_digest_len);
	} else {
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
		    keylen_in_bytes);
	}

	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
	*ctx_template_size = sizeof (sha2_hmac_ctx_t);

	return (CRYPTO_SUCCESS);
}
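The sha2_mac_init_ctx() calls above (not shown here) are where the normalized key actually keys the HMAC state. A generic sketch of that step per RFC 2104, with placeholder digest functions rather than the module's SHA2 API:

#include <stddef.h>

#define	HMAC_IPAD	0x36
#define	HMAC_OPAD	0x5c
#define	MAX_BLOCK	128	/* large enough for SHA-512's 128-byte block */

typedef struct { int placeholder; } digest_ctx_t;	/* stand-in context */

static void digest_init(digest_ctx_t *ctx) { ctx->placeholder = 0; }
static void digest_update(digest_ctx_t *ctx, const void *p, size_t n)
{ (void) ctx; (void) p; (void) n; }

/*
 * Key the inner and outer digest contexts: XOR the block-sized key with
 * ipad and opad and absorb one padded block into each context.
 */
static void
hmac_key_contexts(digest_ctx_t *inner, digest_ctx_t *outer,
    const unsigned char *key_block, size_t block_size)
{
	unsigned char ipad[MAX_BLOCK], opad[MAX_BLOCK];
	size_t i;

	if (block_size > MAX_BLOCK)
		return;		/* sketch only handles common block sizes */
	for (i = 0; i < block_size; i++) {
		ipad[i] = key_block[i] ^ HMAC_IPAD;
		opad[i] = key_block[i] ^ HMAC_OPAD;
	}
	digest_init(inner);
	digest_update(inner, ipad, block_size);
	digest_init(outer);
	digest_update(outer, opad, block_size);
}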