Example 1
static int cipher_ctrl(EVP_CIPHER_CTX *ctx, int type, int p1, void* p2)
{
    struct cipher_ctx *cipher_ctx =
        (struct cipher_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx);
    EVP_CIPHER_CTX *to_ctx = (EVP_CIPHER_CTX *)p2;
    struct cipher_ctx *to_cipher_ctx;

    switch (type) {

    case EVP_CTRL_COPY:
        if (cipher_ctx == NULL)
            return 1;
        /* when copying the context, a new session needs to be initialized */
        to_cipher_ctx =
            (struct cipher_ctx *)EVP_CIPHER_CTX_get_cipher_data(to_ctx);
        memset(&to_cipher_ctx->sess, 0, sizeof(to_cipher_ctx->sess));
        return cipher_init(to_ctx, cipher_ctx->sess.key, EVP_CIPHER_CTX_iv(ctx),
                           (cipher_ctx->op == COP_ENCRYPT));

    case EVP_CTRL_INIT:
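        /* clear the session data so a fresh session is set up on the next init */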
        memset(&cipher_ctx->sess, 0, sizeof(cipher_ctx->sess));
        return 1;

    default:
        break;
    }

    return -1;
}
Example 2
static int
padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
    size_t chunk;

    /*
     * ctx->num is maintained in byte-oriented modes, such as CFB and OFB...
     */
    if ((chunk = EVP_CIPHER_CTX_num(ctx))) {   /* borrow chunk variable */
        unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);

        if (chunk >= AES_BLOCK_SIZE)
            return 0;           /* bogus value */

        while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
            *(out_arg++) = *(in_arg++) ^ ivp[chunk];
            chunk++, nbytes--;
        }

        EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
    }

    if (nbytes == 0)
        return 1;

    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);

    if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
        if (!padlock_ofb_encrypt(out_arg, in_arg, cdata, chunk))
            return 0;
        nbytes -= chunk;
    }

    if (nbytes) {
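        /*
         * Trailing partial block: encrypt the IV once to obtain a fresh
         * keystream block and XOR it into the remaining bytes.
         */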
        unsigned char *ivp = cdata->iv;

        out_arg += chunk;
        in_arg += chunk;
        EVP_CIPHER_CTX_set_num(ctx, nbytes);
        padlock_reload_key();   /* empirically found */
        padlock_aes_block(ivp, ivp, cdata);
        padlock_reload_key();   /* empirically found */
        while (nbytes) {
            *(out_arg++) = *(in_arg++) ^ *ivp;
            ivp++, nbytes--;
        }
    }

    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);

    return 1;
}
Example 3
static int
padlock_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
    int ret;

    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
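    /* run the whole buffer through the PadLock CBC primitive and, on success,
     * write the updated chaining IV back into the EVP context */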
    if ((ret = padlock_cbc_encrypt(out_arg, in_arg, cdata, nbytes)))
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
    return ret;
}
Example 4
static int afalg_do_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inl)
{
    afalg_ctx *actx;
    int ret;
    char nxtiv[ALG_AES_IV_LEN] = { 0 };

    if (ctx == NULL || out == NULL || in == NULL) {
        ALG_WARN("NULL parameter passed to function %s(%d)\n", __FILE__,
                 __LINE__);
        return 0;
    }

    actx = (afalg_ctx *) EVP_CIPHER_CTX_get_cipher_data(ctx);
    if (actx == NULL || actx->init_done != MAGIC_INIT_NUM) {
        ALG_WARN("%s afalg ctx passed\n",
                 actx == NULL ? "NULL" : "Uninitialised");
        return 0;
    }

    /*
     * set iv now for decrypt operation as the input buffer can be
     * overwritten for inplace operation where in = out.
     */
    if (EVP_CIPHER_CTX_encrypting(ctx) == 0) {
        memcpy(nxtiv, in + (inl - ALG_AES_IV_LEN), ALG_AES_IV_LEN);
    }

    /* Send input data to kernel space */
    ret = afalg_start_cipher_sk(actx, (unsigned char *)in, inl,
                                EVP_CIPHER_CTX_iv(ctx),
                                EVP_CIPHER_CTX_encrypting(ctx));
    if (ret < 1) {
        return 0;
    }

    /* Perform async crypto operation in kernel space */
    ret = afalg_fin_cipher_aio(&actx->aio, actx->sfd, out, inl);
    if (ret < 1)
        return 0;

    if (EVP_CIPHER_CTX_encrypting(ctx)) {
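        /* CBC chaining: the last ciphertext block just produced becomes the next IV */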
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), out + (inl - ALG_AES_IV_LEN),
               ALG_AES_IV_LEN);
    } else {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), nxtiv, ALG_AES_IV_LEN);
    }

    return 1;
}
Example 5
int
cipher_get_keyiv(struct sshcipher_ctx *cc, u_char *iv, u_int len)
{
#ifdef WITH_OPENSSL
	const struct sshcipher *c = cc->cipher;
	int evplen;
#endif

	if ((cc->cipher->flags & CFLAG_CHACHAPOLY) != 0) {
		if (len != 0)
			return SSH_ERR_INVALID_ARGUMENT;
		return 0;
	}
	if ((cc->cipher->flags & CFLAG_AESCTR) != 0) {
		if (len != sizeof(cc->ac_ctx.ctr))
			return SSH_ERR_INVALID_ARGUMENT;
		memcpy(iv, cc->ac_ctx.ctr, len);
		return 0;
	}
	if ((cc->cipher->flags & CFLAG_NONE) != 0)
		return 0;

#ifdef WITH_OPENSSL
	evplen = EVP_CIPHER_CTX_iv_length(cc->evp);
	if (evplen == 0)
		return 0;
	else if (evplen < 0)
		return SSH_ERR_LIBCRYPTO_ERROR;
	if ((u_int)evplen != len)
		return SSH_ERR_INVALID_ARGUMENT;
	if (cipher_authlen(c)) {
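		/* AEAD (GCM) mode: fetch the current invocation IV via the EVP control */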
		if (!EVP_CIPHER_CTX_ctrl(cc->evp, EVP_CTRL_GCM_IV_GEN,
		    len, iv))
			return SSH_ERR_LIBCRYPTO_ERROR;
	} else
		memcpy(iv, EVP_CIPHER_CTX_iv(cc->evp), len);
#endif
	return 0;
}
Example 6
static int
padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
    size_t chunk;

    if ((chunk = EVP_CIPHER_CTX_num(ctx))) {   /* borrow chunk variable */
        unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);

        if (chunk >= AES_BLOCK_SIZE)
            return 0;           /* bogus value */

        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
                ivp[chunk] = *(out_arg++) = *(in_arg++) ^ ivp[chunk];
                chunk++, nbytes--;
            }
        } else {
            while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
                unsigned char c = *(in_arg++);
                *(out_arg++) = c ^ ivp[chunk];
                ivp[chunk++] = c, nbytes--;
            }
        }

        EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
    }

    if (nbytes == 0)
        return 1;

    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);

    if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
        if (!padlock_cfb_encrypt(out_arg, in_arg, cdata, chunk))
            return 0;
        nbytes -= chunk;
    }

    if (nbytes) {
        unsigned char *ivp = cdata->iv;

        out_arg += chunk;
        in_arg += chunk;
        EVP_CIPHER_CTX_set_num(ctx, nbytes);
        if (cdata->cword.b.encdec) {
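            /*
             * Decrypting: CFB keystream still comes from running the block
             * cipher in the encrypt direction, so clear the encdec bit
             * around the PadLock block call and restore it afterwards.
             */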
            cdata->cword.b.encdec = 0;
            padlock_reload_key();
            padlock_aes_block(ivp, ivp, cdata);
            cdata->cword.b.encdec = 1;
            padlock_reload_key();
            while (nbytes) {
                unsigned char c = *(in_arg++);
                *(out_arg++) = c ^ *ivp;
                *(ivp++) = c, nbytes--;
            }
        } else {
            padlock_reload_key();
            padlock_aes_block(ivp, ivp, cdata);
            padlock_reload_key();
            while (nbytes) {
                *ivp = *(out_arg++) = *(in_arg++) ^ *ivp;
                ivp++, nbytes--;
            }
        }
    }

    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);

    return 1;
}
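All of the examples above follow the same pattern: read the IV currently held by the context with EVP_CIPHER_CTX_iv() before the operation, and write the updated IV back with EVP_CIPHER_CTX_iv_noconst() afterwards. The sketch below shows only that pattern, against the OpenSSL 1.1.x EVP API; iv_save_restore() and do_one_pass() are hypothetical names used for illustration and do not appear in any of the sources above.

#include <string.h>
#include <openssl/evp.h>

/*
 * Sketch only: snapshot the IV held by an initialised EVP_CIPHER_CTX,
 * let a caller-supplied primitive advance it, then publish the updated
 * IV back into the context. do_one_pass() is a hypothetical stand-in
 * for the engine-specific cipher call used in the examples above.
 */
static int iv_save_restore(EVP_CIPHER_CTX *ctx,
                           int (*do_one_pass)(unsigned char *iv, int ivlen))
{
    unsigned char iv[EVP_MAX_IV_LENGTH];
    int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    if (ivlen <= 0 || ivlen > (int)sizeof(iv))
        return 0;                   /* nothing to do for IV-less modes */

    /* read the current IV out of the context (const view) */
    memcpy(iv, EVP_CIPHER_CTX_iv(ctx), ivlen);

    if (!do_one_pass(iv, ivlen))    /* the primitive may advance the IV */
        return 0;

    /* write the updated IV back into the context */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, ivlen);
    return 1;
}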