/*
 * aesni_xts_init_key - initialise an AES-XTS context using AES-NI.
 *
 * The supplied key is two concatenated AES keys: ctx->key_len covers both
 * halves, which is why each half is expanded with key_len * 4 bits rather
 * than key_len * 8.  Key and IV may arrive in separate calls, so each is
 * handled independently.  Always returns 1.
 */
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xts_ctx = ctx->cipher_data;

    if (key == NULL && iv == NULL)
        return 1;

    if (key != NULL) {
        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, ctx->key_len * 4, &xts_ctx->ks1);
            xts_ctx->xts.block1 = (block128_f) aesni_encrypt;
            xts_ctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, ctx->key_len * 4, &xts_ctx->ks1);
            xts_ctx->xts.block1 = (block128_f) aesni_decrypt;
            xts_ctx->stream = aesni_xts_decrypt;
        }

        /* The tweak half is always an encryption schedule. */
        aesni_set_encrypt_key(key + ctx->key_len / 2, ctx->key_len * 4,
                              &xts_ctx->ks2);
        xts_ctx->xts.block2 = (block128_f) aesni_encrypt;
        xts_ctx->xts.key1 = &xts_ctx->ks1;
    }

    if (iv != NULL) {
        xts_ctx->xts.key2 = &xts_ctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}
// aesni_gcm_init_key initialises an AES-GCM context using AES-NI.  |key| and
// |iv| are each optional so they may be supplied in separate calls; an IV
// saved earlier (gctx->iv_set) is replayed when a key arrives without one.
// Always returns one.
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gcm_ctx = ctx->cipher_data;

  if (key == NULL && iv == NULL) {
    return 1;
  }

  if (key == NULL) {
    // No key yet: if one was set before, apply the IV now; otherwise save it.
    if (gcm_ctx->key_set) {
      CRYPTO_gcm128_setiv(&gcm_ctx->gcm, &gcm_ctx->ks.ks, iv, gcm_ctx->ivlen);
    } else {
      OPENSSL_memcpy(gcm_ctx->iv, iv, gcm_ctx->ivlen);
    }
    gcm_ctx->iv_set = 1;
    gcm_ctx->iv_gen = 0;
    return 1;
  }

  aesni_set_encrypt_key(key, ctx->key_len * 8, &gcm_ctx->ks.ks);
  CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks, (block128_f)aesni_encrypt,
                     1);
  gcm_ctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;

  // If we have an iv can set it directly, otherwise use
  // saved IV.
  if (iv == NULL && gcm_ctx->iv_set) {
    iv = gcm_ctx->iv;
  }
  if (iv != NULL) {
    CRYPTO_gcm128_setiv(&gcm_ctx->gcm, &gcm_ctx->ks.ks, iv, gcm_ctx->ivlen);
    gcm_ctx->iv_set = 1;
  }
  gcm_ctx->key_set = 1;
  return 1;
}
// aesni_init_key expands |key| into the context's AES-NI key schedule and
// selects the block/stream implementations for the configured mode.  ECB and
// CBC decryption need a decryption schedule; every other mode (and all
// encryption) uses the encryption schedule.  Returns one on success and zero
// if the key setup fails.
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                          const uint8_t *iv, int enc) {
  EVP_AES_KEY *akey = (EVP_AES_KEY *)ctx->cipher_data;
  const int mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  int ret;

  const int decrypt_schedule =
      !enc && (mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE);

  if (decrypt_schedule) {
    ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    akey->block = (block128_f)aesni_decrypt;
    akey->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
  } else {
    ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    akey->block = (block128_f)aesni_encrypt;
    switch (mode) {
      case EVP_CIPH_CBC_MODE:
        akey->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
        break;
      case EVP_CIPH_CTR_MODE:
        akey->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
        break;
      default:
        akey->stream.cbc = NULL;
        break;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aesni_init_key, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }
  return 1;
}
/*
 * aesni_cbc_hmac_sha256_init_key - initialise the stitched AES-CBC+HMAC-SHA256
 * cipher context.
 *
 * Expands |inkey| into an AES-NI encryption or decryption schedule according
 * to |enc|, then resets all three SHA-256 states (head/tail/md) and marks the
 * payload length as unset.  Returns 1 on success, 0 if key expansion fails.
 *
 * Fix: the original used comma-operator sequencing inside unbraced if/else
 * branches (`memset(...), ret = ...;`), which is error-prone to maintain;
 * rewritten as braced statements with identical behavior.
 */
static int aesni_cbc_hmac_sha256_init_key(EVP_CIPHER_CTX *ctx,
                                          const unsigned char *inkey,
                                          const unsigned char *iv, int enc)
{
    EVP_AES_HMAC_SHA256 *key = data(ctx);
    int ret;

    if (enc) {
        /* Zero the round keys first so any unused tail is deterministic. */
        memset(&key->ks, 0, sizeof(key->ks.rd_key));
        ret = aesni_set_encrypt_key(inkey,
                                    EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &key->ks);
    } else {
        ret = aesni_set_decrypt_key(inkey,
                                    EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &key->ks);
    }

    SHA256_Init(&key->head);    /* handy when benchmarking */
    key->tail = key->head;
    key->md = key->head;

    key->payload_length = NO_PAYLOAD_LENGTH;

    return ret < 0 ? 0 : 1;
}
/*
 * aesni_gcm_init_key - initialise an AES-GCM context using AES-NI.
 *
 * Key and IV are each optional so callers can supply them across separate
 * EVP init calls; a previously saved IV is replayed once the key arrives.
 * Always returns 1.
 */
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gcm_ctx = ctx->cipher_data;

    if (key == NULL && iv == NULL)
        return 1;

    if (key == NULL) {
        /* If key set use IV, otherwise copy */
        if (gcm_ctx->key_set)
            CRYPTO_gcm128_setiv(&gcm_ctx->gcm, iv, gcm_ctx->ivlen);
        else
            memcpy(gcm_ctx->iv, iv, gcm_ctx->ivlen);
        gcm_ctx->iv_set = 1;
        gcm_ctx->iv_gen = 0;
        return 1;
    }

    aesni_set_encrypt_key(key, ctx->key_len * 8, &gcm_ctx->ks);
    CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks, (block128_f)aesni_encrypt);
    gcm_ctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;

    /* If we have an iv can set it directly, otherwise use
     * saved IV. */
    if (iv == NULL && gcm_ctx->iv_set)
        iv = gcm_ctx->iv;
    if (iv != NULL) {
        CRYPTO_gcm128_setiv(&gcm_ctx->gcm, iv, gcm_ctx->ivlen);
        gcm_ctx->iv_set = 1;
    }
    gcm_ctx->key_set = 1;
    return 1;
}
/* Expand a 256-bit key (32 bytes, hence 32 * 8 bits) into an AES-NI
 * encryption schedule stored in the opaque context. */
static void
x86_aes256_set_encrypt_key(void *_ctx, const uint8_t * key)
{
  AES_KEY *aes = _ctx;

  aesni_set_encrypt_key(key, 32 * 8, aes);
}
/*
 * aesni_init_key - expand |key| and wire up the AES-NI block/stream routines
 * for the cipher's mode.  ECB/CBC decryption needs a decryption schedule;
 * all other cases use the encryption schedule.  Returns 1 on success and
 * 0 (with an EVP error pushed) if key expansion fails.
 */
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    EVP_AES_KEY *akey = (EVP_AES_KEY *)ctx->cipher_data;
    const int mode = ctx->cipher->flags & EVP_CIPH_MODE;
    int ret;

    if (!enc && (mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)) {
        ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        akey->block = (block128_f)aesni_decrypt;
        akey->stream.cbc = (mode == EVP_CIPH_CBC_MODE)
            ? (cbc128_f)aesni_cbc_encrypt
            : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        akey->block = (block128_f)aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE) {
            akey->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
        } else if (mode == EVP_CIPH_CTR_MODE) {
            akey->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
        } else {
            akey->stream.cbc = NULL;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}
/* Install the AES key for the CCM cipher context.  Always reports success.
 *
 * NOTE(review): the aesni_set_encrypt_key() return value is discarded;
 * presumably |length| has already been validated by the caller — confirm. */
static int
aes_ccm_cipher_setkey(void *_ctx, const void *key, size_t length)
{
  struct ccm_x86_aes_ctx *ccm = _ctx;

  aesni_set_encrypt_key(key, length * 8, &ccm->key);
  return 0;
}
/* Expand a |length|-byte key into an AES-NI encryption schedule stored in
 * the opaque context (length is bytes, hence the * 8 to bits). */
static void
x86_aes_set_encrypt_key(void *_ctx, unsigned length, const uint8_t * key)
{
  AES_KEY *aes = _ctx;

  aesni_set_encrypt_key(key, length * 8, aes);
}
// aes_ctr_set_key expands |key| with the best available AES implementation,
// optionally initialising |gcm_ctx| and reporting the block function through
// |out_block|.  It returns a CTR-mode function when the implementation
// provides one, or NULL when the caller must fall back to the generic CTR
// path built on the block function.
ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  const unsigned key_bits = key_bytes * 8;

  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_bits, aes_key);
    if (gcm_ctx != NULL) {
      // The final argument flags the AES-NI GHASH path.
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt, 1);
    }
    if (out_block != NULL) {
      *out_block = (block128_f)aesni_encrypt;
    }
    return (ctr128_f)aesni_ctr32_encrypt_blocks;
  }

  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_bits, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_hw_encrypt, 0);
    }
    if (out_block != NULL) {
      *out_block = (block128_f)aes_hw_encrypt;
    }
    return (ctr128_f)aes_hw_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    // bsaes reuses the portable key schedule but supplies its own CTR loop.
    AES_set_encrypt_key(key, key_bits, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
    }
    if (out_block != NULL) {
      *out_block = (block128_f)AES_encrypt;
    }
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_bits, aes_key);
    if (out_block != NULL) {
      *out_block = (block128_f)vpaes_encrypt;
    }
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt, 0);
    }
    return NULL;  // No dedicated CTR implementation.
  }

  // Portable fallback.
  AES_set_encrypt_key(key, key_bits, aes_key);
  if (gcm_ctx != NULL) {
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
  }
  if (out_block != NULL) {
    *out_block = (block128_f)AES_encrypt;
  }
  return NULL;
}
/*
 * aead_aes_gcm_init - allocate and initialise AES-GCM AEAD state.
 *
 * Accepts only 128- or 256-bit keys and tags no longer than
 * EVP_AEAD_AES_GCM_TAG_LEN; EVP_AEAD_DEFAULT_TAG_LENGTH selects the
 * maximum tag.  On success the new state is stored in ctx->aead_state and
 * 1 is returned; on any failure 0 is returned.
 */
static int
aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const unsigned char *key, size_t key_len,
                  size_t tag_len)
{
    struct aead_aes_gcm_ctx *gcm_ctx;
    const size_t key_bits = key_len * 8;

    /* EVP_AEAD_CTX_init should catch this. */
    if (key_bits != 128 && key_bits != 256) {
        EVPerr(EVP_F_AEAD_AES_GCM_INIT, EVP_R_BAD_KEY_LENGTH);
        return 0;
    }

    if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH)
        tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
    if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
        EVPerr(EVP_F_AEAD_AES_GCM_INIT, EVP_R_TAG_TOO_LARGE);
        return 0;
    }

    gcm_ctx = malloc(sizeof(struct aead_aes_gcm_ctx));
    if (gcm_ctx == NULL)
        return 0;

#ifdef AESNI_CAPABLE
    if (AESNI_CAPABLE) {
        aesni_set_encrypt_key(key, key_bits, &gcm_ctx->ks.ks);
        CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
            (block128_f)aesni_encrypt);
        gcm_ctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    } else
#endif
    {
        gcm_ctx->ctr = aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm,
            key, key_len);
    }

    gcm_ctx->tag_len = tag_len;
    ctx->aead_state = gcm_ctx;

    return 1;
}
/*
 * aesni_ccm_init_key - initialise an AES-CCM context using AES-NI.
 *
 * Key and IV may be delivered in separate calls; each sets its own
 * "*_set" flag.  The nonce copied into ctx->iv is 15 - L bytes long,
 * per the CCM construction.  Always returns 1.
 */
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *ccm_ctx = ctx->cipher_data;

    if (key == NULL && iv == NULL)
        return 1;

    if (key != NULL) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &ccm_ctx->ks);
        CRYPTO_ccm128_init(&ccm_ctx->ccm, ccm_ctx->M, ccm_ctx->L,
                           &ccm_ctx->ks, (block128_f) aesni_encrypt);
        if (enc)
            ccm_ctx->str = (ccm128_f) aesni_ccm64_encrypt_blocks;
        else
            ccm_ctx->str = (ccm128_f) aesni_ccm64_decrypt_blocks;
        ccm_ctx->key_set = 1;
    }

    if (iv != NULL) {
        memcpy(ctx->iv, iv, 15 - ccm_ctx->L);
        ccm_ctx->iv_set = 1;
    }

    return 1;
}
/*
 * aesni_cbc_hmac_sha1_init_key - initialise the stitched AES-CBC+HMAC-SHA1
 * cipher context: expand the AES key for the requested direction and reset
 * the three SHA-1 states.  Returns 1 on success, 0 if key expansion fails.
 *
 * NOTE(review): payload_length is initialised to 0 here, while the SHA-256
 * sibling uses the NO_PAYLOAD_LENGTH sentinel — confirm 0 is the intended
 * "unset" value in this variant.
 */
static int aesni_cbc_hmac_sha1_init_key(EVP_CIPHER_CTX *ctx,
                                        const unsigned char *inkey,
                                        const unsigned char *iv, int enc)
{
    EVP_AES_HMAC_SHA1 *key = data(ctx);
    int ret;

    if (enc)
        ret = aesni_set_encrypt_key(inkey, ctx->key_len * 8, &key->ks);
    else
        ret = aesni_set_decrypt_key(inkey, ctx->key_len * 8, &key->ks);

    SHA1_Init(&key->head);      /* handy when benchmarking */
    key->tail = key->head;
    key->md = key->head;

    key->payload_length = 0;

    return ret < 0 ? 0 : 1;
}
/*
 * aesni_init_key - expand |user_key| into the (aligned) AES-NI key schedule.
 * CFB and OFB always use the encryption schedule (they only ever encrypt the
 * keystream), as does encryption in any mode; only the remaining decryption
 * cases need a decryption schedule.  Returns 1 on success, 0 (with an EVP
 * error pushed) on failure.
 */
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *user_key,
                          const unsigned char *iv, int enc)
{
    AES_KEY *key = AESNI_ALIGN(ctx->cipher_data);
    const unsigned long mode = ctx->cipher->flags & EVP_CIPH_MODE;
    int ret;

    if (enc || mode == EVP_CIPH_CFB_MODE || mode == EVP_CIPH_OFB_MODE)
        ret = aesni_set_encrypt_key(user_key, ctx->key_len * 8, key);
    else
        ret = aesni_set_decrypt_key(user_key, ctx->key_len * 8, key);

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
/* Expand |userkey| into the 16-byte-aligned schedule for this context,
 * choosing the encrypt or decrypt schedule by the context's direction.
 * Returns 0 on success or GNUTLS_E_ENCRYPTION_FAILED if expansion fails. */
static int
aes_cipher_setkey(void *_ctx, const void *userkey, size_t keysize)
{
  struct aes_ctx *aes = _ctx;
  int ret;

  CHECK_AES_KEYSIZE(keysize);

  ret = aes->enc
      ? aesni_set_encrypt_key(userkey, keysize * 8,
                              ALIGN16(&aes->expanded_key))
      : aesni_set_decrypt_key(userkey, keysize * 8,
                              ALIGN16(&aes->expanded_key));
  if (ret != 0)
    return gnutls_assert_val(GNUTLS_E_ENCRYPTION_FAILED);

  return 0;
}
// aead_aes_gcm_init allocates and initialises AES-GCM AEAD state.  Only
// 128- and 256-bit keys are accepted (EVP_AEAD_CTX_init should already have
// rejected anything else), and the tag may not exceed
// EVP_AEAD_AES_GCM_TAG_LEN.  Returns one on success, zero on failure.
static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;
  struct aead_aes_gcm_ctx *gcm_ctx;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }
  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
  if (gcm_ctx == NULL) {
    return 0;
  }

  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_bits, &gcm_ctx->ks.ks);
    CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
                       (block128_f)aesni_encrypt);
    gcm_ctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
  } else {
    gcm_ctx->ctr =
        aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, key, key_len);
  }

  gcm_ctx->tag_len = tag_len;
  ctx->aead_state = gcm_ctx;

  return 1;
}
// aes_ctr_set_key expands |key| with the fastest compiled-in AES backend,
// optionally initialising |gcm_ctx| and exporting the block function via
// |out_block|.  Returns the backend's CTR-mode routine, or NULL when the
// caller must synthesise CTR from the block function.
static ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                                block128_f *out_block, const uint8_t *key,
                                size_t key_len) {
#if defined(AESNI)
  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt);
    }
    if (out_block != NULL) {
      *out_block = (block128_f)aesni_encrypt;
    }
    return (ctr128_f)aesni_ctr32_encrypt_blocks;
  }
#endif

#if defined(HWAES)
  if (hwaes_capable()) {
    aes_v8_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_v8_encrypt);
    }
    if (out_block != NULL) {
      *out_block = (block128_f)aes_v8_encrypt;
    }
    return (ctr128_f)aes_v8_ctr32_encrypt_blocks;
  }
#endif

#if defined(BSAES)
  if (bsaes_capable()) {
    // bsaes shares the portable key schedule but has its own CTR loop.
    AES_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    }
    if (out_block != NULL) {
      *out_block = (block128_f)AES_encrypt;
    }
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }
#endif

#if defined(VPAES)
  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_len * 8, aes_key);
    if (out_block != NULL) {
      *out_block = (block128_f)vpaes_encrypt;
    }
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
    }
    return NULL;  // No dedicated CTR implementation.
  }
#endif

  // Portable fallback.
  AES_set_encrypt_key(key, key_len * 8, aes_key);
  if (gcm_ctx != NULL) {
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
  }
  if (out_block != NULL) {
    *out_block = (block128_f)AES_encrypt;
  }
  return NULL;
}