/*
 * Initialize a per-message GCM context: reset the running lengths and tag,
 * then derive the pre-counter block J0 from the IV (NIST SP 800-38D):
 *
 *   - 12-byte IV (recommended size): J0 = IV || 0^31 || 1
 *   - any other length:              J0 = GHASH_H(IV zero-padded || [len(IV)]64)
 *
 * gcm  supplies the hash subkey H (must already be set, e.g. by tmd_aes_gcm_init)
 * key  is not needed for IV processing; kept for API symmetry with the other
 *      tmd_* entry points
 * iv   points to `len` bytes of caller-supplied IV with no alignment guarantee
 */
void tmd_aes_ctx_init(const aes_gcm *gcm, aes_ctx *ctx, const aes_key *key,
                      const uint8_t *iv, uint32_t len)
{
    (void) key; /* unused: J0 depends only on H and the IV */

    ctx->length_aad = 0;
    ctx->length_input = 0;
    block128_zero(&ctx->tag);
    block128_zero(&ctx->iv);

    if (len == 12) {
        /* 96-bit IV fast path: J0 = IV || 0x00000001, no GHASH needed */
        block128_copy_bytes(&ctx->iv, iv, 12);
        ctx->iv.b[15] = 0x01;
    } else {
        uint32_t origlen = len << 3; /* IV length in bits */
        int i;

        /* GHASH the IV 16 bytes at a time. Copy each block through an
         * aligned temporary: `iv` is a raw byte pointer, so casting it to
         * block128* would be a strict-aliasing / misaligned-access hazard. */
        for (; len >= 16; len -= 16, iv += 16) {
            block128 blk;
            memcpy(&blk, iv, 16);
            block128_xor(&ctx->iv, &blk);
            tmd_gf_mul(&ctx->iv, &gcm->h);
        }
        if (len > 0) {
            /* trailing partial block, implicitly zero-padded */
            block128_xor_bytes(&ctx->iv, iv, len);
            tmd_gf_mul(&ctx->iv, &gcm->h);
        }
        /* fold in the length block: bit count, big-endian, low bytes of
         * the 128-bit block (origlen is 32-bit, so IVs are < 2^29 bytes) */
        for (i = 15; origlen; --i, origlen >>= 8)
            ctx->iv.b[i] ^= (uint8_t) origlen;
        tmd_gf_mul(&ctx->iv, &gcm->h);
    }
    /* civ is the running CTR counter; iv keeps J0 for the final tag step */
    block128_copy(&ctx->civ, &ctx->iv);
}
/*
 * Legacy all-in-one GCM initialization on the stateful aes_gcm object:
 * resets lengths/tag, stores the key, derives the hash subkey
 * H = E_K(0^128), and computes the pre-counter block J0 from the IV
 * exactly as in NIST SP 800-38D (see tmd_aes_ctx_init for the two IV
 * cases). civ becomes the running counter; iv retains J0.
 */
void aes_gcm_init(aes_gcm *gcm, aes_key *key, uint8_t *iv, uint32_t len)
{
    gcm->length_aad = 0;
    gcm->length_input = 0;
    block128_zero(&gcm->h);
    block128_zero(&gcm->tag);
    block128_zero(&gcm->iv);

    memcpy(&gcm->key, key, sizeof(aes_key));

    /* prepare H : encrypt_K(0^128) */
    aes_encrypt_block(&gcm->h, key, &gcm->h);

    if (len == 12) {
        /* 96-bit IV fast path: J0 = IV || 0x00000001 */
        block128_copy_bytes(&gcm->iv, iv, 12);
        gcm->iv.b[15] = 0x01;
    } else {
        uint32_t origlen = len << 3; /* IV length in bits */
        int i;

        /* GHASH the IV block-wise; copy through an aligned temporary
         * instead of casting the raw byte pointer to block128*
         * (strict-aliasing / misalignment UB otherwise). */
        for (; len >= 16; len -= 16, iv += 16) {
            block128 blk;
            memcpy(&blk, iv, 16);
            block128_xor(&gcm->iv, &blk);
            gf_mul(&gcm->iv, &gcm->h);
        }
        if (len > 0) {
            /* trailing partial block, implicitly zero-padded */
            block128_xor_bytes(&gcm->iv, iv, len);
            gf_mul(&gcm->iv, &gcm->h);
        }
        /* fold in the big-endian bit-length block */
        for (i = 15; origlen; --i, origlen >>= 8)
            gcm->iv.b[i] ^= (uint8_t) origlen;
        gf_mul(&gcm->iv, &gcm->h);
    }
    block128_copy(&gcm->civ, &gcm->iv);
}
/*
 * GCM decryption: CTR-mode decrypt of `input` into `output` while folding
 * the CIPHERTEXT into the GHASH accumulator (decrypt authenticates what
 * was received, so GHASH runs before the XOR).
 *
 * The caller's ctx is left untouched; the advanced state (counter, GHASH
 * tag, input length) is written to newCTX. input/output are raw byte
 * pointers with no alignment guarantee, so all 16-byte block traffic goes
 * through aligned temporaries via memcpy rather than block128* casts
 * (which would be strict-aliasing / misaligned-access UB).
 */
void tmd_aes_generic_gcm_decrypt(uint8_t *output, const aes_gcm *gcm,
                                 const aes_ctx *ctx, const aes_key *key,
                                 const uint8_t *input, uint32_t length,
                                 aes_ctx *newCTX)
{
    aes_block out;

    memcpy(newCTX, ctx, sizeof(aes_ctx));
    newCTX->length_input += length;

    for (; length >= 16; input += 16, output += 16, length -= 16) {
        aes_block cipher;

        block128_inc_be(&newCTX->civ);
        aes_encrypt_block(&out, key, &newCTX->civ);
        /* aligned copy of the ciphertext block */
        memcpy(&cipher, input, 16);
        gcm_ghash_add(gcm, newCTX, &cipher); /* GHASH over ciphertext */
        block128_xor(&out, &cipher);         /* keystream XOR ciphertext */
        memcpy(output, &out, 16);
    }
    if (length > 0) {
        aes_block tmp;
        int i;

        block128_inc_be(&newCTX->civ);
        /* zero-pad the final partial ciphertext block for GHASH */
        block128_zero(&tmp);
        block128_copy_bytes(&tmp, input, length);
        gcm_ghash_add(gcm, newCTX, &tmp);
        aes_encrypt_block(&out, key, &newCTX->civ);
        block128_xor_bytes(&tmp, out.b, length);

        for (i = 0; i < length; i++) {
            output[i] = tmp.b[i];
        }
    }
}
/*
 * Legacy stateful GCM decryption: CTR-decrypt `input` into `output`,
 * folding the CIPHERTEXT into the GHASH state held inside `gcm` (GHASH
 * runs before the keystream XOR, as required for decryption).
 *
 * input/output are raw byte pointers with no alignment guarantee, so
 * 16-byte block traffic goes through aligned temporaries via memcpy
 * instead of block128* casts (strict-aliasing / misalignment UB).
 */
void aes_gcm_decrypt(uint8_t *output, aes_gcm *gcm, uint8_t *input,
                     uint32_t length)
{
    aes_block out;

    gcm->length_input += length;

    for (; length >= 16; input += 16, output += 16, length -= 16) {
        aes_block cipher;

        block128_inc_be(&gcm->civ);
        aes_encrypt_block(&out, &gcm->key, &gcm->civ);
        /* aligned copy of the ciphertext block */
        memcpy(&cipher, input, 16);
        gcm_ghash_add(gcm, &cipher);  /* GHASH over ciphertext */
        block128_xor(&out, &cipher);  /* keystream XOR ciphertext */
        memcpy(output, &out, 16);
    }
    if (length > 0) {
        aes_block tmp;
        int i;

        block128_inc_be(&gcm->civ);
        /* zero-pad the final partial ciphertext block for GHASH */
        block128_zero(&tmp);
        block128_copy_bytes(&tmp, input, length);
        gcm_ghash_add(gcm, &tmp);
        aes_encrypt_block(&out, &gcm->key, &gcm->civ);
        block128_xor_bytes(&tmp, out.b, length);

        for (i = 0; i < length; i++) {
            output[i] = tmp.b[i];
        }
    }
}
/*
 * Absorb additional authenticated data (AAD) into the GHASH accumulator
 * held in ctx. Full 16-byte blocks are hashed directly; a trailing
 * partial block is zero-padded first (NIST SP 800-38D padding).
 *
 * `input` is a raw byte pointer with no alignment guarantee, so each
 * full block is copied into an aligned temporary rather than cast to
 * block128* (strict-aliasing / misaligned-access UB otherwise).
 */
void tmd_aes_gcm_aad(const aes_gcm *gcm, aes_ctx *ctx, const uint8_t *input,
                     uint32_t length)
{
    ctx->length_aad += length;

    for (; length >= 16; input += 16, length -= 16) {
        aes_block blk;
        memcpy(&blk, input, 16);
        gcm_ghash_add(gcm, ctx, &blk);
    }
    if (length > 0) {
        aes_block tmp;
        block128_zero(&tmp);
        block128_copy_bytes(&tmp, input, length);
        gcm_ghash_add(gcm, ctx, &tmp);
    }
}
/*
 * Legacy stateful variant: absorb additional authenticated data (AAD)
 * into the GHASH state held inside `gcm`. Full 16-byte blocks are hashed
 * directly; a trailing partial block is zero-padded first.
 *
 * `input` has no alignment guarantee, so each full block is copied into
 * an aligned temporary instead of being cast to block128*
 * (strict-aliasing / misaligned-access UB otherwise).
 */
void aes_gcm_aad(aes_gcm *gcm, uint8_t *input, uint32_t length)
{
    gcm->length_aad += length;

    for (; length >= 16; input += 16, length -= 16) {
        aes_block blk;
        memcpy(&blk, input, 16);
        gcm_ghash_add(gcm, &blk);
    }
    if (length > 0) {
        aes_block tmp;
        block128_zero(&tmp);
        block128_copy_bytes(&tmp, input, length);
        gcm_ghash_add(gcm, &tmp);
    }
}
/*
 * One-time per-key GCM setup: derive the GHASH subkey by encrypting the
 * all-zero block, H = E_K(0^128). Per-message state (IV, counter, tag)
 * lives in aes_ctx and is set up separately by tmd_aes_ctx_init.
 */
void tmd_aes_gcm_init(aes_gcm *gcm, const aes_key *key)
{
    /* start from 0^128, then encrypt it in place to obtain H */
    block128_zero(&gcm->h);
    aes_encrypt_block(&gcm->h, key, &gcm->h);
}