static int wrap_nettle_cipher_setiv(void *_ctx, const void *iv, size_t ivsize) { struct nettle_cipher_ctx *ctx = _ctx; switch (ctx->algo) { case GNUTLS_CIPHER_AES_128_GCM: case GNUTLS_CIPHER_AES_256_GCM: gcm_aes_set_iv(&ctx->ctx.aes_gcm, ivsize, iv); break; case GNUTLS_CIPHER_CAMELLIA_128_GCM: case GNUTLS_CIPHER_CAMELLIA_256_GCM: _gcm_camellia_set_iv(&ctx->ctx.camellia_gcm, ivsize, iv); break; case GNUTLS_CIPHER_SALSA20_256: case GNUTLS_CIPHER_ESTREAM_SALSA20_256: if (ivsize != SALSA20_IV_SIZE) return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST); salsa20_set_iv(&ctx->ctx.salsa20, iv); break; default: if (ivsize > ctx->block_size) return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST); memcpy(ctx->iv, iv, ivsize); } return 0; }
/* Set the IV on an initialized cipher context.  Returns zero on success
 * or GNUTLS_E_INVALID_REQUEST when the IV length does not suit the
 * selected algorithm. */
static int
wrap_nettle_cipher_setiv (void *_ctx, const void *iv, size_t ivsize)
{
	struct nettle_cipher_ctx *ctx = _ctx;

	if (ctx->algo == GNUTLS_CIPHER_AES_128_GCM ||
	    ctx->algo == GNUTLS_CIPHER_AES_256_GCM) {
		/* GCM mode accepts only the default 96-bit nonce. */
		if (ivsize != GCM_DEFAULT_NONCE_SIZE) {
			gnutls_assert ();
			return GNUTLS_E_INVALID_REQUEST;
		}
		gcm_aes_set_iv (&ctx->ctx.aes_gcm, GCM_DEFAULT_NONCE_SIZE, iv);
	} else {
		/* All other ciphers: the IV is stored in the context's
		 * block-sized buffer, so it must not exceed block_size. */
		if (ivsize > ctx->block_size) {
			gnutls_assert ();
			return GNUTLS_E_INVALID_REQUEST;
		}
		memcpy (ctx->iv, iv, ivsize);
	}

	return 0;
}
/* Adapter matching nettle's generic set-IV callback signature: forwards
 * the fixed-size (GCM_IV_SIZE, 12-byte) nonce to gcm_aes_set_iv(). */
static void
gcm_unified_aes128_set_iv (void *ctx, const uint8_t *iv)
{
	gcm_aes_set_iv (ctx, GCM_IV_SIZE, iv);
}