Example #1
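Detects a VIA PadLock engine and registers its accelerated primitives: AES-128 CBC and GCM at priority 80, plus (when built with libnettle) the remaining AES key sizes and the PadLock SHA ("PHE") digests and HMACs. The incremental "nano" SHA implementations are preferred when partial-update support is detected; otherwise the original, non-incremental PHE code is registered.
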
static
void register_x86_padlock_crypto(unsigned capabilities)
{
	int ret, phe;
	unsigned edx;

	if (check_via() == 0)
		return;

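	/* A zero capability mask means auto-detect from the CPU; a non-zero
	 * mask is an explicit override (GnuTLS feeds these from the
	 * GNUTLS_CPUID_OVERRIDE environment variable). */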
	if (capabilities == 0)
		edx = padlock_capability();
	else
		edx = capabilities_to_via_edx(capabilities);

	if (check_padlock(edx)) {
		_gnutls_debug_log
		    ("Padlock AES accelerator was detected\n");
		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_CBC, 80, &_gnutls_aes_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		/* register GCM ciphers */
		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_GCM, 80,
		     &_gnutls_aes_gcm_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}
#ifdef HAVE_LIBNETTLE
		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_192_CBC, 80, &_gnutls_aes_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_CBC, 80, &_gnutls_aes_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_GCM, 80,
		     &_gnutls_aes_gcm_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}
#endif
	}
#ifdef HAVE_LIBNETTLE
	phe = check_phe(edx);

	if (phe && check_phe_partial()) {
		_gnutls_debug_log
		    ("Padlock SHA1 and SHA256 (partial) accelerator was detected\n");
		if (check_phe_sha512(edx)) {
			_gnutls_debug_log
			    ("Padlock SHA512 (partial) accelerator was detected\n");
			ret =
			    gnutls_crypto_single_digest_register
			    (GNUTLS_DIG_SHA384, 80,
			     &_gnutls_sha_padlock_nano, 0);
			if (ret < 0) {
				gnutls_assert();
			}

			ret =
			    gnutls_crypto_single_digest_register
			    (GNUTLS_DIG_SHA512, 80,
			     &_gnutls_sha_padlock_nano, 0);
			if (ret < 0) {
				gnutls_assert();
			}

			ret =
			    gnutls_crypto_single_mac_register
			    (GNUTLS_MAC_SHA384, 80,
			     &_gnutls_hmac_sha_padlock_nano, 0);
			if (ret < 0) {
				gnutls_assert();
			}

			ret =
			    gnutls_crypto_single_mac_register
			    (GNUTLS_MAC_SHA512, 80,
			     &_gnutls_hmac_sha_padlock_nano, 0);
			if (ret < 0) {
				gnutls_assert();
			}
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
							 80,
							 &_gnutls_sha_padlock_nano, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA224,
							 80,
							 &_gnutls_sha_padlock_nano, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
							 80,
							 &_gnutls_sha_padlock_nano, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA1,
						      80,
						      &_gnutls_hmac_sha_padlock_nano, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		/* we don't register MAC_SHA224 because it is not used by TLS */

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA256,
						      80,
						      &_gnutls_hmac_sha_padlock_nano, 0);
		if (ret < 0) {
			gnutls_assert();
		}
	} else if (phe) {
		/* Original padlock PHE. Does not support incremental operations.
		 */
		_gnutls_debug_log
		    ("Padlock SHA1 and SHA256 accelerator was detected\n");
		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
							 80,
							 &_gnutls_sha_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
							 80,
							 &_gnutls_sha_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA1,
						      80,
						      &_gnutls_hmac_sha_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA256,
						      80,
						      &_gnutls_hmac_sha_padlock, 0);
		if (ret < 0) {
			gnutls_assert();
		}
	}
#endif

	return;
}

Example #2
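The Intel/AMD counterpart: it reads CPUID leaf 1 (or converts an explicit capability mask), registers SSSE3-based AES and SHA implementations when SSSE3 is present, and the AES-NI CBC/CCM ciphers when the AES accelerator is found. On x86-64 with PCLMUL, GCM uses the carry-less-multiply implementation; otherwise a plain AES-NI GCM is registered as the fallback.
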
static
void register_x86_intel_crypto(unsigned capabilities)
{
	int ret;
	unsigned t;

	if (check_intel_or_amd() == 0)
		return;

	if (capabilities == 0) {
		gnutls_cpuid(1, &t, &_gnutls_x86_cpuid_s[0], 
			&_gnutls_x86_cpuid_s[1], &_gnutls_x86_cpuid_s[2]);
	} else {
		capabilities_to_intel_cpuid(capabilities);
	}

	if (check_ssse3()) {
		_gnutls_debug_log("Intel SSSE3 was detected\n");

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_GCM, 90,
		     &_gnutls_aes_gcm_x86_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_GCM, 90,
		     &_gnutls_aes_gcm_x86_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_CBC, 90, &_gnutls_aes_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_192_CBC, 90, &_gnutls_aes_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_CBC, 90, &_gnutls_aes_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
							 80,
							 &_gnutls_sha_x86_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA224,
							 80,
							 &_gnutls_sha_x86_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
							 80,
							 &_gnutls_sha_x86_ssse3, 0);
		if (ret < 0) {
			gnutls_assert();
		}
		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA1,
							 80,
							 &_gnutls_hmac_sha_x86_ssse3, 0);
		if (ret < 0)
			gnutls_assert();

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA224,
							 80,
							 &_gnutls_hmac_sha_x86_ssse3, 0);
		if (ret < 0)
			gnutls_assert();

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA256,
							 80,
							 &_gnutls_hmac_sha_x86_ssse3, 0);
		if (ret < 0)
			gnutls_assert();

#ifdef ENABLE_SHA512
		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA384,
							 80,
							 &_gnutls_sha_x86_ssse3, 0);
		if (ret < 0)
			gnutls_assert();

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA512,
							 80,
							 &_gnutls_sha_x86_ssse3, 0);
		if (ret < 0)
			gnutls_assert();

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA384,
							 80,
							 &_gnutls_hmac_sha_x86_ssse3, 0);
		if (ret < 0)
			gnutls_assert();

		ret =
		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA512,
							 80,
							 &_gnutls_hmac_sha_x86_ssse3, 0);
		if (ret < 0)
			gnutls_assert();
#endif
	}

	if (check_optimized_aes()) {
		_gnutls_debug_log("Intel AES accelerator was detected\n");
		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_CBC, 80, &_gnutls_aesni_x86, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_192_CBC, 80, &_gnutls_aesni_x86, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_CBC, 80, &_gnutls_aesni_x86, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_CCM, 80,
		     &_gnutls_aes_ccm_x86_aesni, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_CCM, 80,
		     &_gnutls_aes_ccm_x86_aesni, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_CCM_8, 80,
		     &_gnutls_aes_ccm_x86_aesni, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_CCM_8, 80,
		     &_gnutls_aes_ccm_x86_aesni, 0);
		if (ret < 0) {
			gnutls_assert();
		}

#ifdef ASM_X86_64
		if (check_pclmul()) {
			/* register GCM ciphers */
			_gnutls_debug_log
			    ("Intel GCM accelerator was detected\n");
			ret =
			    gnutls_crypto_single_cipher_register
			    (GNUTLS_CIPHER_AES_128_GCM, 80,
			     &_gnutls_aes_gcm_pclmul, 0);
			if (ret < 0) {
				gnutls_assert();
			}

			ret =
			    gnutls_crypto_single_cipher_register
			    (GNUTLS_CIPHER_AES_256_GCM, 80,
			     &_gnutls_aes_gcm_pclmul, 0);
			if (ret < 0) {
				gnutls_assert();
			}
		} else
#endif
		{
			ret =
			    gnutls_crypto_single_cipher_register
			    (GNUTLS_CIPHER_AES_128_GCM, 80,
			     &_gnutls_aes_gcm_x86_aesni, 0);
			if (ret < 0) {
				gnutls_assert();
			}

			ret =
			    gnutls_crypto_single_cipher_register
			    (GNUTLS_CIPHER_AES_256_GCM, 80,
			     &_gnutls_aes_gcm_x86_aesni, 0);
			if (ret < 0) {
				gnutls_assert();
			}
		}
	}

	return;
}

Example #3
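The AArch64 variant keys off the ARMv8 crypto extension bits: it registers SHA1 and SHA2 digests and HMACs when the corresponding extensions are present, and with the AES extension the CBC and CCM ciphers, plus PMULL-accelerated GCM when available.
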
static
void _register_aarch64_crypto(unsigned capabilities)
{
	int ret;

	if (capabilities == 0) {
		discover_caps(&_gnutls_arm_cpuid_s);
	} else {
		capabilities_to_cpuid(capabilities);
	}

	if (_gnutls_arm_cpuid_s & ARMV8_SHA1) {
		_gnutls_debug_log("Aarch64 SHA1 was detected\n");

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
							 80,
							 &_gnutls_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register
		    (GNUTLS_MAC_SHA1, 80, &_gnutls_hmac_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}
	}

	if (_gnutls_arm_cpuid_s & ARMV8_SHA256) {
		_gnutls_debug_log("Aarch64 SHA2 was detected\n");

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA224,
							 80,
							 &_gnutls_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register
		    (GNUTLS_MAC_SHA224, 80, &_gnutls_hmac_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
							 80,
							 &_gnutls_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register
		    (GNUTLS_MAC_SHA256, 80, &_gnutls_hmac_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA384,
							 80,
							 &_gnutls_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register
		    (GNUTLS_MAC_SHA384, 80, &_gnutls_hmac_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA512,
							 80,
							 &_gnutls_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_mac_register
		    (GNUTLS_MAC_SHA512, 80, &_gnutls_hmac_sha_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}
	}

	if (_gnutls_arm_cpuid_s & ARMV8_AES) {
		_gnutls_debug_log("Aarch64 AES was detected\n");

		if (_gnutls_arm_cpuid_s & ARMV8_PMULL) {
			_gnutls_debug_log("Aarch64 PMULL was detected\n");

			ret =
			    gnutls_crypto_single_cipher_register
			    (GNUTLS_CIPHER_AES_128_GCM, 90,
			     &_gnutls_aes_gcm_aarch64, 0);
			if (ret < 0) {
				gnutls_assert();
			}

			ret =
			    gnutls_crypto_single_cipher_register
			    (GNUTLS_CIPHER_AES_256_GCM, 90,
			     &_gnutls_aes_gcm_aarch64, 0);
			if (ret < 0) {
				gnutls_assert();
			}
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_CBC, 90, &_gnutls_aes_cbc_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_CBC, 90, &_gnutls_aes_cbc_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_128_CCM, 90, &_gnutls_aes_ccm_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}

		ret =
		    gnutls_crypto_single_cipher_register
		    (GNUTLS_CIPHER_AES_256_CCM, 90, &_gnutls_aes_ccm_aarch64, 0);
		if (ret < 0) {
			gnutls_assert();
		}
	}

	return;
}
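
All three functions repeat the same register-then-log idiom: call one of the gnutls_crypto_single_*_register functions and, on failure, invoke gnutls_assert() (GnuTLS's debug-logging macro) without aborting, so a failed registration simply leaves the generic software implementation in place. A hypothetical wrapper, sketched here with the call shape and type names taken from the examples above (it is not part of GnuTLS), could condense the pattern:

/* Sketch only: register_cipher_or_warn() is a hypothetical helper, not
 * GnuTLS API. gnutls_crypto_cipher_st is the implementation-descriptor
 * type used by structs such as _gnutls_aes_padlock above. */
static void register_cipher_or_warn(gnutls_cipher_algorithm_t alg, int prio,
				    const gnutls_crypto_cipher_st *impl)
{
	int ret = gnutls_crypto_single_cipher_register(alg, prio, impl, 0);
	if (ret < 0)
		gnutls_assert(); /* log in debug builds, but keep going:
				    the generic implementation remains */
}

/* Usage, e.g. inside register_x86_padlock_crypto():
 *     register_cipher_or_warn(GNUTLS_CIPHER_AES_128_CBC, 80,
 *                             &_gnutls_aes_padlock);
 */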