static void aes_exit(void)
{
        int i;

        /*
         * aes_simd_algs[] is filled contiguously from index 0 during init,
         * so the first NULL entry marks the end of the wrappers to free.
         */
        for (i = 0; i < ARRAY_SIZE(aes_simd_algs) && aes_simd_algs[i]; i++)
                simd_skcipher_free(aes_simd_algs[i]);

        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
static void __exit aesni_exit(void)
{
        /* Tear everything down in the reverse order of aesni_init(). */
        aesni_free_simds();
        crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
        crypto_unregister_skciphers(aesni_skciphers,
                                    ARRAY_SIZE(aesni_skciphers));
        crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
        crypto_fpu_exit();
}
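/*
 * aesni_free_simds() is called above and in aesni_init()'s error path but
 * is not shown in this excerpt. A plausible sketch (an assumption,
 * mirroring aes_exit() above): free the contiguously filled wrapper
 * array, then any optional wrappers recorded in aesni_simd_skciphers2[].
 * It must tolerate partially filled arrays, since the init error path
 * calls it mid-registration.
 */
static void aesni_free_simds(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
                    aesni_simd_skciphers[i]; i++)
                simd_skcipher_free(aesni_simd_skciphers[i]);

        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
                if (aesni_simd_skciphers2[i].simd)
                        simd_skcipher_free(aesni_simd_skciphers2[i].simd);
}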
void simd_unregister_skciphers(struct skcipher_alg *algs, int count,
                               struct simd_skcipher_alg **simd_algs)
{
        int i;

        crypto_unregister_skciphers(algs, count);

        for (i = 0; i < count; i++) {
                if (simd_algs[i]) {
                        simd_skcipher_free(simd_algs[i]);
                        simd_algs[i] = NULL;
                }
        }
}
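/*
 * Usage sketch (not part of this excerpt): simd_unregister_skciphers()
 * is the teardown half of the crypto/simd.c compat helpers; the matching
 * registration helper is simd_register_skciphers_compat(). Assuming a
 * driver with a skcipher table my_algs[] defined elsewhere, the two pair
 * up as below; my_algs, my_simd_algs, my_init and my_exit are
 * illustrative names, not from the original code.
 */
static struct simd_skcipher_alg *my_simd_algs[ARRAY_SIZE(my_algs)];

static int __init my_init(void)
{
        /* Registers the internal algs and creates one simd wrapper each. */
        return simd_register_skciphers_compat(my_algs, ARRAY_SIZE(my_algs),
                                              my_simd_algs);
}

static void __exit my_exit(void)
{
        /* Unregisters the algs, then frees and NULLs the simd wrappers. */
        simd_unregister_skciphers(my_algs, ARRAY_SIZE(my_algs),
                                  my_simd_algs);
}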
static void __exit chacha_generic_mod_fini(void)
{
        crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}
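/*
 * For context (hedged, not shown in this excerpt): the matching module
 * init is plausibly a one-line registration of the same algs[] table,
 * with the two functions wired up via module_init()/module_exit().
 */
static int __init chacha_generic_mod_init(void)
{
        return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
}

module_init(chacha_generic_mod_init);
module_exit(chacha_generic_mod_fini);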
static int __init aesni_init(void)
{
        struct simd_skcipher_alg *simd;
        const char *basename;
        const char *algname;
        const char *drvname;
        int err;
        int i;

        if (!x86_match_cpu(aesni_cpu_id))
                return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
        if (boot_cpu_has(X86_FEATURE_AVX2)) {
                pr_info("AVX2 version of gcm_enc/dec engaged.\n");
                aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
                aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
        } else
#endif
#ifdef CONFIG_AS_AVX
        if (boot_cpu_has(X86_FEATURE_AVX)) {
                pr_info("AVX version of gcm_enc/dec engaged.\n");
                aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
                aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
        } else
#endif
        {
                pr_info("SSE version of gcm_enc/dec engaged.\n");
                aesni_gcm_enc_tfm = aesni_gcm_enc;
                aesni_gcm_dec_tfm = aesni_gcm_dec;
        }
        aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
        if (boot_cpu_has(X86_FEATURE_AVX)) {
                /* optimize performance of ctr mode encryption transform */
                aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
                pr_info("AES CTR mode by8 optimization enabled\n");
        }
#endif
#endif

        err = crypto_fpu_init();
        if (err)
                return err;

        err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
        if (err)
                goto fpu_exit;

        err = crypto_register_skciphers(aesni_skciphers,
                                        ARRAY_SIZE(aesni_skciphers));
        if (err)
                goto unregister_algs;

        err = crypto_register_aeads(aesni_aead_algs,
                                    ARRAY_SIZE(aesni_aead_algs));
        if (err)
                goto unregister_skciphers;

        for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
                /*
                 * Strip the "__" prefix that marks the internal algorithms
                 * to derive the user-visible names for the simd wrappers.
                 */
                algname = aesni_skciphers[i].base.cra_name + 2;
                drvname = aesni_skciphers[i].base.cra_driver_name + 2;
                basename = aesni_skciphers[i].base.cra_driver_name;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        goto unregister_simds;

                aesni_simd_skciphers[i] = simd;
        }

        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
                algname = aesni_simd_skciphers2[i].algname;
                drvname = aesni_simd_skciphers2[i].drvname;
                basename = aesni_simd_skciphers2[i].basename;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        continue;       /* these wrappers are optional */

                aesni_simd_skciphers2[i].simd = simd;
        }

        return 0;

unregister_simds:
        aesni_free_simds();
        crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
        crypto_unregister_skciphers(aesni_skciphers,
                                    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
        crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
        crypto_fpu_exit();
        return err;
}
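/*
 * Hedged illustration (the names and entry below are assumptions, not
 * from this excerpt): each aesni_simd_skciphers2[] record carries the
 * pre-split names that the second loop above feeds straight to
 * simd_skcipher_create_compat(). An entry for an optional mode such as
 * PCBC might look like:
 */
static struct {
        const char *algname;
        const char *drvname;
        const char *basename;
        struct simd_skcipher_alg *simd;
} aesni_simd_skciphers2_example[] = {
        {
                .algname  = "pcbc(aes)",        /* user-visible name */
                .drvname  = "pcbc-aes-aesni",   /* wrapper driver name */
                .basename = "__pcbc-aes-aesni", /* internal alg it wraps */
        },
};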