static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
	/* select the fastest available gcm_enc/dec implementation */
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		goto fpu_exit;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_algs;

	return err;

	/* unwind registrations in reverse order on failure */
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
	crypto_fpu_exit();
	return err;
}

static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;

	err = crypto_fpu_init();
	if (err)
		return err;

	return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

static int __init aesni_init(void)
{
	int err, i;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;

	err = crypto_fpu_init();
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aesni_algs); i++)
		INIT_LIST_HEAD(&aesni_algs[i].cra_list);

	return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

static int __init aesni_init(void)
{
	int err, i;

	if (!cpu_has_aes) {
		printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	err = crypto_fpu_init();
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aesni_algs); i++)
		INIT_LIST_HEAD(&aesni_algs[i].cra_list);

	return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

static int __init aesni_init(void)
{
	int err;

	if (!cpu_has_aes) {
		printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if ((err = crypto_fpu_init()))
		goto fpu_err;
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
#ifdef CONFIG_X86_64
	if ((err = crypto_register_alg(&blk_ctr_alg)))
		goto blk_ctr_err;
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
	if ((err = crypto_register_alg(&__rfc4106_alg)))
		goto __aead_gcm_err;
	if ((err = crypto_register_alg(&rfc4106_alg)))
		goto aead_gcm_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
		goto ablk_rfc3686_ctr_err;
#endif
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif
	return err;

	/* on failure, fall through the labels and unregister, in reverse
	 * order, everything that was registered before the failure */
#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef CONFIG_X86_64
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
ablk_rfc3686_ctr_err:
#endif
	crypto_unregister_alg(&rfc4106_alg);
aead_gcm_err:
	crypto_unregister_alg(&__rfc4106_alg);
__aead_gcm_err:
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
	crypto_unregister_alg(&blk_ctr_alg);
blk_ctr_err:
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
fpu_err:
	return err;
}

static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;

	if ((err = crypto_fpu_init()))
		goto fpu_err;
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
#ifdef CONFIG_X86_64
	if ((err = crypto_register_alg(&blk_ctr_alg)))
		goto blk_ctr_err;
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
	if ((err = crypto_register_alg(&__rfc4106_alg)))
		goto __aead_gcm_err;
	if ((err = crypto_register_alg(&rfc4106_alg)))
		goto aead_gcm_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
		goto ablk_rfc3686_ctr_err;
#endif
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif
	return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef CONFIG_X86_64
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
ablk_rfc3686_ctr_err:
#endif
	crypto_unregister_alg(&rfc4106_alg);
aead_gcm_err:
	crypto_unregister_alg(&__rfc4106_alg);
__aead_gcm_err:
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
	crypto_unregister_alg(&blk_ctr_alg);
blk_ctr_err:
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
fpu_err:
	return err;
}

static int __init aesni_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		goto fpu_exit;

	err = crypto_register_skciphers(aesni_skciphers,
					ARRAY_SIZE(aesni_skciphers));
	if (err)
		goto unregister_algs;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_skciphers;

	/* wrap each internal skcipher in a simd helper; the "+ 2" strips
	 * the "__" prefix from cra_name/cra_driver_name */
	for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
		algname = aesni_skciphers[i].base.cra_name + 2;
		drvname = aesni_skciphers[i].base.cra_driver_name + 2;
		basename = aesni_skciphers[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aesni_simd_skciphers[i] = simd;
	}

	/* best-effort wrappers for the second table; failures are skipped */
	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
		algname = aesni_simd_skciphers2[i].algname;
		drvname = aesni_simd_skciphers2[i].drvname;
		basename = aesni_simd_skciphers2[i].basename;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			continue;

		aesni_simd_skciphers2[i].simd = simd;
	}

	return 0;

unregister_simds:
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
	crypto_fpu_exit();
	return err;
}