path: root/arch/x86/crypto/sha1_ssse3_glue.c
author	Mathias Krause <minipli@googlemail.com>	2014-03-24 17:10:37 +0100
committer	Herbert Xu <herbert@gondor.apana.org.au>	2014-03-25 20:25:42 +0800
commit	6ca5afb8c26991cf4f13a8bcca870ec2a9522bf7 (patch)
tree	095efa7d609b3c8e1c7d85e56480a22f5354ea46 /arch/x86/crypto/sha1_ssse3_glue.c
parent	7c1da8d0d046174a4188b5729d7579abf3d29427 (diff)
download	linux-6ca5afb8c26991cf4f13a8bcca870ec2a9522bf7.tar.gz
	linux-6ca5afb8c26991cf4f13a8bcca870ec2a9522bf7.tar.bz2
	linux-6ca5afb8c26991cf4f13a8bcca870ec2a9522bf7.zip
crypto: x86/sha1 - re-enable the AVX variant
Commit 7c1da8d0d0 "crypto: sha - SHA1 transform x86_64 AVX2" accidentally
disabled the AVX variant by making the avx_usable() test not only fail in
case the CPU doesn't support AVX or OSXSAVE but also if it doesn't support
AVX2.

Fix that regression by splitting up the AVX/AVX2 test into two functions.
Also test for the BMI1 extension in the avx2_usable() test as the AVX2
implementation not only makes use of BMI2 but also BMI1 instructions.

Cc: Chandramouli Narayanan <mouli@linux.intel.com>
Signed-off-by: Mathias Krause <minipli@googlemail.com>
Reviewed-by: H. Peter Anvin <hpa@linux.intel.com>
Reviewed-by: Marek Vasut <marex@denx.de>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
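For context, the crux of the fix is to decouple the AVX and AVX2 capability tests so that an AVX-only CPU still passes the AVX check. Below is a minimal sketch of the two predicates, simplified from the hunks further down; it is not a verbatim copy of the file, and it omits the xgetbv()/XCR0 check that the in-tree avx_usable() also performs.

/*
 * Sketch only, simplified from the patch below -- the in-tree avx_usable()
 * additionally reads XCR0 via xgetbv() to confirm the OS saves YMM state.
 */
static bool __init avx_usable(void)
{
	/* AVX plus OSXSAVE is enough for the AVX variant. */
	return cpu_has_avx && cpu_has_osxsave;
}

#ifdef CONFIG_AS_AVX2
static bool __init avx2_usable(void)
{
	/* The AVX2 implementation also uses BMI1 and BMI2 instructions. */
	return avx_usable() && cpu_has_avx2 &&
	       boot_cpu_has(X86_FEATURE_BMI1) &&
	       boot_cpu_has(X86_FEATURE_BMI2);
}
#endif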
Diffstat (limited to 'arch/x86/crypto/sha1_ssse3_glue.c')
-rw-r--r--	arch/x86/crypto/sha1_ssse3_glue.c	26
1 files changed, 16 insertions, 10 deletions
diff --git a/arch/x86/crypto/sha1_ssse3_glue.c b/arch/x86/crypto/sha1_ssse3_glue.c
index 139a55c04d82..74d16ef707c7 100644
--- a/arch/x86/crypto/sha1_ssse3_glue.c
+++ b/arch/x86/crypto/sha1_ssse3_glue.c
@@ -208,11 +208,7 @@ static bool __init avx_usable(void)
 {
 	u64 xcr0;
 
-#if defined(CONFIG_AS_AVX2)
-	if (!cpu_has_avx || !cpu_has_avx2 || !cpu_has_osxsave)
-#else
 	if (!cpu_has_avx || !cpu_has_osxsave)
-#endif
 		return false;
 
 	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
@@ -224,11 +220,23 @@ static bool __init avx_usable(void)
 
 	return true;
 }
+
+#ifdef CONFIG_AS_AVX2
+static bool __init avx2_usable(void)
+{
+	if (avx_usable() && cpu_has_avx2 && boot_cpu_has(X86_FEATURE_BMI1) &&
+	    boot_cpu_has(X86_FEATURE_BMI2))
+		return true;
+
+	return false;
+}
+#endif
 #endif
 
 static int __init sha1_ssse3_mod_init(void)
 {
 	char *algo_name;
+
 	/* test for SSSE3 first */
 	if (cpu_has_ssse3) {
 		sha1_transform_asm = sha1_transform_ssse3;
@@ -238,13 +246,11 @@ static int __init sha1_ssse3_mod_init(void)
 #ifdef CONFIG_AS_AVX
 	/* allow AVX to override SSSE3, it's a little faster */
 	if (avx_usable()) {
-		if (cpu_has_avx) {
-			sha1_transform_asm = sha1_transform_avx;
-			algo_name = "AVX";
-		}
+		sha1_transform_asm = sha1_transform_avx;
+		algo_name = "AVX";
 #ifdef CONFIG_AS_AVX2
-		if (cpu_has_avx2 && boot_cpu_has(X86_FEATURE_BMI2)) {
-			/* allow AVX2 to override AVX, it's a little faster */
+		/* allow AVX2 to override AVX, it's a little faster */
+		if (avx2_usable()) {
 			sha1_transform_asm = sha1_apply_transform_avx2;
 			algo_name = "AVX2";
 		}
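Taken together, the detection path after this patch reads roughly as follows. This is a reconstruction from the hunks above, not a verbatim copy of the file: the SSSE3 branch's algo_name assignment and the shash registration at the end of sha1_ssse3_mod_init() are filled in or elided illustratively.

/*
 * Reconstructed sketch of the post-patch selection order: SSSE3 as the
 * baseline, AVX overriding it when usable, and AVX2 overriding AVX when
 * all of its prerequisites (AVX, AVX2, BMI1, BMI2) are present.
 */
static int __init sha1_ssse3_mod_init(void)
{
	char *algo_name;

	/* test for SSSE3 first */
	if (cpu_has_ssse3) {
		sha1_transform_asm = sha1_transform_ssse3;
		algo_name = "SSSE3";
	}
#ifdef CONFIG_AS_AVX
	/* allow AVX to override SSSE3, it's a little faster */
	if (avx_usable()) {
		sha1_transform_asm = sha1_transform_avx;
		algo_name = "AVX";
#ifdef CONFIG_AS_AVX2
		/* allow AVX2 to override AVX, it's a little faster */
		if (avx2_usable()) {
			sha1_transform_asm = sha1_apply_transform_avx2;
			algo_name = "AVX2";
		}
#endif
	}
#endif
	/* registration of the SHA-1 shash is elided in this sketch */
	return 0;
}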