author		Borislav Petkov <borislav.petkov@amd.com>	2010-03-05 17:34:46 +0100
committer	H. Peter Anvin <hpa@zytor.com>	2010-04-06 15:52:11 -0700
commit		d61931d89be506372d01a90d1755f6d0a9fafe2d (patch)
tree		652c34238edcb6c558163abc3cd9d6ce7c5f91a5 /include/asm-generic/bitops
parent		1527bc8b928dd1399c3d3467dd47d9ede210978a (diff)
x86: Add optimized popcnt variants
Add support for the hardware version of the Hamming weight function, popcnt, present in CPUs which advertise it under CPUID, Function 0x0000_0001_ECX[23]. On CPUs which don't support it, we fall back to the default software versions in lib/hweight.c.

A synthetic benchmark comparing popcnt with __sw_hweight64 showed almost a 3x speedup on an F10h machine.

Signed-off-by: Borislav Petkov <borislav.petkov@amd.com>
LKML-Reference: <20100318112015.GC11152@aftab>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
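For context, the lib/hweight.c-style software fallback computes the Hamming weight with a handful of shift-and-mask steps instead of a hardware instruction. Below is a minimal, illustrative SWAR (SIMD-within-a-register) popcount in that spirit; the name sw_hweight32_sketch is invented for this example and the body is a sketch of the technique, not the verbatim kernel __sw_hweight32().

/* Illustrative SWAR popcount in the spirit of __sw_hweight32();
 * a sketch for this page, not the kernel routine itself. */
static unsigned int sw_hweight32_sketch(unsigned int w)
{
	unsigned int res = w - ((w >> 1) & 0x55555555);        /* 2-bit sums */
	res = (res & 0x33333333) + ((res >> 2) & 0x33333333);  /* 4-bit sums */
	res = (res + (res >> 4)) & 0x0f0f0f0f;                 /* 8-bit sums */
	return (res * 0x01010101) >> 24;                       /* fold byte sums into top byte */
}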
Diffstat (limited to 'include/asm-generic/bitops')
-rw-r--r--	include/asm-generic/bitops/arch_hweight.h	| 22 ++++++++++++++++++----
1 file changed, 18 insertions(+), 4 deletions(-)
diff --git a/include/asm-generic/bitops/arch_hweight.h b/include/asm-generic/bitops/arch_hweight.h
index 3a7be842cdce..9a81c1e9436c 100644
--- a/include/asm-generic/bitops/arch_hweight.h
+++ b/include/asm-generic/bitops/arch_hweight.h
@@ -3,9 +3,23 @@
#include <asm/types.h>
-extern unsigned int __arch_hweight32(unsigned int w);
-extern unsigned int __arch_hweight16(unsigned int w);
-extern unsigned int __arch_hweight8(unsigned int w);
-extern unsigned long __arch_hweight64(__u64 w);
+static inline unsigned int __arch_hweight32(unsigned int w)
+{
+ return __sw_hweight32(w);
+}
+static inline unsigned int __arch_hweight16(unsigned int w)
+{
+ return __sw_hweight16(w);
+}
+
+static inline unsigned int __arch_hweight8(unsigned int w)
+{
+ return __sw_hweight8(w);
+}
+
+static inline unsigned long __arch_hweight64(__u64 w)
+{
+ return __sw_hweight64(w);
+}
#endif /* _ASM_GENERIC_BITOPS_HWEIGHT_H_ */
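The CPUID check mentioned in the commit message (Function 0x0000_0001, ECX bit 23) can be illustrated from userspace with GCC's <cpuid.h>. This is a hedged sketch of the detect-and-dispatch idea only: cpu_has_popcnt() and hweight32_dispatch() are made-up names, __builtin_popcount stands in for the software fallback, and the kernel itself resolves the choice via instruction patching rather than a per-call branch.

#include <cpuid.h>
#include <stdio.h>

/* CPUID leaf 0x1: feature flags; ECX bit 23 advertises POPCNT. */
static int cpu_has_popcnt(void)
{
	unsigned int eax, ebx, ecx, edx;

	if (!__get_cpuid(0x1, &eax, &ebx, &ecx, &edx))
		return 0;
	return (ecx >> 23) & 1;
}

/* Hypothetical dispatcher: hardware popcnt when available, otherwise a
 * software count (here __builtin_popcount stands in for __sw_hweight32). */
static unsigned int hweight32_dispatch(unsigned int w)
{
	unsigned int res;

	if (cpu_has_popcnt()) {
		asm("popcnt %1, %0" : "=r" (res) : "r" (w));
		return res;
	}
	return (unsigned int)__builtin_popcount(w);
}

int main(void)
{
	printf("hweight32(0xdeadbeef) = %u\n", hweight32_dispatch(0xdeadbeef));
	return 0;
}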