 include/asm-x86/div64.h | 20 ++++++++++++++++++++
 include/linux/math64.h  | 72 ++++++++++++++++++++++++++++++++++++++++++++++++
 lib/div64.c             | 23 +++++++++++++++++++--
 3 files changed, 113 insertions(+), 2 deletions(-)
diff --git a/include/asm-x86/div64.h b/include/asm-x86/div64.h
index 0dbf8bf3ef0a..c7892cfe9ce6 100644
--- a/include/asm-x86/div64.h
+++ b/include/asm-x86/div64.h
@@ -51,6 +51,26 @@ static inline long div_ll_X_l_rem(long long divs, long div, long *rem)
}
+static inline u64 div_u64_rem(u64 dividend, u32 divisor, u32 *remainder)
+{
+	union {
+		u64 v64;
+		u32 v32[2];
+	} d = { dividend };
+	u32 upper;
+
+	upper = d.v32[1];
+	d.v32[1] = 0;
+	if (upper >= divisor) {
+		d.v32[1] = upper / divisor;
+		upper %= divisor;
+	}
+	asm ("divl %2" : "=a" (d.v32[0]), "=d" (*remainder) :
+		"rm" (divisor), "0" (d.v32[0]), "1" (upper));
+	return d.v64;
+}
+#define div_u64_rem div_u64_rem
+
extern uint64_t div64_64(uint64_t dividend, uint64_t divisor);
#else
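
An illustration, not part of the patch: the x86 version above divides the upper 32 bits separately first because divl computes EDX:EAX / divisor and raises a divide-error exception when the quotient does not fit in 32 bits; reducing the upper half below the divisor makes the instruction safe. A minimal user-space sketch of the same reduction in portable C; the function name model_div_u64_rem and the test values are hypothetical:

#include <assert.h>
#include <stdint.h>

/* Portable model of the divl-based div_u64_rem(): divide the upper
 * 32 bits first, then divide the remaining 64-by-32 pair, which is
 * exactly the EDX:EAX / divisor operation divl performs. */
static uint64_t model_div_u64_rem(uint64_t dividend, uint32_t divisor,
				  uint32_t *remainder)
{
	uint32_t lower = (uint32_t)dividend;
	uint32_t upper = (uint32_t)(dividend >> 32);
	uint64_t quot_hi = 0;

	if (upper >= divisor) {
		quot_hi = upper / divisor;	/* upper half of the quotient */
		upper %= divisor;		/* now upper < divisor */
	}
	/* upper < divisor guarantees this quotient fits in 32 bits,
	 * the condition under which divl does not fault. */
	uint64_t edx_eax = ((uint64_t)upper << 32) | lower;
	*remainder = (uint32_t)(edx_eax % divisor);
	return (quot_hi << 32) | (edx_eax / divisor);
}

int main(void)
{
	uint32_t rem;
	uint64_t q = model_div_u64_rem(0x123456789abcdef0ULL, 1000000007u, &rem);

	assert(q == 0x123456789abcdef0ULL / 1000000007u);
	assert(rem == 0x123456789abcdef0ULL % 1000000007u);
	return 0;
}
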
diff --git a/include/linux/math64.h b/include/linux/math64.h
new file mode 100644
index 000000000000..6d1716641008
--- /dev/null
+++ b/include/linux/math64.h
@@ -0,0 +1,72 @@
+#ifndef _LINUX_MATH64_H
+#define _LINUX_MATH64_H
+
+#include <linux/types.h>
+#include <asm/div64.h>
+
+#if BITS_PER_LONG == 64
+
+/**
+ * div_u64_rem - unsigned 64bit divide with 32bit divisor and remainder
+ *
+ * 32bit archs commonly override div_u64_rem with an optimized version
+ * of this 64bit divide.
+ */
+static inline u64 div_u64_rem(u64 dividend, u32 divisor, u32 *remainder)
+{
+	*remainder = dividend % divisor;
+	return dividend / divisor;
+}
+
+/**
+ * div_s64_rem - signed 64bit divide with 32bit divisor and remainder
+ */
+static inline s64 div_s64_rem(s64 dividend, s32 divisor, s32 *remainder)
+{
+	*remainder = dividend % divisor;
+	return dividend / divisor;
+}
+
+#elif BITS_PER_LONG == 32
+
+#ifndef div_u64_rem
+static inline u64 div_u64_rem(u64 dividend, u32 divisor, u32 *remainder)
+{
+	*remainder = do_div(dividend, divisor);
+	return dividend;
+}
+#endif
+
+#ifndef div_s64_rem
+extern s64 div_s64_rem(s64 dividend, s32 divisor, s32 *remainder);
+#endif
+
+#endif /* BITS_PER_LONG */
+
+/**
+ * div_u64 - unsigned 64bit divide with 32bit divisor
+ *
+ * This is the most common 64bit divide and should be used if possible,
+ * as many 32bit archs can optimize this variant better than a full 64bit
+ * divide.
+ */
+#ifndef div_u64
+static inline u64 div_u64(u64 dividend, u32 divisor)
+{
+	u32 remainder;
+	return div_u64_rem(dividend, divisor, &remainder);
+}
+#endif
+
+/**
+ * div_s64 - signed 64bit divide with 32bit divisor
+ */
+#ifndef div_s64
+static inline s64 div_s64(s64 dividend, s32 divisor)
+{
+	s32 remainder;
+	return div_s64_rem(dividend, divisor, &remainder);
+}
+#endif
+
+#endif /* _LINUX_MATH64_H */
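
An illustration, not part of the patch: a hypothetical caller of the new interface. The helper names and constants below are invented for the example. The point is that dividing a 64bit value by a 32bit divisor should go through div_u64()/div_u64_rem() rather than a plain '/'; on most 32bit architectures the kernel does not provide the libgcc 64bit-division helpers, so an open-coded 64bit divide will not even link.

#include <linux/math64.h>

/* Hypothetical helper: convert nanoseconds to milliseconds, keeping
 * the sub-millisecond part.  div_u64_rem() picks up the optimized
 * arch version on 32bit kernels when one is provided. */
static u64 ns_to_ms(u64 ns, u32 *ns_rem)
{
	return div_u64_rem(ns, 1000000, ns_rem);
}

/* When the remainder is not needed, div_u64() is the common case. */
static u64 ns_to_ms_fast(u64 ns)
{
	return div_u64(ns, 1000000);
}
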
diff --git a/lib/div64.c b/lib/div64.c
index b71cf93c529a..689bd76833fa 100644
--- a/lib/div64.c
+++ b/lib/div64.c
@@ -16,9 +16,8 @@
* assembly versions such as arch/ppc/lib/div64.S and arch/sh/lib/div64.S.
*/
-#include <linux/types.h>
#include <linux/module.h>
-#include <asm/div64.h>
+#include <linux/math64.h>
/* Not needed on 64bit architectures */
#if BITS_PER_LONG == 32
@@ -58,6 +57,26 @@ uint32_t __attribute__((weak)) __div64_32(uint64_t *n, uint32_t base)
EXPORT_SYMBOL(__div64_32);
+#ifndef div_s64_rem
+s64 div_s64_rem(s64 dividend, s32 divisor, s32 *remainder)
+{
+	u64 quotient;
+
+	if (dividend < 0) {
+		quotient = div_u64_rem(-dividend, abs(divisor), (u32 *)remainder);
+		*remainder = -*remainder;
+		if (divisor > 0)
+			quotient = -quotient;
+	} else {
+		quotient = div_u64_rem(dividend, abs(divisor), (u32 *)remainder);
+		if (divisor < 0)
+			quotient = -quotient;
+	}
+	return quotient;
+}
+EXPORT_SYMBOL(div_s64_rem);
+#endif
+
/* 64bit divisor, dividend and result. dynamic precision */
uint64_t div64_64(uint64_t dividend, uint64_t divisor)
{
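
An illustration, not part of the patch: the sign fixups in the generic div_s64_rem() above implement C's truncated division, i.e. the quotient rounds towards zero and the remainder takes the sign of the dividend, matching the native 64bit '/' and '%' operators. A small user-space check with arbitrary values:

#include <assert.h>
#include <stdint.h>

/* Truncated division: quotient rounds towards zero, remainder keeps
 * the sign of the dividend, so dividend == quotient * divisor + rem. */
static void check(int64_t dividend, int32_t divisor)
{
	int64_t q = dividend / divisor;	/* what div_s64_rem() returns */
	int32_t r = (int32_t)(dividend % divisor);

	assert(dividend == q * (int64_t)divisor + r);
	assert(r == 0 || (r < 0) == (dividend < 0));
}

int main(void)
{
	check(7, 2);	/* q =  3, r =  1 */
	check(-7, 2);	/* q = -3, r = -1 */
	check(7, -2);	/* q = -3, r =  1 */
	check(-7, -2);	/* q =  3, r = -1 */
	return 0;
}
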