Diffstat (limited to 'arch/mips/include/asm/barrier.h')
 arch/mips/include/asm/barrier.h | 60 +++++++++++++++++++++++++++++-----------
 1 file changed, 43 insertions(+), 17 deletions(-)
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 8e9ac313ca3b..c0884f02d3a6 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -88,12 +88,20 @@
: /* no output */ \
: "m" (*(int *)CKSEG1) \
: "memory")
-
-#define fast_wmb() __sync()
-#define fast_rmb() __sync()
-#define fast_mb() __sync()
-#ifdef CONFIG_SGI_IP28
-#define fast_iob() \
+#ifdef CONFIG_CPU_CAVIUM_OCTEON
+# define OCTEON_SYNCW_STR ".set push\n.set arch=octeon\nsyncw\nsyncw\n.set pop\n"
+# define __syncw() __asm__ __volatile__(OCTEON_SYNCW_STR : : : "memory")
+
+# define fast_wmb() __syncw()
+# define fast_rmb() barrier()
+# define fast_mb() __sync()
+# define fast_iob() do { } while (0)
+#else /* ! CONFIG_CPU_CAVIUM_OCTEON */
+# define fast_wmb() __sync()
+# define fast_rmb() __sync()
+# define fast_mb() __sync()
+# ifdef CONFIG_SGI_IP28
+# define fast_iob() \
__asm__ __volatile__( \
".set push\n\t" \
".set noreorder\n\t" \
@@ -104,13 +112,14 @@
: /* no output */ \
: "m" (*(int *)CKSEG1ADDR(0x1fa00004)) \
: "memory")
-#else
-#define fast_iob() \
+# else
+# define fast_iob() \
do { \
__sync(); \
__fast_iob(); \
} while (0)
-#endif
+# endif
+#endif /* CONFIG_CPU_CAVIUM_OCTEON */

#ifdef CONFIG_CPU_HAS_WB

@@ -131,25 +140,42 @@
#endif /* !CONFIG_CPU_HAS_WB */

#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
-#define __WEAK_ORDERING_MB " sync \n"
+# ifdef CONFIG_CPU_CAVIUM_OCTEON
+# define smp_mb() __sync()
+# define smp_rmb() barrier()
+# define smp_wmb() __syncw()
+# else
+# define smp_mb() __asm__ __volatile__("sync" : : :"memory")
+# define smp_rmb() __asm__ __volatile__("sync" : : :"memory")
+# define smp_wmb() __asm__ __volatile__("sync" : : :"memory")
+# endif
#else
-#define __WEAK_ORDERING_MB " \n"
+#define smp_mb() barrier()
+#define smp_rmb() barrier()
+#define smp_wmb() barrier()
#endif
+
#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
#define __WEAK_LLSC_MB " sync \n"
#else
#define __WEAK_LLSC_MB " \n"
#endif

-#define smp_mb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_rmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_wmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-
#define set_mb(var, value) \
do { var = value; smp_mb(); } while (0)

#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
-#define smp_llsc_rmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
-#define smp_llsc_wmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+
+#ifdef CONFIG_CPU_CAVIUM_OCTEON
+#define smp_mb__before_llsc() smp_wmb()
+/* Cause previous writes to become visible on all CPUs as soon as possible */
+#define nudge_writes() __asm__ __volatile__(".set push\n\t" \
+ ".set arch=octeon\n\t" \
+ "syncw\n\t" \
+ ".set pop" : : : "memory")
+#else
+#define smp_mb__before_llsc() smp_llsc_mb()
+#define nudge_writes() mb()
+#endif

#endif /* __ASM_BARRIER_H */
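
For context, a minimal sketch of the publish/consume pairing these
primitives serve. It is illustrative only: payload, ready, publish() and
consume() are hypothetical names, not part of this patch. After this change,
the smp_wmb() below compiles to the two-syncw sequence from OCTEON_SYNCW_STR
on Octeon, while smp_rmb() reduces to a plain compiler barrier(), since
Octeon does not reorder reads (hence fast_rmb()/smp_rmb() are barrier() there).

	#include <linux/compiler.h>	/* barrier() */
	#include <asm/barrier.h>	/* smp_wmb(), smp_rmb() */

	static int payload;
	static int ready;

	/* Producer: make the payload store visible before the flag store. */
	static void publish(int v)
	{
		payload = v;
		smp_wmb();	/* two syncw's on Octeon; sync on other
				 * weakly ordered SMP systems */
		ready = 1;
	}

	/* Consumer: wait for the flag, then read the payload. */
	static int consume(void)
	{
		while (!ready)
			barrier();	/* force ready to be re-read */
		smp_rmb();	/* just barrier() on Octeon */
		return payload;
	}

smp_mb__before_llsc() plays the analogous role ahead of an ll/sc sequence:
on Octeon a write barrier suffices before the ll, so locking paths get the
cheaper syncw instead of a full sync.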