MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro smp_mb__before_llsc(). It is used before ll/sc sequences that are documented as needing write barrier semantics.

The default implementation of smp_mb__before_llsc() is just smp_llsc_mb(), so there are no changes in semantics.

Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb() to be just barrier() in the non-SMP case.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
commit f252ffd50c
parent ec5380c768
committed by Ralf Baechle
@@ -131,23 +131,26 @@
 #endif /* !CONFIG_CPU_HAS_WB */
 
 #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
-#define __WEAK_ORDERING_MB	"       sync	\n"
+#define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
 #else
-#define __WEAK_ORDERING_MB	"		\n"
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
 #endif
+
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
 #define __WEAK_LLSC_MB		"       sync	\n"
 #else
 #define __WEAK_LLSC_MB		"		\n"
 #endif
 
-#define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_rmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_wmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-
 #define set_mb(var, value) \
 	do { var = value; smp_mb(); } while (0)
 
 #define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
 
+#define smp_mb__before_llsc() smp_llsc_mb()
+
 #endif /* __ASM_BARRIER_H */
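The hunk above only adds the default definition of smp_mb__before_llsc(); the call sites it replaces are the ll/sc loops elsewhere in this commit (atomic and locking code). As a rough illustration of the intended usage pattern, here is a sketch of an ll/sc read-modify-write that needs full-barrier semantics. It assumes kernel context (atomic_t and the barrier macros from asm/barrier.h); the function name, the omitted .set directives, and the simplified constraints are illustrative assumptions, not the exact kernel source.

/*
 * Sketch only: the barrier *before* the ll/sc sequence is now spelled
 * smp_mb__before_llsc() instead of smp_llsc_mb(); the barrier after the
 * store-conditional is unchanged.
 */
static inline int atomic_add_return_sketch(int i, atomic_t *v)
{
	int result, temp;

	smp_mb__before_llsc();	/* was smp_llsc_mb() before this commit */

	__asm__ __volatile__(
	"1:	ll	%1, %2		# load-linked: temp = v->counter \n"
	"	addu	%0, %1, %3	# result = temp + i              \n"
	"	sc	%0, %2		# store-conditional              \n"
	"	beqz	%0, 1b		# retry if the sc failed         \n"
	"	addu	%0, %1, %3	# recompute the return value     \n"
	: "=&r" (result), "=&r" (temp), "=m" (v->counter)
	: "Ir" (i), "m" (v->counter));

	smp_llsc_mb();		/* unchanged: barrier after the ll/sc */

	return result;
}

The point of the separate name is presumably to let a platform override the pre-ll/sc barrier independently of smp_llsc_mb(); with the default added in the hunk above, behavior is identical, as the commit message notes.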