MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc(). It is used before ll/sc sequences that are
documented as needing write-barrier semantics.

The default implementation of smp_mb__before_llsc() is just
smp_llsc_mb(), so there are no changes in semantics.

Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb() to
be just barrier() in the non-SMP case.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
commit f252ffd50c
parent ec5380c768
committed by Ralf Baechle
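For reference, a minimal sketch of how the pieces described in the commit
message could fit together in barrier.h (those hunks are not shown in this
excerpt, so the guards and file layout here are assumptions, not the
verbatim patch):

/* Sketch only, reconstructed from the commit message above.
 * Default: smp_mb__before_llsc() is simply smp_llsc_mb(), so platforms
 * that do not override it see no change in semantics.
 */
#ifndef smp_mb__before_llsc
#define smp_mb__before_llsc()	smp_llsc_mb()
#endif

/* Non-SMP builds need only a compiler barrier. */
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif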
@@ -137,7 +137,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -189,7 +189,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -249,7 +249,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -516,7 +516,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -568,7 +568,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -628,7 +628,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -788,9 +788,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic-long.h>
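To make the barrier placement concrete, here is a simplified sketch of the
ll/sc pattern these hunks touch, using atomic_add_return() as the example
(the real function also carries the R10000 beqzl workaround and a non-LLSC
fallback path; the inline asm below is illustrative, not a verbatim copy of
atomic.h):

/* Simplified sketch: barrier placement around an ll/sc read-modify-write. */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int result, temp;

	smp_mb__before_llsc();	/* write barrier before the ll/sc sequence */

	__asm__ __volatile__(
	"1:	ll	%1, %2		# load-linked v->counter\n"
	"	addu	%0, %1, %3	# result = temp + i\n"
	"	sc	%0, %2		# store-conditional\n"
	"	beqz	%0, 1b		# retry if sc failed\n"
	"	addu	%0, %1, %3	# delay slot: recompute return value\n"
	: "=&r" (result), "=&r" (temp), "+m" (v->counter)
	: "Ir" (i)
	: "memory");

	smp_llsc_mb();		/* keep *_return operations fully serializing */

	return result;
}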