Commit f46e477e authored by Mathieu Desnoyers, committed by Linus Torvalds

atomic.h: add atomic64 cmpxchg, xchg and add_unless to powerpc

[akpm@linux-foundation.org: build fixes]
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Paul Mackerras <paulus@samba.org>
Cc: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 8ffe9d0b
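
This commit routes atomic_cmpxchg()/atomic_xchg() and their atomic64_* counterparts through the generic cmpxchg()/xchg() in system.h, and adds CPU-local variants (xchg_local(), cmpxchg_local()) alongside them. As a hedged sketch of the atomic64 API being completed here (the max-tracking helper is hypothetical, not part of this patch):

	#include <asm/atomic.h>

	/* Lock-free "raise *max to val" using atomic64_cmpxchg(). */
	static void atomic64_track_max(atomic64_t *max, long val)
	{
		long old = atomic64_read(max);

		while (old < val) {
			long prev = atomic64_cmpxchg(max, old, val);
			if (prev == old)
				break;		/* our update won */
			old = prev;		/* lost a race; re-check */
		}
	}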
@@ -165,8 +165,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	return t;
 }
 
-#define atomic_cmpxchg(v, o, n) \
-	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
@@ -414,8 +413,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	return t;
 }
 
-#define atomic64_cmpxchg(v, o, n) \
-	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
...
@@ -39,7 +39,6 @@
 #ifdef __KERNEL__
 
 #include <linux/compiler.h>
-#include <asm/atomic.h>
 #include <asm/asm-compat.h>
 #include <asm/synch.h>
 
...
@@ -7,7 +7,6 @@
 #include <linux/kernel.h>
 
 #include <asm/hw_irq.h>
-#include <asm/atomic.h>
 
 /*
  * Memory barrier.
@@ -227,6 +226,29 @@ __xchg_u32(volatile void *p, unsigned long val)
 	return prev;
 }
 
+/*
+ * Atomic exchange
+ *
+ * Changes the memory location '*ptr' to be val and returns
+ * the previous value stored there.
+ */
+static __inline__ unsigned long
+__xchg_u32_local(volatile void *p, unsigned long val)
+{
+	unsigned long prev;
+
+	__asm__ __volatile__(
+"1:	lwarx	%0,0,%2 \n"
+	PPC405_ERR77(0,%2)
+"	stwcx.	%3,0,%2 \n\
	bne-	1b"
+	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
+	: "r" (p), "r" (val)
+	: "cc", "memory");
+
+	return prev;
+}
+
 #ifdef CONFIG_PPC64
 static __inline__ unsigned long
 __xchg_u64(volatile void *p, unsigned long val)
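
Note the contrast with the pre-existing __xchg_u32() earlier in this file: the _local variants reuse the same lwarx/stwcx. reservation loop but omit the LWSYNC_ON_SMP/ISYNC_ON_SMP barriers that bracket the full version (as system.h stood at the time), so they are atomic only with respect to the CPU executing them. A hedged sketch of what that difference means for callers; data, flag, and both functions are hypothetical:

	unsigned long data, flag;

	static void publish(void)
	{
		data = 42;
		/* Full xchg() implies memory barriers: other CPUs see the
		 * store to data before they see the new flag value. */
		xchg(&flag, 1);
	}

	static void publish_cpu_local(void)
	{
		data = 42;
		/* xchg_local() implies no ordering; only safe when no other
		 * CPU touches data or flag, e.g. per-CPU state updated with
		 * preemption disabled. */
		xchg_local(&flag, 1);
	}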
@@ -246,6 +268,23 @@ __xchg_u64(volatile void *p, unsigned long val)
 	return prev;
 }
 
+static __inline__ unsigned long
+__xchg_u64_local(volatile void *p, unsigned long val)
+{
+	unsigned long prev;
+
+	__asm__ __volatile__(
+"1:	ldarx	%0,0,%2 \n"
+	PPC405_ERR77(0,%2)
+"	stdcx.	%3,0,%2 \n\
	bne-	1b"
+	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
+	: "r" (p), "r" (val)
+	: "cc", "memory");
+
+	return prev;
+}
+
 #endif
 
 /*
@@ -269,12 +308,33 @@ __xchg(volatile void *ptr, unsigned long x, unsigned int size)
 	return x;
 }
 
+static __inline__ unsigned long
+__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
+{
+	switch (size) {
+	case 4:
+		return __xchg_u32_local(ptr, x);
+#ifdef CONFIG_PPC64
+	case 8:
+		return __xchg_u64_local(ptr, x);
+#endif
+	}
+	__xchg_called_with_bad_pointer();
+	return x;
+}
+
 #define xchg(ptr,x)						     \
 ({								     \
 	__typeof__(*(ptr)) _x_ = (x);				     \
 	(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
 })
 
+#define xchg_local(ptr,x)					     \
+({								     \
+	__typeof__(*(ptr)) _x_ = (x);				     \
+	(__typeof__(*(ptr))) __xchg_local((ptr),		     \
+			(unsigned long)_x_, sizeof(*(ptr)));	     \
+})
+
 #define tas(ptr) (xchg((ptr),1))
 
 /*
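
With xchg_local() defined, a per-CPU statistics slot can be drained without a lock or a full barrier. A minimal sketch, assuming a kernel of this vintage; the per-CPU variable and helper are hypothetical:

	#include <linux/percpu.h>

	static DEFINE_PER_CPU(unsigned long, pending_events);

	/* Fetch-and-clear this CPU's counter.  The caller must keep
	 * preemption disabled so we stay on the CPU whose slot we drain. */
	static unsigned long drain_local_events(void)
	{
		return xchg_local(&__get_cpu_var(pending_events), 0UL);
	}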
@@ -306,6 +366,28 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
 	return prev;
 }
 
+static __inline__ unsigned long
+__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
+			unsigned long new)
+{
+	unsigned int prev;
+
+	__asm__ __volatile__ (
+"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
+	PPC405_ERR77(0,%2)
+"	stwcx.	%4,0,%2\n\
	bne-	1b"
+	"\n\
2:"
+	: "=&r" (prev), "+m" (*p)
+	: "r" (p), "r" (old), "r" (new)
+	: "cc", "memory");
+
+	return prev;
+}
+
 #ifdef CONFIG_PPC64
 static __inline__ unsigned long
 __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
@@ -328,6 +410,27 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
 	return prev;
 }
 
+static __inline__ unsigned long
+__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
+			unsigned long new)
+{
+	unsigned long prev;
+
+	__asm__ __volatile__ (
+"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
+	"\n\
2:"
+	: "=&r" (prev), "+m" (*p)
+	: "r" (p), "r" (old), "r" (new)
+	: "cc", "memory");
+
+	return prev;
+}
+
 #endif
 
 /* This function doesn't exist, so you'll get a linker error
@@ -350,6 +453,22 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
 	return old;
 }
 
+static __inline__ unsigned long
+__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
+	  unsigned int size)
+{
+	switch (size) {
+	case 4:
+		return __cmpxchg_u32_local(ptr, old, new);
+#ifdef CONFIG_PPC64
+	case 8:
+		return __cmpxchg_u64_local(ptr, old, new);
+#endif
+	}
+	__cmpxchg_called_with_bad_pointer();
+	return old;
+}
+
 #define cmpxchg(ptr,o,n)					     \
 ({								     \
 	__typeof__(*(ptr)) _o_ = (o);				     \
@@ -358,6 +477,15 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
 		    (unsigned long)_n_, sizeof(*(ptr))); \
 })
 
+#define cmpxchg_local(ptr,o,n)					     \
+({								     \
+	__typeof__(*(ptr)) _o_ = (o);				     \
+	__typeof__(*(ptr)) _n_ = (n);				     \
+	(__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
+		    (unsigned long)_n_, sizeof(*(ptr)));	     \
+})
+
 #ifdef CONFIG_PPC64
 /*
  * We handle most unaligned accesses in hardware. On the other hand
...
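
Taken together, cmpxchg_local()/xchg_local() give powerpc a cheap read-modify-write primitive for data owned by the current CPU: the retry loop below stays correct against interrupts on the same CPU but, unlike cmpxchg(), emits no barriers and provides no cross-CPU atomicity. A hedged sketch; the counter helper is hypothetical:

	/* Add n to a CPU-private counter; an interrupt on this CPU may
	 * also update it, which the retry loop tolerates. */
	static void local_counter_add(unsigned long *ctr, unsigned long n)
	{
		unsigned long old, prev;

		do {
			old = *ctr;
			prev = cmpxchg_local(ctr, old, old + n);
		} while (prev != old);
	}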