|
|
|
#ifndef _ASM_POWERPC_ATOMIC_H_
|
|
|
|
#define _ASM_POWERPC_ATOMIC_H_
|
|
|
|
|
|
|
|
/*
|
|
|
|
* PowerPC atomic operations
|
|
|
|
*/
|
|
|
|
|
|
|
|
/* Atomic integer: a plain int wrapped in a struct so it is only touched
 * through the accessors below; volatile forces the compiler to re-read
 * the counter from memory on each plain atomic_read(). */
typedef struct { volatile int counter; } atomic_t;
|
|
|
|
|
|
|
|
#ifdef __KERNEL__
|
|
|
|
#include <asm/synch.h>
|
/*
 * [PATCH] powerpc: Consolidate asm compatibility macros
 *
 * This patch consolidates macros used to generate assembly for
 * compatibility across different CPUs or configs.  A new header,
 * asm-powerpc/asm-compat.h contains the main compatibility macros.  It
 * uses some preprocessor magic to make the macros suitable both for use
 * in .S files, and in inline asm in .c files.  Headers (bitops.h,
 * uaccess.h, atomic.h, bug.h) which had their own such compatibility
 * macros are changed to use asm-compat.h.
 *
 * ppc_asm.h is now for use in .S files *only*, and a #error enforces
 * that.  As such, we're a lot more careless about namespace pollution
 * here than in asm-compat.h.
 *
 * While we're at it, this patch adds a call to the PPC405_ERR77 macro in
 * futex.h which should have had it already, but didn't.
 *
 * Built and booted on pSeries, Maple and iSeries (ARCH=powerpc).  Built
 * for 32-bit powermac (ARCH=powerpc) and Walnut (ARCH=ppc).
 *
 * Signed-off-by: David Gibson <dwg@au1.ibm.com>
 * Signed-off-by: Paul Mackerras <paulus@samba.org>
 */
|
|
|
#include <asm/asm-compat.h>
|
|
|
|
|
|
|
|
/* Static initializer for an atomic_t, e.g. atomic_t x = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)		{ (i) }
|
|
|
|
|
|
|
|
/* Plain (non-locked) read and write of the counter.  These are single
 * loads/stores of an int; they provide no memory-barrier or
 * read-modify-write atomicity guarantees. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
|
|
|
|
|
|
|
|
/*
 * atomic_add - atomically add @a to @v->counter.
 *
 * lwarx/stwcx. load-reserve / store-conditional loop: the stwcx. fails
 * (and we branch back to 1b) if the reservation was lost to another
 * CPU's store.  No return value and no memory barrier; callers that
 * need ordering use atomic_add_return() instead.
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)		/* PPC405 erratum workaround, from asm-compat.h */
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
|
|
|
|
|
|
|
|
/*
 * atomic_add_return - atomically add @a to @v->counter and return the
 * new value.
 *
 * Unlike atomic_add(), this is a full atomic op with barrier semantics:
 * EIEIO_ON_SMP / ISYNC_ON_SMP (from asm/synch.h) bracket the
 * lwarx/stwcx. loop — presumably expanding to eieio/isync on SMP and
 * nothing on UP; the "memory" clobber keeps the compiler from caching
 * memory values across the op.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
|
|
|
|
|
|
|
|
/* True if v + a is negative after the (barrier-bracketed) addition. */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
|
|
|
|
|
|
|
|
/*
 * atomic_sub - atomically subtract @a from @v->counter.
 *
 * subf computes %0 = %0 - %2 (subtract-from).  Same unbarriered
 * lwarx/stwcx. retry loop as atomic_add(); no return value.
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
|
|
|
|
|
|
|
|
/*
 * atomic_sub_return - atomically subtract @a from @v->counter and
 * return the new value.
 *
 * Barrier-bracketed (EIEIO_ON_SMP / ISYNC_ON_SMP) like
 * atomic_add_return(); "memory" clobber orders compiler accesses.
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
|
|
|
|
|
|
|
|
/*
 * atomic_inc - atomically increment @v->counter by 1.
 *
 * Uses addic (add immediate carrying) with literal 1, saving the
 * register operand the add/sub variants need.  No barrier, no return
 * value.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
|
|
|
|
|
|
|
|
/*
 * atomic_inc_return - atomically increment @v->counter by 1 and return
 * the new value.  Barrier-bracketed like the other *_return ops.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
|
|
|
|
|
|
|
|
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 *
 * Built on atomic_inc_return(), so it carries that op's barrier
 * semantics.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
|
|
|
|
|
|
|
|
/*
 * atomic_dec - atomically decrement @v->counter by 1.
 *
 * addic with -1 inside the usual lwarx/stwcx. retry loop.  No barrier
 * and no return value; use atomic_dec_return() when the caller needs
 * the result or ordering.
 *
 * Fix: dropped the stray line-continuation backslash that followed
 * PPC405_ERR77(0,%2) — it spliced the macro line onto the following
 * string literal before preprocessing, which happened to produce the
 * same token sequence but was inconsistent with atomic_add/sub/inc and
 * fragile against edits to the next line.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
|
|
|
|
|
|
|
|
/*
 * atomic_dec_return - atomically decrement @v->counter by 1 and return
 * the new value.  Barrier-bracketed like the other *_return ops.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
|
|
|
|
|
|
|
|
/* True if the counter reaches zero after the operation; both are built
 * on the barrier-bracketed *_return variants. */
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
|
|
|
|
|
|
|
|
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"			/* result < 0: skip the store, counter unchanged */
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"			/* lost reservation: retry from the load */
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
|
|
|
|
|
|
|
|
/* The non-value-returning atomic_inc()/atomic_dec() above contain no
 * barriers, so callers needing ordering around them must use these —
 * full smp_mb() on this architecture. */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
|
|
|
|
|
|
|
|
#endif /* __KERNEL__ */
|
|
|
|
#endif /* _ASM_POWERPC_ATOMIC_H_ */
|