@@ -375,8 +375,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 	long __i = i;
 	__asm__ __volatile__(
 		LOCK_PREFIX "xaddq %0, %1;"
-		:"=r"(i)
-		:"m"(v->counter), "0"(i));
+		:"+r" (i), "+m" (v->counter)
+		: : "memory");
 	return i + __i;
 }
 
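For reference, a minimal user-space sketch of the same xaddq pattern with the new "+r"/"+m" constraints and "memory" clobber; the my_atomic64_t type and function names are made up for illustration and are not part of the header:

#include <stdio.h>

typedef struct { volatile long counter; } my_atomic64_t;

/* xaddq exchanges the register with the memory operand and stores the sum
 * in memory, so after the asm 'i' holds the old counter value. */
static inline long my_atomic64_add_return(long i, my_atomic64_t *v)
{
	long __i = i;
	__asm__ __volatile__(
		"lock; xaddq %0, %1"
		: "+r" (i), "+m" (v->counter)
		: : "memory");
	return i + __i;		/* old value + addend = new value */
}

int main(void)
{
	my_atomic64_t v = { 40 };
	printf("%ld\n", my_atomic64_add_return(2, &v));	/* prints 42 */
	return 0;
}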
@@ -388,7 +388,10 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 #define atomic64_inc_return(v)  (atomic64_add_return(1,v))
 #define atomic64_dec_return(v)  (atomic64_sub_return(1,v))
 
-#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
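As a usage sketch (the claim_slot() helper below is hypothetical, not part of this patch): atomic64_cmpxchg() returns the value that was in the counter, so the swap took effect only if that return value equals the expected old value.

static inline int claim_slot(atomic64_t *slot, long expected, long mine)
{
	/* Store 'mine' only if the slot still holds 'expected'. */
	return atomic64_cmpxchg(slot, expected, mine) == expected;
}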
@@ -402,7 +405,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
  */
 #define atomic_add_unless(v, a, u)				\
 ({								\
-	int c, old;						\
+	__typeof__((v)->counter) c, old;			\
 	c = atomic_read(v);					\
 	for (;;) {						\
 		if (unlikely(c == (u)))				\
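Written out as a plain function, the loop this macro expands to looks roughly like the sketch below (illustrative only, with the locals now typed like the counter itself rather than hard-coded as int):

static inline int my_atomic_add_unless(atomic_t *v, int a, int u)
{
	__typeof__(v->counter) c, old;

	c = atomic_read(v);
	for (;;) {
		if (c == u)
			break;		/* excluded value: add nothing */
		old = atomic_cmpxchg(v, c, c + a);
		if (old == c)
			break;		/* cmpxchg succeeded */
		c = old;		/* lost a race: retry with fresh value */
	}
	return c != u;
}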
@@ -416,6 +419,31 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic64_add_unless(v, a, u)				\
+({								\
+	__typeof__((v)->counter) c, old;			\
+	c = atomic64_read(v);					\
+	for (;;) {						\
+		if (unlikely(c == (u)))				\
+			break;					\
+		old = atomic64_cmpxchg((v), c, c + (a));	\
+		if (likely(old == c))				\
+			break;					\
+		c = old;					\
+	}							\
+	c != (u);						\
+})
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
 __asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
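A typical use of the new atomic64_inc_not_zero() is a try-get on a 64-bit reference count; the struct and helper below are hypothetical and only show the intended semantics:

struct obj {
	atomic64_t refs;
};

/* Returns non-zero and takes a reference if the object is still live,
 * zero if refs had already dropped to 0. */
static inline int obj_tryget(struct obj *o)
{
	return atomic64_inc_not_zero(&o->refs);
}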