author      Mathieu Desnoyers                            2008-02-07 00:16:21 -0800
committer   Linus Torvalds                               2008-02-07 08:42:32 -0800
commit      df80c8c5679c4e6c72e694525d76a9f26d5f33dc (patch)
tree        84144c163426e6d4ecfaa7c50dd9cd67fa91bc64
parent      027bcc27d3d1a218dbf4477964a18fed78983357 (diff)
Add cmpxchg_local to parisc
Use the new generic cmpxchg_local (which disables interrupts). Also use the
generic cmpxchg as a fallback if SMP is not set.
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Kyle McMartin <kyle@mcmartin.ca>
Cc: Grant Grundler <grundler@parisc-linux.org>
Cc: Matthew Wilcox <willy@debian.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
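The "(which disables interrupts)" remark above refers to the generic fallback
path: a *local* compare-and-exchange only has to be atomic with respect to the
current CPU, so masking local interrupts around a plain read-compare-write
sequence is sufficient. Below is a minimal sketch of that pattern, written in
the spirit of __cmpxchg_local_generic() from <asm-generic/cmpxchg-local.h>;
the helper name sketch_cmpxchg_local_u32() is illustrative, not part of the
patch or the generic header.

#include <linux/types.h>
#include <linux/irqflags.h>

/*
 * Illustrative sketch of an interrupt-protected local cmpxchg.  This shows
 * the pattern used by the generic fallback; it is not the exact
 * __cmpxchg_local_generic() implementation.
 */
static inline u32 sketch_cmpxchg_local_u32(volatile u32 *ptr, u32 old, u32 new)
{
	unsigned long flags;
	u32 prev;

	/*
	 * Only code running on this CPU (e.g. an interrupt handler) can race
	 * with us, so masking local interrupts makes the read-compare-write
	 * sequence atomic with respect to the current CPU.
	 */
	local_irq_save(flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	local_irq_restore(flags);

	return prev;
}

On parisc the common 4-byte case (and the 8-byte case under CONFIG_64BIT)
never reaches this fallback: as the diff below shows, those sizes are
dispatched to the existing __cmpxchg_u32()/__cmpxchg_u64() helpers, and only
other sizes go through the generic routine.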
-rw-r--r--   include/asm-parisc/atomic.h | 33
1 file changed, 33 insertions, 0 deletions
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index e894ee35074b..57fcc4a5ebb4 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -122,6 +122,39 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
 				    (unsigned long)_n_, sizeof(*(ptr))); \
   })
 
+#include <asm-generic/cmpxchg-local.h>
+
+static inline unsigned long __cmpxchg_local(volatile void *ptr,
+				      unsigned long old,
+				      unsigned long new_, int size)
+{
+	switch (size) {
+#ifdef CONFIG_64BIT
+	case 8:	return __cmpxchg_u64((unsigned long *)ptr, old, new_);
+#endif
+	case 4:	return __cmpxchg_u32(ptr, old, new_);
+	default:
+		return __cmpxchg_local_generic(ptr, old, new_, size);
+	}
+}
+
+/*
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
+ */
+#define cmpxchg_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
+			(unsigned long)(n), sizeof(*(ptr))))
+#ifdef CONFIG_64BIT
+#define cmpxchg64_local(ptr, o, n)					\
+  ({									\
+	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
+	cmpxchg_local((ptr), (o), (n));					\
+  })
+#else
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#endif
+
 /* Note that we need not lock read accesses - aligned word writes/reads
  * are atomic, so a reader never sees unconsistent values.
  *
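For a sense of how the new macros are meant to be used, here is a hypothetical
caller (not part of this patch) updating per-CPU state, where cmpxchg_local()
only needs to be atomic against interrupt handlers running on the same CPU.
The variable and function names are made up for the example.

#include <linux/percpu.h>

/* Per-CPU counter that is only ever modified by its owning CPU. */
static DEFINE_PER_CPU(unsigned long, sample_count);

static void bump_sample_count(void)
{
	unsigned long *p = &get_cpu_var(sample_count);	/* disables preemption */
	unsigned long old, new;

	do {
		old = *p;
		new = old + 1;
		/*
		 * Retry if an interrupt handler on this CPU updated the
		 * counter between the read and the cmpxchg_local().
		 */
	} while (cmpxchg_local(p, old, new) != old);

	put_cpu_var(sample_count);
}

The SMP-safe cmpxchg()/cmpxchg64() remain the right choice whenever another
CPU may touch the data; cmpxchg_local() only avoids their cost for values that
are private to the current CPU.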