author     Mathieu Desnoyers    2008-02-07 00:16:13 -0800
committer  Linus Torvalds       2008-02-07 08:42:31 -0800
commit  10b8827068377a11ed0e396248f7d02751fe5f17 (patch)
tree    2ca6e89cc47354ac2cec717e186ede7f24e0b8fd /include/asm-blackfin
parent  5e86c11d3eb4662000f3ced7344352b2ca319d03 (diff)
Add cmpxchg_local to blackfin, replace __cmpxchg by generic cmpxchg
Use the new generic cmpxchg_local (disables interrupts). Also use the generic
cmpxchg as fallback if SMP is not set, since nobody seems to know why __cmpxchg
was implemented in assembly in the first place rather than in plain C.

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Bryan Wu <bryan.wu@analog.com>
Cc: Michael Frysinger <michael.frysinger@analog.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
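[Editorial sketch, not part of the commit] For readers unfamiliar with the generic helper this patch switches to: the approach is to disable local interrupts, compare, conditionally store, and return the previous value. The code below is a simplified illustration of that technique, not the verbatim contents of asm-generic/cmpxchg-local.h; the `_sketch` suffix marks it as illustrative.

	#include <linux/irqflags.h>
	#include <linux/types.h>

	/* Atomic only w.r.t. the current CPU: masking IRQs is sufficient. */
	static inline unsigned long __cmpxchg_local_generic_sketch(volatile void *ptr,
			unsigned long old, unsigned long new, int size)
	{
		unsigned long flags, prev;

		local_irq_save(flags);
		switch (size) {
		case 1:
			prev = *(volatile u8 *)ptr;
			if (prev == old)
				*(volatile u8 *)ptr = (u8)new;
			break;
		case 2:
			prev = *(volatile u16 *)ptr;
			if (prev == old)
				*(volatile u16 *)ptr = (u16)new;
			break;
		case 4:
			prev = *(volatile u32 *)ptr;
			if (prev == old)
				*(volatile u32 *)ptr = (u32)new;
			break;
		default:
			/* The real helper rejects bad sizes at build time;
			 * here we just report failure (~old never equals old). */
			prev = ~old;
			break;
		}
		local_irq_restore(flags);
		/* Caller compares the return value against 'old' to detect success. */
		return prev;
	}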
Diffstat (limited to 'include/asm-blackfin')
-rw-r--r--  include/asm-blackfin/system.h  57
1 file changed, 11 insertions(+), 46 deletions(-)
diff --git a/include/asm-blackfin/system.h b/include/asm-blackfin/system.h
index 4a927379ee1c..51494ef5bb41 100644
--- a/include/asm-blackfin/system.h
+++ b/include/asm-blackfin/system.h
@@ -183,55 +183,20 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
return tmp;
}
+#include <asm-generic/cmpxchg-local.h>
+
/*
- * Atomic compare and exchange. Compare OLD with MEM, if identical,
- * store NEW in MEM. Return the initial value in MEM. Success is
- * indicated by comparing RETURN with OLD.
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
*/
-static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
- unsigned long new, int size)
-{
- unsigned long tmp = 0;
- unsigned long flags = 0;
-
- local_irq_save(flags);
-
- switch (size) {
- case 1:
- __asm__ __volatile__
- ("%0 = b%3 (z);\n\t"
- "CC = %1 == %0;\n\t"
- "IF !CC JUMP 1f;\n\t"
- "b%3 = %2;\n\t"
- "1:\n\t"
- : "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
- break;
- case 2:
- __asm__ __volatile__
- ("%0 = w%3 (z);\n\t"
- "CC = %1 == %0;\n\t"
- "IF !CC JUMP 1f;\n\t"
- "w%3 = %2;\n\t"
- "1:\n\t"
- : "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
- break;
- case 4:
- __asm__ __volatile__
- ("%0 = %3;\n\t"
- "CC = %1 == %0;\n\t"
- "IF !CC JUMP 1f;\n\t"
- "%3 = %2;\n\t"
- "1:\n\t"
- : "=&d" (tmp) : "d" (old), "d" (new), "m" (*__xg(ptr)) : "memory");
- break;
- }
- local_irq_restore(flags);
- return tmp;
-}
+#define cmpxchg_local(ptr, o, n) \
+ ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+ (unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
-#define cmpxchg(ptr,o,n)\
- ((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
- (unsigned long)(n),sizeof(*(ptr))))
+#ifndef CONFIG_SMP
+#include <asm-generic/cmpxchg.h>
+#endif
#define prepare_to_switch() do { } while(0)
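[Editorial sketch, not part of the commit] cmpxchg_local() is atomic only with respect to the current CPU, which is enough for per-CPU data touched from both process and interrupt context. The following is a minimal, hypothetical usage example; the variable and function names are invented for illustration.

	#include <linux/percpu.h>

	static DEFINE_PER_CPU(unsigned long, example_event_count);

	static void example_count_event(void)
	{
		/* get_cpu_var() disables preemption and yields this CPU's slot. */
		unsigned long *cnt = &get_cpu_var(example_event_count);
		unsigned long old;

		/* Retry until the compare-and-store observes an unchanged value. */
		do {
			old = *cnt;
		} while (cmpxchg_local(cnt, old, old + 1) != old);

		put_cpu_var(example_event_count);
	}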