author      Peter Zijlstra                             2016-04-18 00:54:38 +0200
committer   Ingo Molnar                                2016-06-16 10:48:32 +0200
commit      28aa2bda2211f4327d83b44a4f917b4a061b1c56   (patch)
tree        e47ab8a7c49f19bd42722dcc2b8cf083e08cf270   /include/asm-generic
parent      e12133324b7daaa176bb687c1eb59e1a6b203da4   (diff)
locking/atomic: Implement atomic{,64,_long}_fetch_{add,sub,and,andnot,or,xor}{,_relaxed,_acquire,_release}()
Now that all the architectures have implemented support for these new
atomic primitives, add the generic infrastructure to expose and use
them.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Borislav Petkov <bp@suse.de>
Cc: Davidlohr Bueso <dave@stgolabs.net>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Will Deacon <will.deacon@arm.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
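
[Editor's note] The distinguishing property of the new fetch_*() family is that it returns the value the atomic variable held *before* the operation, whereas the existing *_return() operations return the value *after* it. The following is a minimal userspace illustration of that old-value semantic using C11 <stdatomic.h>; it is not the kernel API itself, and the variable names are purely illustrative.

#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
        atomic_int v = 10;

        /* fetch_* style: returns the value before the operation (10). */
        int before = atomic_fetch_add(&v, 5);

        /*
         * *_return style: atomic_add_return() in the kernel would hand back
         * the value after the operation; reconstruct it from the old value.
         */
        int after = before + 5;

        printf("before=%d after=%d now=%d\n", before, after, atomic_load(&v));
        return 0;
}

Running this prints before=10 after=15 now=15, i.e. both the pre- and post-operation values are recoverable from a single fetch_add().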
Diffstat (limited to 'include/asm-generic')
 include/asm-generic/atomic-long.h | 36
 include/asm-generic/atomic.h      | 49
 include/asm-generic/atomic64.h    | 15
3 files changed, 95 insertions, 5 deletions
diff --git a/include/asm-generic/atomic-long.h b/include/asm-generic/atomic-long.h
index 5e1f345b58dd..2d0d3cf791ab 100644
--- a/include/asm-generic/atomic-long.h
+++ b/include/asm-generic/atomic-long.h
@@ -112,6 +112,40 @@ static __always_inline void atomic_long_dec(atomic_long_t *l)
 	ATOMIC_LONG_PFX(_dec)(v);
 }
 
+#define ATOMIC_LONG_FETCH_OP(op, mo)					\
+static inline long							\
+atomic_long_fetch_##op##mo(long i, atomic_long_t *l)			\
+{									\
+	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
+									\
+	return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(i, v);		\
+}
+
+ATOMIC_LONG_FETCH_OP(add, )
+ATOMIC_LONG_FETCH_OP(add, _relaxed)
+ATOMIC_LONG_FETCH_OP(add, _acquire)
+ATOMIC_LONG_FETCH_OP(add, _release)
+ATOMIC_LONG_FETCH_OP(sub, )
+ATOMIC_LONG_FETCH_OP(sub, _relaxed)
+ATOMIC_LONG_FETCH_OP(sub, _acquire)
+ATOMIC_LONG_FETCH_OP(sub, _release)
+ATOMIC_LONG_FETCH_OP(and, )
+ATOMIC_LONG_FETCH_OP(and, _relaxed)
+ATOMIC_LONG_FETCH_OP(and, _acquire)
+ATOMIC_LONG_FETCH_OP(and, _release)
+ATOMIC_LONG_FETCH_OP(andnot, )
+ATOMIC_LONG_FETCH_OP(andnot, _relaxed)
+ATOMIC_LONG_FETCH_OP(andnot, _acquire)
+ATOMIC_LONG_FETCH_OP(andnot, _release)
+ATOMIC_LONG_FETCH_OP(or, )
+ATOMIC_LONG_FETCH_OP(or, _relaxed)
+ATOMIC_LONG_FETCH_OP(or, _acquire)
+ATOMIC_LONG_FETCH_OP(or, _release)
+ATOMIC_LONG_FETCH_OP(xor, )
+ATOMIC_LONG_FETCH_OP(xor, _relaxed)
+ATOMIC_LONG_FETCH_OP(xor, _acquire)
+ATOMIC_LONG_FETCH_OP(xor, _release)
+
 #define ATOMIC_LONG_OP(op)						\
 static __always_inline void						\
 atomic_long_##op(long i, atomic_long_t *l)				\
@@ -124,9 +158,9 @@ atomic_long_##op(long i, atomic_long_t *l)			\
 ATOMIC_LONG_OP(add)
 ATOMIC_LONG_OP(sub)
 ATOMIC_LONG_OP(and)
+ATOMIC_LONG_OP(andnot)
 ATOMIC_LONG_OP(or)
 ATOMIC_LONG_OP(xor)
-ATOMIC_LONG_OP(andnot)
 
 #undef ATOMIC_LONG_OP
 
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index 74f1a3704d7a..a2304ccf4ed0 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -61,6 +61,18 @@ static inline int atomic_##op##_return(int i, atomic_t *v)	\
 	return c c_op i;						\
 }
 
+#define ATOMIC_FETCH_OP(op, c_op)					\
+static inline int atomic_fetch_##op(int i, atomic_t *v)		\
+{									\
+	int c, old;							\
+									\
+	c = v->counter;							\
+	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
+		c = old;						\
+									\
+	return c;							\
+}
+
 #else
 
 #include <linux/irqflags.h>
@@ -88,6 +100,20 @@ static inline int atomic_##op##_return(int i, atomic_t *v)	\
 	return ret;							\
 }
 
+#define ATOMIC_FETCH_OP(op, c_op)					\
+static inline int atomic_fetch_##op(int i, atomic_t *v)		\
+{									\
+	unsigned long flags;						\
+	int ret;							\
+									\
+	raw_local_irq_save(flags);					\
+	ret = v->counter;						\
+	v->counter = v->counter c_op i;					\
+	raw_local_irq_restore(flags);					\
+									\
+	return ret;							\
+}
+
 #endif /* CONFIG_SMP */
 
 #ifndef atomic_add_return
@@ -98,6 +124,28 @@ ATOMIC_OP_RETURN(add, +)
 ATOMIC_OP_RETURN(sub, -)
 #endif
 
+#ifndef atomic_fetch_add
+ATOMIC_FETCH_OP(add, +)
+#endif
+
+#ifndef atomic_fetch_sub
+ATOMIC_FETCH_OP(sub, -)
+#endif
+
+#ifndef atomic_fetch_and
+ATOMIC_FETCH_OP(and, &)
+#endif
+
+#ifndef atomic_fetch_or
+#define atomic_fetch_or atomic_fetch_or
+
+ATOMIC_FETCH_OP(or, |)
+#endif
+
+#ifndef atomic_fetch_xor
+ATOMIC_FETCH_OP(xor, ^)
+#endif
+
 #ifndef atomic_and
 ATOMIC_OP(and, &)
 #endif
@@ -110,6 +158,7 @@ ATOMIC_OP(or, |)
 ATOMIC_OP(xor, ^)
 #endif
 
+#undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
diff --git a/include/asm-generic/atomic64.h b/include/asm-generic/atomic64.h
index d48e78ccad3d..dad68bf46c77 100644
--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -27,16 +27,23 @@ extern void	 atomic64_##op(long long a, atomic64_t *v);
 #define ATOMIC64_OP_RETURN(op)						\
 extern long long atomic64_##op##_return(long long a, atomic64_t *v);
 
-#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)
+#define ATOMIC64_FETCH_OP(op)						\
+extern long long atomic64_fetch_##op(long long a, atomic64_t *v);
+
+#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)
 
 ATOMIC64_OPS(add)
 ATOMIC64_OPS(sub)
 
-ATOMIC64_OP(and)
-ATOMIC64_OP(or)
-ATOMIC64_OP(xor)
+#undef ATOMIC64_OPS
+#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)
+
+ATOMIC64_OPS(and)
+ATOMIC64_OPS(or)
+ATOMIC64_OPS(xor)
 
 #undef ATOMIC64_OPS
+#undef ATOMIC64_FETCH_OP
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
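
[Editor's note] On SMP builds, the generic ATOMIC_FETCH_OP() added to atomic.h above falls back to a cmpxchg() loop: read the counter, attempt to install "old c_op i", and retry until the compare-and-exchange observes the value that was read. The sketch below re-implements that loop for the "or" case in userspace C11; fetch_or_demo() is a hypothetical name for illustration only, not a kernel interface.

#include <stdatomic.h>
#include <stdio.h>

/*
 * Illustrative userspace analogue of the generic ATOMIC_FETCH_OP() cmpxchg
 * loop, specialised for OR: returns the value *v held before the operation.
 */
static int fetch_or_demo(atomic_int *v, int i)
{
        int c = atomic_load_explicit(v, memory_order_relaxed);

        /*
         * Retry while another thread changed *v between our read and our
         * attempt to store c | i; on failure, c is reloaded with the
         * current value by atomic_compare_exchange_weak().
         */
        while (!atomic_compare_exchange_weak(v, &c, c | i))
                ;

        return c;       /* the old value, as with the kernel's fetch_or() */
}

int main(void)
{
        atomic_int v = 0x3;
        int old = fetch_or_demo(&v, 0x4);

        printf("old=0x%x now=0x%x\n", old, atomic_load(&v));
        return 0;
}

This prints old=0x3 now=0x7, matching the fetch-then-modify semantics the commit exposes generically; architectures with native fetch instructions simply override these fallbacks.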