author		Christophe Leroy	2021-09-21 17:09:49 +0200
committer	Michael Ellerman	2021-11-30 11:45:57 +1100
commit		f05cab0034babaa9b3dfaf6003ee6493496a8180 (patch)
tree		d142db6cc9ff824d4f0216eb3d6c3d5a4d6b152b
parent		41d65207de9fbff58acd8937a7c3f8940c186a87 (diff)
powerpc/atomics: Remove atomic_inc()/atomic_dec() and friends
Now that atomic_add() and atomic_sub() handle immediate operands,
atomic_inc() and atomic_dec() add no value over the generic fallbacks,
which simply call atomic_add(1) and atomic_sub(1), as sketched below.
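For reference, the generic fallbacks reduce to roughly the following (a
sketch of the pattern in include/linux/atomic/atomic-arch-fallback.h,
shown for illustration; the in-tree code adds instrumentation wrappers):

  static __always_inline void
  arch_atomic_inc(atomic_t *v)
  {
  	/* Increment is just an add of the immediate 1. */
  	arch_atomic_add(1, v);
  }

  static __always_inline void
  arch_atomic_dec(atomic_t *v)
  {
  	/* Decrement likewise folds to a sub of the immediate 1. */
  	arch_atomic_sub(1, v);
  }

Since the parent patch lets arch_atomic_add()/arch_atomic_sub() emit an
add-immediate for constant operands, these should generate the same
lwarx/stwcx. retry loops as the removed hand-coded variants.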
Also remove atomic_inc_not_zero(), which falls back to
atomic_add_unless(), which itself falls back to
atomic_fetch_add_unless(), which now handles immediate operands.
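That chain is roughly (again a sketch of the generic helpers, for
illustration only):

  static __always_inline bool
  arch_atomic_add_unless(atomic_t *v, int a, int u)
  {
  	/* Add a to v unless v == u; report whether the add happened. */
  	return arch_atomic_fetch_add_unless(v, a, u) != u;
  }

  static __always_inline bool
  arch_atomic_inc_not_zero(atomic_t *v)
  {
  	/* "Increment unless zero" is "add 1 unless the value is 0". */
  	return arch_atomic_add_unless(v, 1, 0);
  }

so arch_atomic_fetch_add_unless() is the only powerpc-specific piece
that needs to remain.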
Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/0bc64a2f18726055093dbb2e479cefc60a409cfd.1632236981.git.christophe.leroy@csgroup.eu
 arch/powerpc/include/asm/atomic.h | 95 ---------------------------------
 1 file changed, 95 deletions(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 45f564dbaef5..853dc86864f4 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -118,71 +118,6 @@ ATOMIC_OPS(xor, xor, "", K)
 #undef ATOMIC_OP_RETURN_RELAXED
 #undef ATOMIC_OP
 
-static __inline__ void arch_atomic_inc(atomic_t *v)
-{
-	int t;
-
-	__asm__ __volatile__(
-"1:	lwarx	%0,0,%2		# atomic_inc\n\
-	addic	%0,%0,1\n"
-"	stwcx.	%0,0,%2 \n\
-	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
-	: "cc", "xer");
-}
-#define arch_atomic_inc arch_atomic_inc
-
-static __inline__ int arch_atomic_inc_return_relaxed(atomic_t *v)
-{
-	int t;
-
-	__asm__ __volatile__(
-"1:	lwarx	%0,0,%2		# atomic_inc_return_relaxed\n"
-"	addic	%0,%0,1\n"
-"	stwcx.	%0,0,%2\n"
-"	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
-	: "cc", "xer");
-
-	return t;
-}
-
-static __inline__ void arch_atomic_dec(atomic_t *v)
-{
-	int t;
-
-	__asm__ __volatile__(
-"1:	lwarx	%0,0,%2		# atomic_dec\n\
-	addic	%0,%0,-1\n"
-"	stwcx.	%0,0,%2\n\
-	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
-	: "cc", "xer");
-}
-#define arch_atomic_dec arch_atomic_dec
-
-static __inline__ int arch_atomic_dec_return_relaxed(atomic_t *v)
-{
-	int t;
-
-	__asm__ __volatile__(
-"1:	lwarx	%0,0,%2		# atomic_dec_return_relaxed\n"
-"	addic	%0,%0,-1\n"
-"	stwcx.	%0,0,%2\n"
-"	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (&v->counter)
-	: "cc", "xer");
-
-	return t;
-}
-
-#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
-#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
-
 #define arch_atomic_cmpxchg(v, o, n) \
 	(arch_cmpxchg(&((v)->counter), (o), (n)))
 #define arch_atomic_cmpxchg_relaxed(v, o, n) \
@@ -255,36 +190,6 @@ static __inline__ int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
 }
 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
 
-/**
- * atomic_inc_not_zero - increment unless the number is zero
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1, so long as @v is non-zero.
- * Returns non-zero if @v was non-zero, and zero otherwise.
- */
-static __inline__ int arch_atomic_inc_not_zero(atomic_t *v)
-{
-	int t1, t2;
-
-	__asm__ __volatile__ (
-	PPC_ATOMIC_ENTRY_BARRIER
-"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
-	cmpwi	0,%0,0\n\
-	beq-	2f\n\
-	addic	%1,%0,1\n"
-"	stwcx.	%1,0,%2\n\
-	bne-	1b\n"
-	PPC_ATOMIC_EXIT_BARRIER
-	"\n\
-2:"
-	: "=&r" (t1), "=&r" (t2)
-	: "r" (&v->counter)
-	: "cc", "xer", "memory");
-
-	return t1;
-}
-#define arch_atomic_inc_not_zero(v) arch_atomic_inc_not_zero((v))
-
 /*
  * Atomically test *v and decrement if it is greater than 0.
  * The function returns the old value of *v minus 1, even if