| author | Will Deacon | 2013-06-27 12:01:51 +0100 |
|---|---|---|
| committer | Will Deacon | 2013-09-30 16:42:56 +0100 |
| commit | d779c07dd72098a7416d907494f958213b7726f3 (patch) | |
| tree | ce55eca4b34d090604b8dfcc227e74de4eede72c /arch | |
| parent | f38d999c4d16fc0fce4270374f15fbb2d8713c09 (diff) | |
ARM: bitops: prefetch the destination word for write prior to strex
The cost of changing a cacheline from shared to exclusive state can be
significant, especially when this is triggered by an exclusive store,
since it may result in having to retry the transaction.
This patch prefixes our atomic bitops implementation with prefetchw,
to try and grab the line in exclusive state from the start. The testop
macro is left alone, since the barrier semantics limit the usefulness
of prefetching data.
Acked-by: Nicolas Pitre <nico@linaro.org>
Signed-off-by: Will Deacon <will.deacon@arm.com>
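For readers who want to see the idea outside the kernel's assembly macros, below is a minimal userspace C sketch (the names set_bit_sketch and flags are made up for illustration; this is not the kernel implementation): issue a write-intent prefetch on the target word before the atomic read-modify-write, so the cache line is more likely to already be exclusive when the store side of the exclusive pair executes. On ARM cores with the multiprocessing extensions a compiler can lower __builtin_prefetch(addr, 1) to pldw; elsewhere it degrades to an ordinary prefetch or nothing.

```c
/*
 * Minimal userspace sketch (not the kernel implementation): prefetch the
 * target word with write intent before the atomic read-modify-write, so the
 * cache line is hopefully already in exclusive state when the store part of
 * the RMW executes.
 */
#include <stdatomic.h>
#include <stdio.h>

static void set_bit_sketch(_Atomic unsigned long *word, unsigned int bit)
{
	__builtin_prefetch((const void *)word, 1);	/* 1 = prefetch for write */
	atomic_fetch_or_explicit(word, 1UL << bit,
				 memory_order_relaxed);	/* plain bitops have no barrier semantics */
}

int main(void)
{
	static _Atomic unsigned long flags;

	set_bit_sketch(&flags, 5);
	printf("flags = %#lx\n", (unsigned long)flags);	/* prints flags = 0x20 */
	return 0;
}
```

The relaxed memory order here mirrors the fact that the plain bitops carry no barrier semantics, which is the distinction the commit message draws against the testop routines.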
Diffstat (limited to 'arch')
| -rw-r--r-- | arch/arm/lib/bitops.h | 5 |
1 file changed, 5 insertions, 0 deletions
diff --git a/arch/arm/lib/bitops.h b/arch/arm/lib/bitops.h
index d6408d1ee543..e0c68d5bb7dc 100644
--- a/arch/arm/lib/bitops.h
+++ b/arch/arm/lib/bitops.h
@@ -10,6 +10,11 @@ UNWIND(	.fnstart	)
 	and	r3, r0, #31		@ Get bit offset
 	mov	r0, r0, lsr #5
 	add	r1, r1, r0, lsl #2	@ Get word offset
+#if __LINUX_ARM_ARCH__ >= 7
+	.arch_extension	mp
+	ALT_SMP(W(pldw)	[r1])
+	ALT_UP(W(nop))
+#endif
 	mov	r3, r2, lsl r3
 1:	ldrex	r2, [r1]
 	\instr	r2, r2, r3
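For contrast with the hunk above, which touches only the bitop macro, here is a rough C approximation of what the untouched testop-style operations (test_and_set_bit() and friends) do semantically; the helper name is hypothetical and this is not the kernel code. Those operations are fully ordered, which is the barrier behaviour the commit message cites as limiting the usefulness of a prefetch, so no pldw is added on that path.

```c
/*
 * Hypothetical contrast sketch, not kernel code: the testop-style operations
 * return the previous bit value and are fully ordered.  A C analogue uses a
 * sequentially consistent fetch-or; per the commit message, those barrier
 * semantics limit what a write prefetch would buy, so the testop macro is
 * left without a pldw.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static bool test_and_set_bit_sketch(_Atomic unsigned long *word, unsigned int bit)
{
	unsigned long mask = 1UL << bit;
	unsigned long old = atomic_fetch_or_explicit(word, mask,
						     memory_order_seq_cst);	/* fully ordered RMW */

	return (old & mask) != 0;
}

int main(void)
{
	static _Atomic unsigned long flags;

	printf("%d\n", test_and_set_bit_sketch(&flags, 3));	/* 0: bit was clear */
	printf("%d\n", test_and_set_bit_sketch(&flags, 3));	/* 1: bit was already set */
	return 0;
}
```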