author      Christophe Leroy    2019-08-16 07:52:20 +0000
committer   Michael Ellerman    2019-08-20 21:22:15 +1000
commit      7ab0b7cb8951d4095d73e203759b74d41916e455 (patch)
tree        4f63c162df91aa9231a46dcf1f4abc83b3cba0a5
parent      f204338f8e29777302042cba578b0da44f69695f (diff)
powerpc/32: Add warning on misaligned copy_page() or clear_page()
copy_page() and clear_page() expect a page-aligned destination, and use the dcbz instruction to clear entire cache lines based on the assumption that the destination is cache aligned.

As shown during analysis of a bug in the BTRFS filesystem, a misaligned copy_page() can create bugs that are difficult to locate (see Link).

Add an explicit WARNING when copy_page() or clear_page() is called with a misaligned destination.

Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://bugzilla.kernel.org/show_bug.cgi?id=204371
Link: https://lore.kernel.org/r/c6cea38f90480268d439ca44a645647e260fff09.1565941808.git.christophe.leroy@c-s.fr
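To illustrate the failure mode described above: dcbz always zeroes a whole cache line, so the clear_page() loop applied to a misaligned destination also clobbers bytes outside the intended page. The following userspace model is only a sketch of that behaviour (dcbz_model(), the 32-byte line size and the 4096-byte page size are illustrative assumptions, not kernel code):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define L1_CACHE_BYTES	32	/* assumed ppc32 cache line size */
#define PAGE_SIZE	4096

static uint8_t mem[3 * PAGE_SIZE];

/* dcbz zeroes the whole cache line containing addr, not just addr */
static void dcbz_model(uintptr_t addr)
{
	uintptr_t line = addr & ~(uintptr_t)(L1_CACHE_BYTES - 1);

	memset(&mem[line], 0, L1_CACHE_BYTES);
}

int main(void)
{
	uintptr_t dst = PAGE_SIZE + 8;	/* misaligned destination */
	uintptr_t addr;

	memset(mem, 0xff, sizeof(mem));

	/* same loop shape as clear_page() */
	for (addr = dst; addr < dst + PAGE_SIZE; addr += L1_CACHE_BYTES)
		dcbz_model(addr);

	/* the 8 bytes just below dst were zeroed too, even though they
	 * belong to the previous page: the hard-to-find corruption */
	printf("mem[dst - 8] = %u\n", mem[dst - 8]);
	return 0;
}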
-rw-r--r--   arch/powerpc/include/asm/page_32.h   4
-rw-r--r--   arch/powerpc/kernel/misc_32.S        5
2 files changed, 9 insertions, 0 deletions
diff --git a/arch/powerpc/include/asm/page_32.h b/arch/powerpc/include/asm/page_32.h
index 683dfbc67ca8..d64dfe3ac712 100644
--- a/arch/powerpc/include/asm/page_32.h
+++ b/arch/powerpc/include/asm/page_32.h
@@ -40,6 +40,8 @@ typedef unsigned long long pte_basic_t;
typedef unsigned long pte_basic_t;
#endif
+#include <asm/bug.h>
+
/*
* Clear page using the dcbz instruction, which doesn't cause any
* memory traffic (except to write out any cache lines which get
@@ -49,6 +51,8 @@ static inline void clear_page(void *addr)
{
unsigned int i;
+ WARN_ON((unsigned long)addr & (L1_CACHE_BYTES - 1));
+
for (i = 0; i < PAGE_SIZE / L1_CACHE_BYTES; i++, addr += L1_CACHE_BYTES)
dcbz(addr);
}
diff --git a/arch/powerpc/kernel/misc_32.S b/arch/powerpc/kernel/misc_32.S
index fe4bd321730e..02d90e1ebf65 100644
--- a/arch/powerpc/kernel/misc_32.S
+++ b/arch/powerpc/kernel/misc_32.S
@@ -452,7 +452,12 @@ END_FTR_SECTION_IFSET(CPU_FTR_COHERENT_ICACHE)
stwu r9,16(r3)
_GLOBAL(copy_page)
+ rlwinm r5, r3, 0, L1_CACHE_BYTES - 1
addi r3,r3,-4
+
+0: twnei r5, 0 /* WARN if r3 is not cache aligned */
+ EMIT_BUG_ENTRY 0b,__FILE__,__LINE__, BUGFLAG_WARNING
+
addi r4,r4,-4
li r5,4
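For reference, a rough C equivalent of the three instructions added to copy_page() above; this is only a sketch (check_copy_page_alignment() is a hypothetical name, the real check stays in assembly so copy_page()'s register usage is untouched):

#include <asm/bug.h>
#include <asm/cache.h>

/* Hypothetical helper, for illustration only. */
static inline void check_copy_page_alignment(void *dest)
{
	/* rlwinm r5, r3, 0, L1_CACHE_BYTES - 1: keep only the low bits of
	 * the destination pointer (r3 is the first argument, i.e. dest) */
	unsigned long misalign = (unsigned long)dest & (L1_CACHE_BYTES - 1);

	/* twnei r5, 0 traps when misalign != 0, and EMIT_BUG_ENTRY with
	 * BUGFLAG_WARNING turns that trap into a warning, i.e.: */
	WARN_ON(misalign);
}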