author     Ard Biesheuvel                            2023-04-04 12:36:25 +0200
committer  Greg Kroah-Hartman                        2023-04-13 16:55:24 +0200
commit     70f1913824fd704a44fc2e17ac22a88cb9652f5b (patch)
tree       ca7b23fcdeee596838ae3250a2527599287f654c /arch
parent     332e7f93f112bd5ee3bb876cc8b3da9083029e01 (diff)
arm64: compat: Work around uninitialized variable warning
[ Upstream commit 32d85999680601d01b2a36713c9ffd7397c8688b ]

Dan reports that smatch complains about a potential uninitialized
variable being used in the compat alignment fixup code.

The logic is not wrong per se, but we do end up using an uninitialized
variable if reading the instruction that triggered the alignment fault
from user space faults, even if the fault ensures that the
uninitialized value doesn't propagate any further.

Given that we just give up and return 1 if any fault occurs when
reading the instruction, let's get rid of the 'success handling'
pattern that captures the fault in a variable and aborts later, and
instead, just return 1 immediately if any of the get_user() calls
result in an exception.

Fixes: 3fc24ef32d3b ("arm64: compat: Implement misalignment fixups for multiword loads")
Reported-by: kernel test robot <lkp@intel.com>
Reported-by: Dan Carpenter <error27@gmail.com>
Link: https://lore.kernel.org/r/202304021214.gekJ8yRc-lkp@intel.com/
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20230404103625.2386382-1-ardb@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Sasha Levin <sashal@kernel.org>
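For illustration, here is a minimal userspace sketch of the pattern change
described above; read_u16(), fixup_deferred() and fixup_early_return() are
hypothetical stand-ins for get_user() and the fixup logic, not code from the
patch:

/* Standalone sketch only: read_u16() imitates a get_user()-style accessor
 * that returns nonzero on a faulting access. Names are hypothetical. */
#include <stdint.h>
#include <stdio.h>

static int read_u16(const uint16_t *src, uint16_t *val)
{
        if (!src)
                return 1;       /* simulate a fault on the user access */
        *val = *src;
        return 0;
}

/* Old shape: record the fault, keep decoding, and only bail out at the end.
 * If the second read fails, the combination below consumes 'hi' before it
 * was ever written - harmless, since we return 1 anyway, but enough to
 * trigger an uninitialized-variable warning from static analysis. */
static int fixup_deferred(const uint16_t *p, uint32_t *instr)
{
        uint16_t lo = 0, hi;
        int fault;

        fault = read_u16(p, &lo);
        if (!fault) {
                fault = read_u16(p + 1, &hi);
                *instr = ((uint32_t)lo << 16) | hi;     /* 'hi' may be unwritten */
        }
        if (fault)
                return 1;
        return 0;
}

/* New shape: return 1 as soon as either read fails, so every value is
 * written before it is used. */
static int fixup_early_return(const uint16_t *p, uint32_t *instr)
{
        uint16_t lo, hi;

        if (read_u16(p, &lo))
                return 1;
        if (read_u16(p + 1, &hi))
                return 1;
        *instr = ((uint32_t)lo << 16) | hi;
        return 0;
}

int main(void)
{
        uint16_t words[2] = { 0xf8d0, 0x1004 };         /* arbitrary halfwords */
        uint32_t instr = 0;
        int ret_old, ret_new;

        ret_old = fixup_deferred(words, &instr);
        ret_new = fixup_early_return(words, &instr);
        printf("deferred: %d, early: %d, instr=0x%08x\n",
               ret_old, ret_new, (unsigned int)instr);
        return 0;
}

Both variants return 0 on the happy path shown here; they differ only on the
faulting path, which is exactly where the warning fired.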
Diffstat (limited to 'arch')
-rw-r--r--  arch/arm64/kernel/compat_alignment.c  32
1 file changed, 14 insertions, 18 deletions
diff --git a/arch/arm64/kernel/compat_alignment.c b/arch/arm64/kernel/compat_alignment.c
index 5edec2f49ec9..deff21bfa680 100644
--- a/arch/arm64/kernel/compat_alignment.c
+++ b/arch/arm64/kernel/compat_alignment.c
@@ -314,36 +314,32 @@ int do_compat_alignment_fixup(unsigned long addr, struct pt_regs *regs)
        int (*handler)(unsigned long addr, u32 instr, struct pt_regs *regs);
        unsigned int type;
        u32 instr = 0;
-       u16 tinstr = 0;
        int isize = 4;
        int thumb2_32b = 0;
-       int fault;

        instrptr = instruction_pointer(regs);

        if (compat_thumb_mode(regs)) {
                __le16 __user *ptr = (__le16 __user *)(instrptr & ~1);
+               u16 tinstr, tinst2;

-               fault = alignment_get_thumb(regs, ptr, &tinstr);
-               if (!fault) {
-                       if (IS_T32(tinstr)) {
-                               /* Thumb-2 32-bit */
-                               u16 tinst2;
-                               fault = alignment_get_thumb(regs, ptr + 1, &tinst2);
-                               instr = ((u32)tinstr << 16) | tinst2;
-                               thumb2_32b = 1;
-                       } else {
-                               isize = 2;
-                               instr = thumb2arm(tinstr);
-                       }
+               if (alignment_get_thumb(regs, ptr, &tinstr))
+                       return 1;
+
+               if (IS_T32(tinstr)) { /* Thumb-2 32-bit */
+                       if (alignment_get_thumb(regs, ptr + 1, &tinst2))
+                               return 1;
+                       instr = ((u32)tinstr << 16) | tinst2;
+                       thumb2_32b = 1;
+               } else {
+                       isize = 2;
+                       instr = thumb2arm(tinstr);
                }
        } else {
-               fault = alignment_get_arm(regs, (__le32 __user *)instrptr, &instr);
+               if (alignment_get_arm(regs, (__le32 __user *)instrptr, &instr))
+                       return 1;
        }

-       if (fault)
-               return 1;
-
        switch (CODING_BITS(instr)) {
        case 0x00000000:        /* 3.13.4 load/store instruction extensions */
                if (LDSTHD_I_BIT(instr))