author	Jeremy Fitzhardinge	2008-05-20 08:26:18 +0100
committer	Linus Torvalds	2008-05-20 07:51:20 -0700
commit	1bb271db63c356212564aad050b2cf026f800858 (patch)
tree	dfda7f0ef5b7d7048354df1edfd96f368e3ff927 /include/asm-x86
parent	2bd3a99c9d1851182f73d0a024dc5bdb0a470e8c (diff)
x86: fix warning on 32-bit non-PAE
Fix the warning:

  include2/asm/pgtable.h: In function `pte_modify':
  include2/asm/pgtable.h:290: warning: left shift count >= width of type

On 32-bit non-PAE the virtual and physical addresses are both 32 bits,
so it ends up evaluating 1<<32.  Do the shift as a 64-bit shift, then
cast to the appropriate size.  This should all be done at compile time,
and so have no effect on generated code.

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>
Tested-by: Hugh Dickins <hugh@veritas.com>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'include/asm-x86')
-rw-r--r--	include/asm-x86/page.h	2
1 file changed, 1 insertion, 1 deletion
diff --git a/include/asm-x86/page.h b/include/asm-x86/page.h
index 76b35e636d7d..223146da2faf 100644
--- a/include/asm-x86/page.h
+++ b/include/asm-x86/page.h
@@ -29,7 +29,7 @@
/* to align the pointer to the (next) page boundary */
#define PAGE_ALIGN(addr) (((addr)+PAGE_SIZE-1)&PAGE_MASK)
-#define __PHYSICAL_MASK ((((phys_addr_t)1) << __PHYSICAL_MASK_SHIFT) - 1)
+#define __PHYSICAL_MASK ((phys_addr_t)(1ULL << __PHYSICAL_MASK_SHIFT) - 1)
#define __VIRTUAL_MASK ((1UL << __VIRTUAL_MASK_SHIFT) - 1)
#ifndef __ASSEMBLY__
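
For illustration, here is a minimal standalone sketch (not kernel code) of why the old macro warns and why the new one does not. The phys_addr_t typedef and the shift value below are assumptions modelling the 32-bit non-PAE configuration, where __PHYSICAL_MASK_SHIFT equals the full width of the 32-bit physical address.

#include <stdio.h>
#include <stdint.h>

typedef uint32_t phys_addr_t;		/* assumption: 32-bit non-PAE */
#define __PHYSICAL_MASK_SHIFT	32	/* assumption: 32-bit physical address space */

/*
 * Old form: ((phys_addr_t)1) << 32 shifts a 32-bit value by its full
 * width, which is undefined behaviour in C and triggers
 * "warning: left shift count >= width of type".
 */
/* #define __PHYSICAL_MASK ((((phys_addr_t)1) << __PHYSICAL_MASK_SHIFT) - 1) */

/*
 * New form: do the shift in 64 bits (1ULL), truncate to phys_addr_t,
 * then subtract 1.  (phys_addr_t)(1ULL << 32) is 0, and 0 - 1 in
 * unsigned 32-bit arithmetic wraps to 0xffffffff, the desired
 * all-ones mask.
 */
#define __PHYSICAL_MASK ((phys_addr_t)(1ULL << __PHYSICAL_MASK_SHIFT) - 1)

int main(void)
{
	printf("__PHYSICAL_MASK = 0x%08x\n", (unsigned int)__PHYSICAL_MASK);
	return 0;
}

In the PAE and 64-bit configurations phys_addr_t is 64 bits wide, so the shift already happens in 64 bits and the cast does not truncate; the resulting mask value is unchanged by this patch, matching the commit's note that only the warning, not the generated code, is affected.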