author	Yinghai Lu <yinghai@kernel.org>	2010-10-28 12:50:17 -0400
committer	H. Peter Anvin <hpa@linux.intel.com>	2010-10-28 18:52:36 -0400
commit	419db274bed4269f475a8e78cbe9c917192cfe8b (patch)
tree	b46d6910ed8075fd901fcbe964984ea20a2dfd74 /arch/x86/mm/numa_64.c
parent	47f19a0814e80e1d4e5c17d61b70fca85ea09162 (diff)
x86, memblock: Fix early_node_mem with big reserved region.
Xen can reserve huge amounts of memory for pre-ballooning, but that memory still shows up as RAM in the e820 memory map. Because of the start/end adjusting, early_node_mem() cannot find a range on the node and falls through to the fallback path.

However, the fallback path still uses memblock_x86_find_in_range_node(), which is only partially top-down because it walks the active_range entries from low to high.

Use memblock_find_in_range() instead of memblock_x86_find_in_range_node(), so the fallback path does a real top-down search.

We may still need to make memblock_x86_find_in_range_node() do a fully top-down search as well.

Reported-by: Jeremy Fitzhardinge <jeremy@goop.org>
Tested-by: Jeremy Fitzhardinge <jeremy@goop.org>
Tested-by: Konrad Rzeszutek Wilk <konrad.wilk@oracle.com>
Signed-off-by: Yinghai Lu <yinghai@kernel.org>
LKML-Reference: <4CC9A9C9.8020700@kernel.org>
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Diffstat (limited to 'arch/x86/mm/numa_64.c')
-rw-r--r--	arch/x86/mm/numa_64.c	7
1 files changed, 2 insertions, 5 deletions
diff --git a/arch/x86/mm/numa_64.c b/arch/x86/mm/numa_64.c
index 60f498511dd6..7ffc9b727efd 100644
--- a/arch/x86/mm/numa_64.c
+++ b/arch/x86/mm/numa_64.c
@@ -178,11 +178,8 @@ static void * __init early_node_mem(int nodeid, unsigned long start,
 
 	/* extend the search scope */
 	end = max_pfn_mapped << PAGE_SHIFT;
-	if (end > (MAX_DMA32_PFN<<PAGE_SHIFT))
-		start = MAX_DMA32_PFN<<PAGE_SHIFT;
-	else
-		start = MAX_DMA_PFN<<PAGE_SHIFT;
-	mem = memblock_x86_find_in_range_node(nodeid, start, end, size, align);
+	start = MAX_DMA_PFN << PAGE_SHIFT;
+	mem = memblock_find_in_range(start, end, size, align);
 	if (mem != MEMBLOCK_ERROR)
 		return __va(mem);
 
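
For readers who want the idea in isolation: below is a minimal, purely illustrative userspace sketch (not kernel code; the toy memory map, sizes and helper names are invented for this example) of the kind of high-to-low range search that memblock_find_in_range() performs, and why it copes with a huge reservation at the bottom of the address space.

/*
 * Purely illustrative userspace sketch -- NOT kernel code.  The free
 * ranges below stand in for what is left of the e820 map after a huge
 * low reservation (e.g. Xen pre-ballooning); the real logic lives in
 * memblock_find_in_range().  All names and numbers here are invented.
 */
#include <stdio.h>
#include <stdint.h>

struct range { uint64_t start, end; };		/* free RAM, [start, end) */

static const struct range free_ranges[] = {
	{ 0x000100000ULL, 0x000200000ULL },	/* tiny low fragment (1 MiB)  */
	{ 0x100000000ULL, 0x200000000ULL },	/* big free block above 4 GiB */
};

#define NR_FREE		(sizeof(free_ranges) / sizeof(free_ranges[0]))
#define NOT_FOUND	((uint64_t)-1)

static uint64_t round_down(uint64_t x, uint64_t align)
{
	return x & ~(align - 1);
}

/* Walk the free ranges from high to low, in the spirit of memblock_find_in_range(). */
static uint64_t find_top_down(uint64_t start, uint64_t end,
			      uint64_t size, uint64_t align)
{
	for (int i = (int)NR_FREE - 1; i >= 0; i--) {
		uint64_t lo = free_ranges[i].start > start ? free_ranges[i].start : start;
		uint64_t hi = free_ranges[i].end   < end   ? free_ranges[i].end   : end;
		uint64_t cand;

		if (hi <= lo || hi - lo < size)
			continue;		/* range empty or too small */
		cand = round_down(hi - size, align);
		if (cand >= lo)
			return cand;		/* highest aligned fit wins */
	}
	return NOT_FOUND;
}

int main(void)
{
	/* Ask for 256 MiB, 2 MiB aligned, anywhere between 16 MiB and 8 GiB. */
	uint64_t mem = find_top_down(16ULL << 20, 8ULL << 30,
				     256ULL << 20, 2ULL << 20);

	if (mem == NOT_FOUND)
		printf("no suitable range\n");
	else
		printf("found range at %#llx\n", (unsigned long long)mem);
	return 0;
}

Compiled with any C99-or-later compiler, the sketch satisfies the request from the high free block (it prints an address just under 8 GiB), whereas a low-to-high walk over the same toy map would first try the tiny low fragment, which cannot fit the request.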