author     Christoph Lameter <cl@linux.com>                 2009-06-16 18:32:46 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>   2009-06-16 22:47:41 -0400
commit     b70d94ee438b3fd9c15c7691d7a932a135c18101 (patch)
tree       a33e88b812c0cd8a18017e34dcddcea7715320c9 /include/linux/gfp.h
parent     31c911329e048b715a1dfeaaf617be9430fd7f4e (diff)
page-allocator: use integer fields lookup for gfp_zone and check for errors in flags passed to the page allocator
This simplifies the code in gfp_zone() and also preserves the compiler's
ability to use constant folding to get rid of gfp_zone() processing.

The zone lookup is done through a bitfield stored in an integer, so the
code in gfp_zone() is a simple extraction of bits from a constant
bitfield.  The compiler generates a load of a constant into a register
and then performs a shift-and-mask operation to get the zone from a gfp_t.
No cachelines are touched and no branches need to be predicted.

We use some macro tricks here to convince the compiler to always do the
constant folding where possible.
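The core idea is easier to see outside the kernel.  Below is a minimal,
self-contained user-space sketch of the same pattern: one small zone value
is packed per flag combination into a single word-sized constant, and the
lookup is a shift and a mask.  The flag values, the four-zone enum, and the
table entries here are simplified stand-ins chosen for illustration, not
the kernel's definitions.

/*
 * Standalone sketch (not the kernel code): pack one small "zone" value per
 * flag combination into a single unsigned long, then look a zone up with a
 * shift and a mask.  Names mirror the patch; values are illustrative only.
 */
#include <stdio.h>

enum zone { ZONE_DMA, ZONE_NORMAL, ZONE_HIGHMEM, ZONE_MOVABLE };

#define ZONES_SHIFT   2          /* each table entry is 2 bits wide */
#define FLAG_DMA      0x1u
#define FLAG_HIGHMEM  0x2u
#define FLAG_MOVABLE  0x8u

/* one entry per flag combination, placed at bit (flags * ZONES_SHIFT) */
#define ZONE_TABLE ( \
	((unsigned long)ZONE_NORMAL  << (0 * ZONES_SHIFT))                        | \
	((unsigned long)ZONE_DMA     << (FLAG_DMA * ZONES_SHIFT))                 | \
	((unsigned long)ZONE_HIGHMEM << (FLAG_HIGHMEM * ZONES_SHIFT))             | \
	((unsigned long)ZONE_MOVABLE << ((FLAG_MOVABLE | FLAG_HIGHMEM) * ZONES_SHIFT)))

static enum zone lookup_zone(unsigned int flags)
{
	/* constant table >> (index * entry width), masked to one entry: no branches */
	return (ZONE_TABLE >> (flags * ZONES_SHIFT)) & ((1u << ZONES_SHIFT) - 1);
}

int main(void)
{
	printf("%d\n", lookup_zone(0));                            /* ZONE_NORMAL (1) */
	printf("%d\n", lookup_zone(FLAG_DMA));                     /* ZONE_DMA (0) */
	printf("%d\n", lookup_zone(FLAG_MOVABLE | FLAG_HIGHMEM));  /* ZONE_MOVABLE (3) */
	return 0;
}

When the flags argument is a compile-time constant, the whole expression in
lookup_zone() folds to a constant, which is what the patch relies on for
the common GFP_KERNEL-style call sites.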
Signed-off-by: Christoph Lameter <cl@linux-foundation.org>
Cc: KAMEZAWA Hiroyuki <kamezawa.hiroyu@jp.fujitsu.com>
Reviewed-by: Mel Gorman <mel@csn.ul.ie>
Cc: Nick Piggin <nickpiggin@yahoo.com.au>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'include/linux/gfp.h')
-rw-r--r--  include/linux/gfp.h  111
1 file changed, 96 insertions(+), 15 deletions(-)
diff --git a/include/linux/gfp.h b/include/linux/gfp.h
index 06b7e8cc80ac..412178afd423 100644
--- a/include/linux/gfp.h
+++ b/include/linux/gfp.h
@@ -21,7 +21,8 @@ struct vm_area_struct;
 #define __GFP_DMA	((__force gfp_t)0x01u)
 #define __GFP_HIGHMEM	((__force gfp_t)0x02u)
 #define __GFP_DMA32	((__force gfp_t)0x04u)
-
+#define __GFP_MOVABLE	((__force gfp_t)0x08u)  /* Page is movable */
+#define GFP_ZONEMASK	(__GFP_DMA|__GFP_HIGHMEM|__GFP_DMA32|__GFP_MOVABLE)
 /*
  * Action modifiers - doesn't change the zoning
  *
@@ -51,7 +52,6 @@ struct vm_area_struct;
 #define __GFP_HARDWALL   ((__force gfp_t)0x20000u) /* Enforce hardwall cpuset memory allocs */
 #define __GFP_THISNODE	((__force gfp_t)0x40000u)/* No fallback, no policies */
 #define __GFP_RECLAIMABLE ((__force gfp_t)0x80000u) /* Page is reclaimable */
-#define __GFP_MOVABLE	((__force gfp_t)0x100000u)  /* Page is movable */
 
 #define __GFP_BITS_SHIFT 21	/* Room for 21 __GFP_FOO bits */
 #define __GFP_BITS_MASK ((__force gfp_t)((1 << __GFP_BITS_SHIFT) - 1))
@@ -116,24 +116,105 @@ static inline int allocflags_to_migratetype(gfp_t gfp_flags)
 		((gfp_flags & __GFP_RECLAIMABLE) != 0);
 }
 
-static inline enum zone_type gfp_zone(gfp_t flags)
-{
+#ifdef CONFIG_HIGHMEM
+#define OPT_ZONE_HIGHMEM ZONE_HIGHMEM
+#else
+#define OPT_ZONE_HIGHMEM ZONE_NORMAL
+#endif
+
 #ifdef CONFIG_ZONE_DMA
-	if (flags & __GFP_DMA)
-		return ZONE_DMA;
+#define OPT_ZONE_DMA ZONE_DMA
+#else
+#define OPT_ZONE_DMA ZONE_NORMAL
 #endif
+
 #ifdef CONFIG_ZONE_DMA32
-	if (flags & __GFP_DMA32)
-		return ZONE_DMA32;
+#define OPT_ZONE_DMA32 ZONE_DMA32
+#else
+#define OPT_ZONE_DMA32 ZONE_NORMAL
 #endif
-	if ((flags & (__GFP_HIGHMEM | __GFP_MOVABLE)) ==
-			(__GFP_HIGHMEM | __GFP_MOVABLE))
-		return ZONE_MOVABLE;
-#ifdef CONFIG_HIGHMEM
-	if (flags & __GFP_HIGHMEM)
-		return ZONE_HIGHMEM;
+
+/*
+ * GFP_ZONE_TABLE is a word size bitstring that is used for looking up the
+ * zone to use given the lowest 4 bits of gfp_t. Entries are ZONE_SHIFT long
+ * and there are 16 of them to cover all possible combinations of
+ * __GFP_DMA, __GFP_DMA32, __GFP_MOVABLE and __GFP_HIGHMEM
+ *
+ * The zone fallback order is MOVABLE=>HIGHMEM=>NORMAL=>DMA32=>DMA.
+ * But GFP_MOVABLE is not only a zone specifier but also an allocation
+ * policy. Therefore __GFP_MOVABLE plus another zone selector is valid.
+ * Only 1bit of the lowest 3 bit (DMA,DMA32,HIGHMEM) can be set to "1".
+ *
+ *       bit       result
+ *       =================
+ *       0x0    => NORMAL
+ *       0x1    => DMA or NORMAL
+ *       0x2    => HIGHMEM or NORMAL
+ *       0x3    => BAD (DMA+HIGHMEM)
+ *       0x4    => DMA32 or DMA or NORMAL
+ *       0x5    => BAD (DMA+DMA32)
+ *       0x6    => BAD (HIGHMEM+DMA32)
+ *       0x7    => BAD (HIGHMEM+DMA32+DMA)
+ *       0x8    => NORMAL (MOVABLE+0)
+ *       0x9    => DMA or NORMAL (MOVABLE+DMA)
+ *       0xa    => MOVABLE (Movable is valid only if HIGHMEM is set too)
+ *       0xb    => BAD (MOVABLE+HIGHMEM+DMA)
+ *       0xc    => DMA32 (MOVABLE+HIGHMEM+DMA32)
+ *       0xd    => BAD (MOVABLE+DMA32+DMA)
+ *       0xe    => BAD (MOVABLE+DMA32+HIGHMEM)
+ *       0xf    => BAD (MOVABLE+DMA32+HIGHMEM+DMA)
+ *
+ *       ZONES_SHIFT must be <= 2 on 32 bit platforms.
+ */
+
+#if 16 * ZONES_SHIFT > BITS_PER_LONG
+#error ZONES_SHIFT too large to create GFP_ZONE_TABLE integer
+#endif
+
+#define GFP_ZONE_TABLE ( \
+	(ZONE_NORMAL << 0 * ZONES_SHIFT)				\
+	| (OPT_ZONE_DMA << __GFP_DMA * ZONES_SHIFT)			\
+	| (OPT_ZONE_HIGHMEM << __GFP_HIGHMEM * ZONES_SHIFT)		\
+	| (OPT_ZONE_DMA32 << __GFP_DMA32 * ZONES_SHIFT)			\
+	| (ZONE_NORMAL << __GFP_MOVABLE * ZONES_SHIFT)			\
+	| (OPT_ZONE_DMA << (__GFP_MOVABLE | __GFP_DMA) * ZONES_SHIFT)	\
+	| (ZONE_MOVABLE << (__GFP_MOVABLE | __GFP_HIGHMEM) * ZONES_SHIFT)\
+	| (OPT_ZONE_DMA32 << (__GFP_MOVABLE | __GFP_DMA32) * ZONES_SHIFT)\
+)
+
+/*
+ * GFP_ZONE_BAD is a bitmap for all combination of __GFP_DMA, __GFP_DMA32
+ * __GFP_HIGHMEM and __GFP_MOVABLE that are not permitted. One flag per
+ * entry starting with bit 0. Bit is set if the combination is not
+ * allowed.
+ */
+#define GFP_ZONE_BAD ( \
+	1 << (__GFP_DMA | __GFP_HIGHMEM)				\
+	| 1 << (__GFP_DMA | __GFP_DMA32)				\
+	| 1 << (__GFP_DMA32 | __GFP_HIGHMEM)				\
+	| 1 << (__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM)		\
+	| 1 << (__GFP_MOVABLE | __GFP_HIGHMEM | __GFP_DMA)		\
+	| 1 << (__GFP_MOVABLE | __GFP_DMA32 | __GFP_DMA)		\
+	| 1 << (__GFP_MOVABLE | __GFP_DMA32 | __GFP_HIGHMEM)		\
+	| 1 << (__GFP_MOVABLE | __GFP_DMA32 | __GFP_DMA | __GFP_HIGHMEM)\
+)
+
+static inline enum zone_type gfp_zone(gfp_t flags)
+{
+	enum zone_type z;
+	int bit = flags & GFP_ZONEMASK;
+
+	z = (GFP_ZONE_TABLE >> (bit * ZONES_SHIFT)) &
+					 ((1 << ZONES_SHIFT) - 1);
+
+	if (__builtin_constant_p(bit))
+		BUILD_BUG_ON((GFP_ZONE_BAD >> bit) & 1);
+	else {
+#ifdef CONFIG_DEBUG_VM
+		BUG_ON((GFP_ZONE_BAD >> bit) & 1);
 #endif
-	return ZONE_NORMAL;
+	}
+	return z;
 }
 
 /*
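The second half of the change, the GFP_ZONE_BAD bitmap, uses the same
indexing trick for validation: the flag combination itself is the bit
number in a bitmap of forbidden combinations.  Below is a user-space
sketch of that check under simplified, illustrative flag values (not the
kernel's).  The kernel additionally wraps this in BUILD_BUG_ON() when
__builtin_constant_p() shows the flags are a compile-time constant, so bad
constant combinations fail the build; the sketch only shows the run-time
form of the test.

/*
 * Sketch of the "bad combination" bitmap check: bit N is set when flag
 * combination N is invalid, so validity is one shift and one mask.
 */
#include <assert.h>
#include <stdio.h>

#define FLAG_DMA      0x1u
#define FLAG_HIGHMEM  0x2u
#define FLAG_DMA32    0x4u
#define FLAG_MOVABLE  0x8u

/* bit N set => flag combination N selects more than one exclusive zone */
#define BAD_COMBINATIONS (                   \
	1u << (FLAG_DMA | FLAG_HIGHMEM)      |   \
	1u << (FLAG_DMA | FLAG_DMA32)        |   \
	1u << (FLAG_DMA32 | FLAG_HIGHMEM)    |   \
	1u << (FLAG_DMA | FLAG_DMA32 | FLAG_HIGHMEM))

static int flags_are_valid(unsigned int flags)
{
	/* index the bitmap with the combination itself */
	return !((BAD_COMBINATIONS >> (flags & 0xf)) & 1);
}

int main(void)
{
	printf("%d\n", flags_are_valid(FLAG_DMA));                    /* 1: single zone selector */
	printf("%d\n", flags_are_valid(FLAG_MOVABLE | FLAG_HIGHMEM)); /* 1: movable + highmem allowed */
	printf("%d\n", flags_are_valid(FLAG_DMA | FLAG_HIGHMEM));     /* 0: two exclusive zones */
	assert(flags_are_valid(FLAG_DMA | FLAG_DMA32) == 0);
	return 0;
}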