Diffstat (limited to 'mm/slub.c')
 mm/slub.c | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/mm/slub.c b/mm/slub.c
index 4907563ef7ff..5b915e86a9b0 100644
--- a/mm/slub.c
+++ b/mm/slub.c
@@ -269,6 +269,11 @@ static inline void *get_freepointer(struct kmem_cache *s, void *object)
 	return *(void **)(object + s->offset);
 }
 
+static void prefetch_freepointer(const struct kmem_cache *s, void *object)
+{
+	prefetch(object + s->offset);
+}
+
 static inline void *get_freepointer_safe(struct kmem_cache *s, void *object)
 {
 	void *p;
@@ -2309,6 +2314,8 @@ redo:
 		object = __slab_alloc(s, gfpflags, node, addr, c);
 
 	else {
+		void *next_object = get_freepointer_safe(s, object);
+
 		/*
 		 * The cmpxchg will only match if there was no additional
 		 * operation and if we are on the right processor.
@@ -2324,11 +2331,12 @@ redo:
 		if (unlikely(!this_cpu_cmpxchg_double(
 				s->cpu_slab->freelist, s->cpu_slab->tid,
 				object, tid,
-				get_freepointer_safe(s, object), next_tid(tid)))) {
+				next_object, next_tid(tid)))) {
 
 			note_cmpxchg_failure("slab_alloc", s, tid);
 			goto redo;
 		}
+		prefetch_freepointer(s, next_object);
 		stat(s, ALLOC_FASTPATH);
 	}
 
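
The patch reads the next free pointer once via get_freepointer_safe() before the cmpxchg and, after the cmpxchg succeeds, prefetches the free pointer stored inside that next object, so the pointer chase on the following fast-path allocation is more likely to hit cache. Below is a minimal userspace sketch of the same idea, not the kernel code: the freelist_cache/fl_* names and the use of __builtin_prefetch in place of the kernel's prefetch() are assumptions made purely for illustration.

/*
 * Sketch of "prefetch the next freelist pointer" in a toy freelist
 * allocator.  Each free object stores a pointer to the next free object
 * at a fixed offset inside itself, as in SLUB.
 */
#include <stddef.h>
#include <stdio.h>

struct freelist_cache {
	void *freelist;		/* head of the singly linked free list */
	size_t offset;		/* offset of the free pointer inside an object */
};

static inline void *fl_get_freepointer(struct freelist_cache *c, void *object)
{
	return *(void **)((char *)object + c->offset);
}

static inline void fl_prefetch_freepointer(struct freelist_cache *c, void *object)
{
	/* Analogous to prefetch_freepointer(): touch the cache line that
	 * holds the next object's embedded free pointer. */
	__builtin_prefetch((char *)object + c->offset);
}

static void *fl_alloc(struct freelist_cache *c)
{
	void *object = c->freelist;
	void *next_object;

	if (!object)
		return NULL;	/* refill/slow path omitted in this sketch */

	next_object = fl_get_freepointer(c, object);
	c->freelist = next_object;

	/* Warm the line the *next* allocation will dereference. */
	if (next_object)
		fl_prefetch_freepointer(c, next_object);

	return object;
}

int main(void)
{
	enum { NOBJ = 4, SIZE = 64 };
	struct freelist_cache c = { .freelist = NULL, .offset = 0 };
	static char pool[NOBJ][SIZE];
	int i;

	/* Thread the objects into a free list; the free pointer lives at
	 * offset 0 inside each free object. */
	for (i = NOBJ - 1; i >= 0; i--) {
		*(void **)pool[i] = c.freelist;
		c.freelist = pool[i];
	}

	for (i = 0; i < NOBJ + 1; i++)
		printf("alloc -> %p\n", fl_alloc(&c));

	return 0;
}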