about summary refs log tree commit diff stats
path: root/mm/slub.c
diff options
context:
space:
mode:
Diffstat (limited to 'mm/slub.c')
-rw-r--r--mm/slub.c15
1 file changed, 10 insertions, 5 deletions
diff --git a/mm/slub.c b/mm/slub.c
index 8f78e2577031..2fdd96f9e998 100644
--- a/mm/slub.c
+++ b/mm/slub.c
@@ -1524,12 +1524,13 @@ static inline void *acquire_slab(struct kmem_cache *s,
1524} 1524}
1525 1525
1526static int put_cpu_partial(struct kmem_cache *s, struct page *page, int drain); 1526static int put_cpu_partial(struct kmem_cache *s, struct page *page, int drain);
1527static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags);
1527 1528
1528/* 1529/*
1529 * Try to allocate a partial slab from a specific node. 1530 * Try to allocate a partial slab from a specific node.
1530 */ 1531 */
1531static void *get_partial_node(struct kmem_cache *s, 1532static void *get_partial_node(struct kmem_cache *s, struct kmem_cache_node *n,
1532 struct kmem_cache_node *n, struct kmem_cache_cpu *c) 1533 struct kmem_cache_cpu *c, gfp_t flags)
1533{ 1534{
1534 struct page *page, *page2; 1535 struct page *page, *page2;
1535 void *object = NULL; 1536 void *object = NULL;
@@ -1545,9 +1546,13 @@ static void *get_partial_node(struct kmem_cache *s,
1545 1546
1546 spin_lock(&n->list_lock); 1547 spin_lock(&n->list_lock);
1547 list_for_each_entry_safe(page, page2, &n->partial, lru) { 1548 list_for_each_entry_safe(page, page2, &n->partial, lru) {
1548 void *t = acquire_slab(s, n, page, object == NULL); 1549 void *t;
1549 int available; 1550 int available;
1550 1551
1552 if (!pfmemalloc_match(page, flags))
1553 continue;
1554
1555 t = acquire_slab(s, n, page, object == NULL);
1551 if (!t) 1556 if (!t)
1552 break; 1557 break;
1553 1558
@@ -1614,7 +1619,7 @@ static void *get_any_partial(struct kmem_cache *s, gfp_t flags,
1614 1619
1615 if (n && cpuset_zone_allowed_hardwall(zone, flags) && 1620 if (n && cpuset_zone_allowed_hardwall(zone, flags) &&
1616 n->nr_partial > s->min_partial) { 1621 n->nr_partial > s->min_partial) {
1617 object = get_partial_node(s, n, c); 1622 object = get_partial_node(s, n, c, flags);
1618 if (object) { 1623 if (object) {
1619 /* 1624 /*
1620 * Return the object even if 1625 * Return the object even if
@@ -1643,7 +1648,7 @@ static void *get_partial(struct kmem_cache *s, gfp_t flags, int node,
1643 void *object; 1648 void *object;
1644 int searchnode = (node == NUMA_NO_NODE) ? numa_node_id() : node; 1649 int searchnode = (node == NUMA_NO_NODE) ? numa_node_id() : node;
1645 1650
1646 object = get_partial_node(s, get_node(s, searchnode), c); 1651 object = get_partial_node(s, get_node(s, searchnode), c, flags);
1647 if (object || node != NUMA_NO_NODE) 1652 if (object || node != NUMA_NO_NODE)
1648 return object; 1653 return object;
1649 1654