 mm/slub.c | 15 +++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)
diff --git a/mm/slub.c b/mm/slub.c
index 944b4edaeb18..97a49d9a37cd 100644
--- a/mm/slub.c
+++ b/mm/slub.c
@@ -1531,12 +1531,13 @@ static inline void *acquire_slab(struct kmem_cache *s,
 }
 
 static int put_cpu_partial(struct kmem_cache *s, struct page *page, int drain);
+static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags);
 
 /*
  * Try to allocate a partial slab from a specific node.
  */
-static void *get_partial_node(struct kmem_cache *s,
-                struct kmem_cache_node *n, struct kmem_cache_cpu *c)
+static void *get_partial_node(struct kmem_cache *s, struct kmem_cache_node *n,
+                                struct kmem_cache_cpu *c, gfp_t flags)
 {
         struct page *page, *page2;
         void *object = NULL;
@@ -1552,9 +1553,13 @@ static void *get_partial_node(struct kmem_cache *s,
 
         spin_lock(&n->list_lock);
         list_for_each_entry_safe(page, page2, &n->partial, lru) {
-                void *t = acquire_slab(s, n, page, object == NULL);
+                void *t;
                 int available;
 
+                if (!pfmemalloc_match(page, flags))
+                        continue;
+
+                t = acquire_slab(s, n, page, object == NULL);
                 if (!t)
                         break;
 
@@ -1621,7 +1626,7 @@ static void *get_any_partial(struct kmem_cache *s, gfp_t flags,
 
                 if (n && cpuset_zone_allowed_hardwall(zone, flags) &&
                                 n->nr_partial > s->min_partial) {
-                        object = get_partial_node(s, n, c);
+                        object = get_partial_node(s, n, c, flags);
                         if (object) {
                                 /*
                                  * Return the object even if
@@ -1650,7 +1655,7 @@ static void *get_partial(struct kmem_cache *s, gfp_t flags, int node,
         void *object;
         int searchnode = (node == NUMA_NO_NODE) ? numa_node_id() : node;
 
-        object = get_partial_node(s, get_node(s, searchnode), c);
+        object = get_partial_node(s, get_node(s, searchnode), c, flags);
         if (object || node != NUMA_NO_NODE)
                 return object;
 
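Note: pfmemalloc_match() is only forward-declared in this patch; its
definition lands further down in slub.c and is not part of these hunks.
For context, a minimal sketch of the predicate as the new check in
get_partial_node() uses it, assuming the PageSlabPfmemalloc() page-flag
helper and gfp_pfmemalloc_allowed() introduced elsewhere in the
pfmemalloc series:

static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags)
{
        /*
         * Sketch, not part of this diff: a slab page taken from the
         * emergency reserves may only satisfy allocations that are
         * themselves entitled to those reserves (e.g. __GFP_MEMALLOC
         * or PF_MEMALLOC contexts).
         */
        if (unlikely(PageSlabPfmemalloc(page)))
                return gfp_pfmemalloc_allowed(gfpflags);

        /* An ordinary slab page matches any request. */
        return true;
}

With that check in place, the early continue in get_partial_node() skips
reserve-backed partial slabs before acquire_slab() would take them off the
node's partial list, so ordinary allocations never consume emergency slabs.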