aboutsummaryrefslogtreecommitdiffstats
path: root/mm
diff options
context:
space:
mode:
Diffstat (limited to 'mm')
-rw-r--r-- mm/vmscan.c | 12
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/mm/vmscan.c b/mm/vmscan.c
index 68586c887611..259f8208a388 100644
--- a/mm/vmscan.c
+++ b/mm/vmscan.c
@@ -1713,13 +1713,15 @@ static void get_scan_count(struct lruvec *lruvec, struct scan_control *sc,
1713 file = get_lru_size(lruvec, LRU_ACTIVE_FILE) + 1713 file = get_lru_size(lruvec, LRU_ACTIVE_FILE) +
1714 get_lru_size(lruvec, LRU_INACTIVE_FILE); 1714 get_lru_size(lruvec, LRU_INACTIVE_FILE);
1715 1715
1716 /*
1717 * If it's foreseeable that reclaiming the file cache won't be
1718 * enough to get the zone back into a desirable shape, we have
1719 * to swap. Better start now and leave the - probably heavily
1720 * thrashing - remaining file pages alone.
1721 */
1716 if (global_reclaim(sc)) { 1722 if (global_reclaim(sc)) {
1717 free = zone_page_state(zone, NR_FREE_PAGES); 1723 free = zone_page_state(zone, NR_FREE_PAGES);
1718 if (unlikely(file + free <= high_wmark_pages(zone))) { 1724 if (unlikely(file + free <= high_wmark_pages(zone))) {
1719 /*
1720 * If we have very few page cache pages, force-scan
1721 * anon pages.
1722 */
1723 fraction[0] = 1; 1725 fraction[0] = 1;
1724 fraction[1] = 0; 1726 fraction[1] = 0;
1725 denominator = 1; 1727 denominator = 1;