aboutsummaryrefslogtreecommitdiffstats
path: root/arch/powerpc/mm/slice.c
diff options
context:
space:
mode:
authorAneesh Kumar K.V <aneesh.kumar@linux.vnet.ibm.com>2017-03-21 23:36:47 -0400
committerMichael Ellerman <mpe@ellerman.id.au>2017-03-31 08:09:53 -0400
commitf3207c124e7aa8d4d9cf32cc45b10ceb4defedb9 (patch)
treed6ec28c09acb59d823e5ff804a5319819d9c6fcc /arch/powerpc/mm/slice.c
parent6aa59f5162fcca09c7dcc84d64e2ebd1e7449884 (diff)
powerpc/mm/slice: Convert slice_mask high slice to a bitmap
In a followup patch we want to increase the VA range, which will result in high_slices requiring more than 64 bits. To enable this, convert high_slices to a bitmap. We keep the number of bits the same in this patch and later change it to a higher value. Signed-off-by: Aneesh Kumar K.V <aneesh.kumar@linux.vnet.ibm.com> [mpe: Fold in fix to use bitmap_empty()] Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'arch/powerpc/mm/slice.c')
-rw-r--r--arch/powerpc/mm/slice.c112
1 files changed, 75 insertions, 37 deletions
diff --git a/arch/powerpc/mm/slice.c b/arch/powerpc/mm/slice.c
index bf150557dba8..639c7171d174 100644
--- a/arch/powerpc/mm/slice.c
+++ b/arch/powerpc/mm/slice.c
@@ -36,11 +36,6 @@
36#include <asm/copro.h> 36#include <asm/copro.h>
37#include <asm/hugetlb.h> 37#include <asm/hugetlb.h>
38 38
39/* some sanity checks */
40#if (H_PGTABLE_RANGE >> 43) > SLICE_MASK_SIZE
41#error H_PGTABLE_RANGE exceeds slice_mask high_slices size
42#endif
43
44static DEFINE_SPINLOCK(slice_convert_lock); 39static DEFINE_SPINLOCK(slice_convert_lock);
45 40
46 41
@@ -49,7 +44,7 @@ int _slice_debug = 1;
49 44
50static void slice_print_mask(const char *label, struct slice_mask mask) 45static void slice_print_mask(const char *label, struct slice_mask mask)
51{ 46{
52 char *p, buf[16 + 3 + 64 + 1]; 47 char *p, buf[SLICE_NUM_LOW + 3 + SLICE_NUM_HIGH + 1];
53 int i; 48 int i;
54 49
55 if (!_slice_debug) 50 if (!_slice_debug)
@@ -60,8 +55,12 @@ static void slice_print_mask(const char *label, struct slice_mask mask)
60 *(p++) = ' '; 55 *(p++) = ' ';
61 *(p++) = '-'; 56 *(p++) = '-';
62 *(p++) = ' '; 57 *(p++) = ' ';
63 for (i = 0; i < SLICE_NUM_HIGH; i++) 58 for (i = 0; i < SLICE_NUM_HIGH; i++) {
64 *(p++) = (mask.high_slices & (1ul << i)) ? '1' : '0'; 59 if (test_bit(i, mask.high_slices))
60 *(p++) = '1';
61 else
62 *(p++) = '0';
63 }
65 *(p++) = 0; 64 *(p++) = 0;
66 65
67 printk(KERN_DEBUG "%s:%s\n", label, buf); 66 printk(KERN_DEBUG "%s:%s\n", label, buf);
@@ -80,7 +79,10 @@ static struct slice_mask slice_range_to_mask(unsigned long start,
80 unsigned long len) 79 unsigned long len)
81{ 80{
82 unsigned long end = start + len - 1; 81 unsigned long end = start + len - 1;
83 struct slice_mask ret = { 0, 0 }; 82 struct slice_mask ret;
83
84 ret.low_slices = 0;
85 bitmap_zero(ret.high_slices, SLICE_NUM_HIGH);
84 86
85 if (start < SLICE_LOW_TOP) { 87 if (start < SLICE_LOW_TOP) {
86 unsigned long mend = min(end, (SLICE_LOW_TOP - 1)); 88 unsigned long mend = min(end, (SLICE_LOW_TOP - 1));
@@ -89,10 +91,13 @@ static struct slice_mask slice_range_to_mask(unsigned long start,
89 - (1u << GET_LOW_SLICE_INDEX(start)); 91 - (1u << GET_LOW_SLICE_INDEX(start));
90 } 92 }
91 93
92 if ((start + len) > SLICE_LOW_TOP) 94 if ((start + len) > SLICE_LOW_TOP) {
93 ret.high_slices = (1ul << (GET_HIGH_SLICE_INDEX(end) + 1)) 95 unsigned long start_index = GET_HIGH_SLICE_INDEX(start);
94 - (1ul << GET_HIGH_SLICE_INDEX(start)); 96 unsigned long align_end = ALIGN(end, (1UL << SLICE_HIGH_SHIFT));
97 unsigned long count = GET_HIGH_SLICE_INDEX(align_end) - start_index;
95 98
99 bitmap_set(ret.high_slices, start_index, count);
100 }
96 return ret; 101 return ret;
97} 102}
98 103
@@ -129,9 +134,12 @@ static int slice_high_has_vma(struct mm_struct *mm, unsigned long slice)
129 134
130static struct slice_mask slice_mask_for_free(struct mm_struct *mm) 135static struct slice_mask slice_mask_for_free(struct mm_struct *mm)
131{ 136{
132 struct slice_mask ret = { 0, 0 }; 137 struct slice_mask ret;
133 unsigned long i; 138 unsigned long i;
134 139
140 ret.low_slices = 0;
141 bitmap_zero(ret.high_slices, SLICE_NUM_HIGH);
142
135 for (i = 0; i < SLICE_NUM_LOW; i++) 143 for (i = 0; i < SLICE_NUM_LOW; i++)
136 if (!slice_low_has_vma(mm, i)) 144 if (!slice_low_has_vma(mm, i))
137 ret.low_slices |= 1u << i; 145 ret.low_slices |= 1u << i;
@@ -141,7 +149,7 @@ static struct slice_mask slice_mask_for_free(struct mm_struct *mm)
141 149
142 for (i = 0; i < SLICE_NUM_HIGH; i++) 150 for (i = 0; i < SLICE_NUM_HIGH; i++)
143 if (!slice_high_has_vma(mm, i)) 151 if (!slice_high_has_vma(mm, i))
144 ret.high_slices |= 1ul << i; 152 __set_bit(i, ret.high_slices);
145 153
146 return ret; 154 return ret;
147} 155}
@@ -150,10 +158,13 @@ static struct slice_mask slice_mask_for_size(struct mm_struct *mm, int psize)
150{ 158{
151 unsigned char *hpsizes; 159 unsigned char *hpsizes;
152 int index, mask_index; 160 int index, mask_index;
153 struct slice_mask ret = { 0, 0 }; 161 struct slice_mask ret;
154 unsigned long i; 162 unsigned long i;
155 u64 lpsizes; 163 u64 lpsizes;
156 164
165 ret.low_slices = 0;
166 bitmap_zero(ret.high_slices, SLICE_NUM_HIGH);
167
157 lpsizes = mm->context.low_slices_psize; 168 lpsizes = mm->context.low_slices_psize;
158 for (i = 0; i < SLICE_NUM_LOW; i++) 169 for (i = 0; i < SLICE_NUM_LOW; i++)
159 if (((lpsizes >> (i * 4)) & 0xf) == psize) 170 if (((lpsizes >> (i * 4)) & 0xf) == psize)
@@ -164,7 +175,7 @@ static struct slice_mask slice_mask_for_size(struct mm_struct *mm, int psize)
164 mask_index = i & 0x1; 175 mask_index = i & 0x1;
165 index = i >> 1; 176 index = i >> 1;
166 if (((hpsizes[index] >> (mask_index * 4)) & 0xf) == psize) 177 if (((hpsizes[index] >> (mask_index * 4)) & 0xf) == psize)
167 ret.high_slices |= 1ul << i; 178 __set_bit(i, ret.high_slices);
168 } 179 }
169 180
170 return ret; 181 return ret;
@@ -172,8 +183,13 @@ static struct slice_mask slice_mask_for_size(struct mm_struct *mm, int psize)
172 183
173static int slice_check_fit(struct slice_mask mask, struct slice_mask available) 184static int slice_check_fit(struct slice_mask mask, struct slice_mask available)
174{ 185{
186 DECLARE_BITMAP(result, SLICE_NUM_HIGH);
187
188 bitmap_and(result, mask.high_slices,
189 available.high_slices, SLICE_NUM_HIGH);
190
175 return (mask.low_slices & available.low_slices) == mask.low_slices && 191 return (mask.low_slices & available.low_slices) == mask.low_slices &&
176 (mask.high_slices & available.high_slices) == mask.high_slices; 192 bitmap_equal(result, mask.high_slices, SLICE_NUM_HIGH);
177} 193}
178 194
179static void slice_flush_segments(void *parm) 195static void slice_flush_segments(void *parm)
@@ -220,7 +236,7 @@ static void slice_convert(struct mm_struct *mm, struct slice_mask mask, int psiz
220 for (i = 0; i < SLICE_NUM_HIGH; i++) { 236 for (i = 0; i < SLICE_NUM_HIGH; i++) {
221 mask_index = i & 0x1; 237 mask_index = i & 0x1;
222 index = i >> 1; 238 index = i >> 1;
223 if (mask.high_slices & (1ul << i)) 239 if (test_bit(i, mask.high_slices))
224 hpsizes[index] = (hpsizes[index] & 240 hpsizes[index] = (hpsizes[index] &
225 ~(0xf << (mask_index * 4))) | 241 ~(0xf << (mask_index * 4))) |
226 (((unsigned long)psize) << (mask_index * 4)); 242 (((unsigned long)psize) << (mask_index * 4));
@@ -256,7 +272,7 @@ static bool slice_scan_available(unsigned long addr,
256 slice = GET_HIGH_SLICE_INDEX(addr); 272 slice = GET_HIGH_SLICE_INDEX(addr);
257 *boundary_addr = (slice + end) ? 273 *boundary_addr = (slice + end) ?
258 ((slice + end) << SLICE_HIGH_SHIFT) : SLICE_LOW_TOP; 274 ((slice + end) << SLICE_HIGH_SHIFT) : SLICE_LOW_TOP;
259 return !!(available.high_slices & (1ul << slice)); 275 return !!test_bit(slice, available.high_slices);
260 } 276 }
261} 277}
262 278
@@ -363,15 +379,24 @@ static unsigned long slice_find_area(struct mm_struct *mm, unsigned long len,
363 return slice_find_area_bottomup(mm, len, mask, psize); 379 return slice_find_area_bottomup(mm, len, mask, psize);
364} 380}
365 381
366#define or_mask(dst, src) do { \ 382static inline void slice_or_mask(struct slice_mask *dst, struct slice_mask *src)
367 (dst).low_slices |= (src).low_slices; \ 383{
368 (dst).high_slices |= (src).high_slices; \ 384 DECLARE_BITMAP(result, SLICE_NUM_HIGH);
369} while (0)
370 385
371#define andnot_mask(dst, src) do { \ 386 dst->low_slices |= src->low_slices;
372 (dst).low_slices &= ~(src).low_slices; \ 387 bitmap_or(result, dst->high_slices, src->high_slices, SLICE_NUM_HIGH);
373 (dst).high_slices &= ~(src).high_slices; \ 388 bitmap_copy(dst->high_slices, result, SLICE_NUM_HIGH);
374} while (0) 389}
390
391static inline void slice_andnot_mask(struct slice_mask *dst, struct slice_mask *src)
392{
393 DECLARE_BITMAP(result, SLICE_NUM_HIGH);
394
395 dst->low_slices &= ~src->low_slices;
396
397 bitmap_andnot(result, dst->high_slices, src->high_slices, SLICE_NUM_HIGH);
398 bitmap_copy(dst->high_slices, result, SLICE_NUM_HIGH);
399}
375 400
376#ifdef CONFIG_PPC_64K_PAGES 401#ifdef CONFIG_PPC_64K_PAGES
377#define MMU_PAGE_BASE MMU_PAGE_64K 402#define MMU_PAGE_BASE MMU_PAGE_64K
@@ -383,15 +408,28 @@ unsigned long slice_get_unmapped_area(unsigned long addr, unsigned long len,
383 unsigned long flags, unsigned int psize, 408 unsigned long flags, unsigned int psize,
384 int topdown) 409 int topdown)
385{ 410{
386 struct slice_mask mask = {0, 0}; 411 struct slice_mask mask;
387 struct slice_mask good_mask; 412 struct slice_mask good_mask;
388 struct slice_mask potential_mask = {0,0} /* silence stupid warning */; 413 struct slice_mask potential_mask;
389 struct slice_mask compat_mask = {0, 0}; 414 struct slice_mask compat_mask;
390 int fixed = (flags & MAP_FIXED); 415 int fixed = (flags & MAP_FIXED);
391 int pshift = max_t(int, mmu_psize_defs[psize].shift, PAGE_SHIFT); 416 int pshift = max_t(int, mmu_psize_defs[psize].shift, PAGE_SHIFT);
392 struct mm_struct *mm = current->mm; 417 struct mm_struct *mm = current->mm;
393 unsigned long newaddr; 418 unsigned long newaddr;
394 419
420 /*
421 * init different masks
422 */
423 mask.low_slices = 0;
424 bitmap_zero(mask.high_slices, SLICE_NUM_HIGH);
425
426 /* silence stupid warning */;
427 potential_mask.low_slices = 0;
428 bitmap_zero(potential_mask.high_slices, SLICE_NUM_HIGH);
429
430 compat_mask.low_slices = 0;
431 bitmap_zero(compat_mask.high_slices, SLICE_NUM_HIGH);
432
395 /* Sanity checks */ 433 /* Sanity checks */
396 BUG_ON(mm->task_size == 0); 434 BUG_ON(mm->task_size == 0);
397 VM_BUG_ON(radix_enabled()); 435 VM_BUG_ON(radix_enabled());
@@ -449,7 +487,7 @@ unsigned long slice_get_unmapped_area(unsigned long addr, unsigned long len,
449 if (psize == MMU_PAGE_64K) { 487 if (psize == MMU_PAGE_64K) {
450 compat_mask = slice_mask_for_size(mm, MMU_PAGE_4K); 488 compat_mask = slice_mask_for_size(mm, MMU_PAGE_4K);
451 if (fixed) 489 if (fixed)
452 or_mask(good_mask, compat_mask); 490 slice_or_mask(&good_mask, &compat_mask);
453 } 491 }
454#endif 492#endif
455 493
@@ -484,7 +522,7 @@ unsigned long slice_get_unmapped_area(unsigned long addr, unsigned long len,
484 * empty and thus can be converted 522 * empty and thus can be converted
485 */ 523 */
486 potential_mask = slice_mask_for_free(mm); 524 potential_mask = slice_mask_for_free(mm);
487 or_mask(potential_mask, good_mask); 525 slice_or_mask(&potential_mask, &good_mask);
488 slice_print_mask(" potential", potential_mask); 526 slice_print_mask(" potential", potential_mask);
489 527
490 if ((addr != 0 || fixed) && slice_check_fit(mask, potential_mask)) { 528 if ((addr != 0 || fixed) && slice_check_fit(mask, potential_mask)) {
@@ -517,7 +555,7 @@ unsigned long slice_get_unmapped_area(unsigned long addr, unsigned long len,
517#ifdef CONFIG_PPC_64K_PAGES 555#ifdef CONFIG_PPC_64K_PAGES
518 if (addr == -ENOMEM && psize == MMU_PAGE_64K) { 556 if (addr == -ENOMEM && psize == MMU_PAGE_64K) {
519 /* retry the search with 4k-page slices included */ 557 /* retry the search with 4k-page slices included */
520 or_mask(potential_mask, compat_mask); 558 slice_or_mask(&potential_mask, &compat_mask);
521 addr = slice_find_area(mm, len, potential_mask, psize, 559 addr = slice_find_area(mm, len, potential_mask, psize,
522 topdown); 560 topdown);
523 } 561 }
@@ -531,9 +569,9 @@ unsigned long slice_get_unmapped_area(unsigned long addr, unsigned long len,
531 slice_print_mask(" mask", mask); 569 slice_print_mask(" mask", mask);
532 570
533 convert: 571 convert:
534 andnot_mask(mask, good_mask); 572 slice_andnot_mask(&mask, &good_mask);
535 andnot_mask(mask, compat_mask); 573 slice_andnot_mask(&mask, &compat_mask);
536 if (mask.low_slices || mask.high_slices) { 574 if (mask.low_slices || !bitmap_empty(mask.high_slices, SLICE_NUM_HIGH)) {
537 slice_convert(mm, mask, psize); 575 slice_convert(mm, mask, psize);
538 if (psize > MMU_PAGE_BASE) 576 if (psize > MMU_PAGE_BASE)
539 on_each_cpu(slice_flush_segments, mm, 1); 577 on_each_cpu(slice_flush_segments, mm, 1);
@@ -700,7 +738,7 @@ int is_hugepage_only_range(struct mm_struct *mm, unsigned long addr,
700 if (psize == MMU_PAGE_64K) { 738 if (psize == MMU_PAGE_64K) {
701 struct slice_mask compat_mask; 739 struct slice_mask compat_mask;
702 compat_mask = slice_mask_for_size(mm, MMU_PAGE_4K); 740 compat_mask = slice_mask_for_size(mm, MMU_PAGE_4K);
703 or_mask(available, compat_mask); 741 slice_or_mask(&available, &compat_mask);
704 } 742 }
705#endif 743#endif
706 744