27 #include <linux/kernel.h>
32 #include <linux/export.h>
38 #if (PGTABLE_RANGE >> 43) > SLICE_MASK_SIZE
39 #error PGTABLE_RANGE exceeds slice_mask high_slices size
/*
 * slice_print_mask() — debug-only helper: renders a struct slice_mask as a
 * string of '0'/'1' characters, low slices first, then high slices.
 * NOTE(review): fragmentary chunk — the opening brace, the 'i' declaration
 * and the final print/terminate lines are not visible here.
 */
48 static void slice_print_mask(
const char *
label,
struct slice_mask
mask)
/* Scratch buffer: 16 low-slice chars + separators + 64 high-slice chars + NUL. */
50 char *
p,
buf[16 + 3 + 64 + 1];
/* One character per low slice: set bit -> '1', clear -> '0'. */
56 for (i = 0; i < SLICE_NUM_LOW; i++)
57 *(p++) = (mask.low_slices & (1 <<
i)) ?
'1' :
'0';
/* One character per high slice; 1ul because high_slices is an unsigned long. */
61 for (i = 0; i < SLICE_NUM_HIGH; i++)
62 *(p++) = (mask.high_slices & (1ul <<
i)) ?
'1' :
'0';
/* Debug build: slice_dbg() forwards to pr_debug() only when _slice_debug
 * is set at runtime. */
68 #define slice_dbg(fmt...) do { if (_slice_debug) pr_debug(fmt); } while(0)
72 static void slice_print_mask(
const char *label,
struct slice_mask mask) {}
73 #define slice_dbg(fmt...)
/*
 * slice_range_to_mask() — build the slice_mask covering an address range.
 * Low slices (addresses below SLICE_LOW_TOP) and high slices are tracked
 * in separate bitmaps.
 * NOTE(review): fragmentary — the 'len'/'end' setup, braces, the high-area
 * guard and the final return are not visible in this chunk.
 */
77 static struct slice_mask slice_range_to_mask(unsigned
long start,
81 struct slice_mask ret = { 0, 0 };
/* Clamp the range to the low area, then set every low slice it touches:
 * (1 << (last+1)) - (1 << first) yields a contiguous run of set bits. */
83 if (
start < SLICE_LOW_TOP) {
84 unsigned long mend =
min(
end, SLICE_LOW_TOP);
85 unsigned long mstart =
min(
start, SLICE_LOW_TOP);
87 ret.low_slices = (1
u << (GET_LOW_SLICE_INDEX(mend) + 1))
88 - (1
u << GET_LOW_SLICE_INDEX(mstart));
/* Same contiguous-run trick for the high slices (unsigned long math). */
92 ret.high_slices = (1ul << (GET_HIGH_SLICE_INDEX(
end) + 1))
93 - (1ul << GET_HIGH_SLICE_INDEX(
start));
/*
 * slice_area_is_free() — true when no VMA intersects [addr, addr + len).
 * NOTE(review): fragmentary — the 'len' parameter, the task-size bound
 * check and the lookup that sets 'vma' (presumably find_vma()) are not
 * visible in this chunk; confirm against the full file.
 */
98 static int slice_area_is_free(
struct mm_struct *mm,
unsigned long addr,
/* Free iff there is no VMA at/after addr, or the range ends before it. */
106 return (!vma || (addr + len) <= vma->
vm_start);
109 static int slice_low_has_vma(
struct mm_struct *mm,
unsigned long slice)
111 return !slice_area_is_free(mm, slice << SLICE_LOW_SHIFT,
112 1ul << SLICE_LOW_SHIFT);
/*
 * slice_high_has_vma() — does the given high-area slice contain a VMA?
 * NOTE(review): fragmentary — the guard that bumps 'start' to
 * SLICE_LOW_TOP (so low/high bitmaps never cover the same address) is only
 * partially visible; confirm the missing condition against the full file.
 */
115 static int slice_high_has_vma(
struct mm_struct *mm,
unsigned long slice)
117 unsigned long start = slice << SLICE_HIGH_SHIFT;
118 unsigned long end = start + (1ul << SLICE_HIGH_SHIFT);
/* Adjusted start keeps the low and high slice bitmaps disjoint. */
124 start = SLICE_LOW_TOP;
126 return !slice_area_is_free(mm, start, end - start);
/*
 * Builds the mask of slices that currently contain no VMA at all, i.e.
 * slices the mm is free to repurpose.
 * NOTE(review): fragmentary — the function signature (presumably
 * slice_mask_for_free(mm)), the 'i' declaration, braces and the return
 * are not visible in this chunk.
 */
131 struct slice_mask
ret = { 0, 0 };
/* A low slice is free when no mapping touches it. */
134 for (i = 0; i < SLICE_NUM_LOW; i++)
135 if (!slice_low_has_vma(mm, i))
136 ret.low_slices |= 1
u << i;
/* Likewise for the high slices (unsigned long bitmap). */
141 for (i = 0; i < SLICE_NUM_HIGH; i++)
142 if (!slice_high_has_vma(mm, i))
143 ret.high_slices |= 1ul << i;
/*
 * slice_mask_for_size() — mask of slices whose recorded page size equals
 * @psize.  Low-slice sizes are packed four bits each into an integer;
 * high-slice sizes are packed two per byte in the high_slices_psize array.
 * NOTE(review): fragmentary — the 'lpsizes'/'i' declarations, the 'index'
 * computation (presumably i >> 1) and the return are not visible here.
 */
148 static struct slice_mask slice_mask_for_size(
struct mm_struct *mm,
int psize)
150 unsigned char *hpsizes;
151 int index, mask_index;
152 struct slice_mask ret = { 0, 0 };
/* Each low slice's psize is a 4-bit nibble of low_slices_psize. */
156 lpsizes = mm->context.low_slices_psize;
157 for (i = 0; i < SLICE_NUM_LOW; i++)
158 if (((lpsizes >> (i * 4)) & 0xf) == psize)
159 ret.low_slices |= 1
u <<
i;
/* High slices: two 4-bit entries per byte, selected by mask_index. */
161 hpsizes = mm->context.high_slices_psize;
162 for (i = 0; i < SLICE_NUM_HIGH; i++) {
163 mask_index = i & 0x1;
165 if (((hpsizes[index] >> (mask_index * 4)) & 0xf) == psize)
166 ret.high_slices |= 1ul <<
i;
172 static int slice_check_fit(
struct slice_mask mask,
struct slice_mask
available)
174 return (mask.low_slices & available.low_slices) == mask.low_slices &&
175 (mask.high_slices & available.high_slices) == mask.high_slices;
/*
 * slice_flush_segments() — per-CPU callback: reload the PACA's context
 * from the current mm so stale segment translations get refreshed.
 * NOTE(review): fragmentary — braces, locking and the flush statements
 * surrounding this assignment are not visible in this chunk.
 */
178 static void slice_flush_segments(
void *
parm)
187 get_paca()->context =
current->active_mm->context;
/*
 * slice_convert() — stamp @psize into every slice selected by @mask, in
 * both the packed low-slice word and the high-slice nibble array.
 * NOTE(review): fragmentary — the 'lpsizes'/'i'/'flags' declarations, the
 * spinlock acquisition, 'index = i >> 1' and the trailing debug print are
 * not visible in this chunk.
 */
194 static void slice_convert(
struct mm_struct *mm,
struct slice_mask mask,
int psize)
196 int index, mask_index;
198 unsigned char *hpsizes;
202 slice_dbg(
"slice_convert(mm=%p, psize=%d)\n", mm, psize);
203 slice_print_mask(
" mask", mask);
/* Low slices: replace the 4-bit nibble for each selected slice. */
210 lpsizes = mm->
context.low_slices_psize;
211 for (i = 0; i < SLICE_NUM_LOW; i++)
212 if (mask.low_slices & (1
u << i))
213 lpsizes = (lpsizes & ~(0xful << (i * 4))) |
214 (((
unsigned long)psize) << (i * 4));
217 mm->
context.low_slices_psize = lpsizes;
/* High slices: two nibbles per byte; mask_index selects which nibble. */
219 hpsizes = mm->
context.high_slices_psize;
220 for (i = 0; i < SLICE_NUM_HIGH; i++) {
221 mask_index = i & 0x1;
223 if (mask.high_slices & (1ul << i))
224 hpsizes[index] = (hpsizes[index] &
225 ~(0xf << (mask_index * 4))) |
226 (((
unsigned long)psize) << (mask_index * 4));
231 mm->
context.high_slices_psize);
233 spin_unlock_irqrestore(&slice_convert_lock, flags);
/* Conditionally-compiled SPU handling follows (guarded code not visible). */
235 #ifdef CONFIG_SPU_BASE
/*
 * slice_find_area_bottomup() — bottom-up search for a free range that
 * stays entirely inside the @available slice mask.
 * NOTE(review): fragmentary — the 'len' parameter, the addr/vma walk loop
 * and the free-area-cache handling are only partially visible here.
 */
240 static unsigned long slice_find_area_bottomup(
struct mm_struct *mm,
242 struct slice_mask available,
243 int psize,
int use_cache)
247 struct slice_mask mask;
/* Free-area-cache fast path (body not visible in this chunk). */
251 if (len <= mm->cached_hole_size) {
/* If the candidate range leaves @available, skip ahead to the next slice
 * boundary — low and high slices have different sizes. */
267 mask = slice_range_to_mask(addr, len);
268 if (!slice_check_fit(mask, available)) {
269 if (addr < SLICE_LOW_TOP)
270 addr =
_ALIGN_UP(addr + 1, 1ul << SLICE_LOW_SHIFT);
272 addr =
_ALIGN_UP(addr + 1, 1ul << SLICE_HIGH_SHIFT);
/* Success when the gap before the next VMA is large enough. */
275 if (!vma || addr + len <= vma->vm_start) {
/*
 * slice_find_area_topdown() — top-down search for a free range inside the
 * @available slice mask; as a last resort it retries bottom-up with the
 * free-area cache disabled (use_cache = 0).
 * NOTE(review): fragmentary — the 'len' parameter, the cache handling and
 * most of the walk loop are not visible in this chunk.
 */
297 static unsigned long slice_find_area_topdown(
struct mm_struct *mm,
299 struct slice_mask available,
300 int psize,
int use_cache)
304 struct slice_mask mask;
/* Free-area-cache fast path (body not visible here). */
309 if (len <= mm->cached_hole_size) {
/* A hinted candidate is usable only if it fits the mask AND is free. */
322 mask = slice_range_to_mask(addr, len);
323 if (slice_check_fit(mask, available) &&
324 slice_area_is_free(mm, addr, len))
/* When the candidate leaves @available, step down past the offending
 * slice boundary (low vs high slices differ in size). */
338 mask = slice_range_to_mask(addr, len);
339 if (!slice_check_fit(mask, available)) {
340 if (addr < SLICE_LOW_TOP)
342 else if (addr < (1ul << SLICE_HIGH_SHIFT))
343 addr = SLICE_LOW_TOP;
/* Found a gap big enough before the next VMA. */
355 if (!vma || (addr + len) <= vma->
vm_start) {
/* Fall back to a bottom-up scan without using the free-area cache. */
376 addr = slice_find_area_bottomup(mm, len, available, psize, 0);
/*
 * slice_find_area() — dispatch to the top-down or bottom-up allocator.
 * NOTE(review): fragmentary — the branch (presumably on 'topdown') that
 * selects between the two returns is not visible here; confirm against
 * the full file.
 */
390 static unsigned long slice_find_area(
struct mm_struct *mm,
unsigned long len,
391 struct slice_mask mask,
int psize,
392 int topdown,
int use_cache)
395 return slice_find_area_topdown(mm, len, mask, psize, use_cache);
397 return slice_find_area_bottomup(mm, len, mask, psize, use_cache);
/*
 * or_mask(dst, src): dst |= src; andnot_mask(dst, src): dst &= ~src —
 * each applied to both the low and high slice bitmaps of a slice_mask.
 * NOTE(review): the closing "} while (0)" lines of both macros are not
 * visible in this chunk.
 */
400 #define or_mask(dst, src) do { \
401 (dst).low_slices |= (src).low_slices; \
402 (dst).high_slices |= (src).high_slices; \
405 #define andnot_mask(dst, src) do { \
406 (dst).low_slices &= ~(src).low_slices; \
407 (dst).high_slices &= ~(src).high_slices; \
/* Base MMU page size: 64K when the kernel is built with 64K pages, 4K
 * otherwise.  (The #else/#endif lines are not visible in this chunk.) */
410 #ifdef CONFIG_PPC_64K_PAGES
411 #define MMU_PAGE_BASE MMU_PAGE_64K
413 #define MMU_PAGE_BASE MMU_PAGE_4K
/*
 * slice_get_unmapped_area() — core allocator: find (and, if needed,
 * convert) a run of slices for a mapping of @len bytes at page size
 * @psize.  Strategy visible in this chunk: try the "good" mask (slices
 * already at @psize, plus 4K-compatible slices on 64K kernels), then widen
 * to the "potential" mask (good + currently-free slices), finally
 * converting any newly claimed slices.
 * NOTE(review): heavily fragmentary — the signature's first line, the
 * 'fixed'/'pshift' setup, MAP_FIXED handling and several branch bodies and
 * returns are not visible in this chunk.
 */
417 unsigned long flags,
unsigned int psize,
418 int topdown,
int use_cache)
420 struct slice_mask mask = {0, 0};
421 struct slice_mask good_mask;
422 struct slice_mask potential_mask = {0,0} ;
423 struct slice_mask compat_mask = {0, 0};
427 unsigned long newaddr;
432 slice_dbg(
"slice_get_unmapped_area(mm=%p, psize=%d...\n", mm, psize);
433 slice_dbg(
" addr=%lx, len=%lx, flags=%lx, topdown=%d, use_cache=%d\n",
434 addr, len, flags, topdown, use_cache);
/* Sanity: length and any fixed address must be psize-aligned, and the
 * mapping must fit below the task size. */
438 if (len & ((1ul << pshift) - 1))
440 if (fixed && (addr & ((1ul << pshift) - 1)))
442 if (fixed && addr > (mm->
task_size - len))
446 if (!fixed && addr) {
451 !slice_area_is_free(mm, addr, len))
/* Mask of slices already configured for the requested page size. */
458 good_mask = slice_mask_for_size(mm, psize);
459 slice_print_mask(
" good_mask", good_mask);
/* On 64K kernels a 64K mapping may also live in 4K slices. */
480 #ifdef CONFIG_PPC_64K_PAGES
482 if (psize == MMU_PAGE_64K) {
483 compat_mask = slice_mask_for_size(mm, MMU_PAGE_4K);
485 or_mask(good_mask, compat_mask);
/* With a hint (or fixed request), first check it against good_mask. */
490 if (addr != 0 || fixed) {
492 mask = slice_range_to_mask(addr, len);
493 slice_print_mask(
" mask", mask);
498 if (slice_check_fit(mask, good_mask)) {
/* No usable hint: search within the good slices only. */
506 newaddr = slice_find_area(mm, len, good_mask, psize, topdown,
512 slice_dbg(
" found area at 0x%lx\n", newaddr);
/* Widen the search: free slices can be converted, so they count as
 * "potential" in addition to the good ones. */
520 potential_mask = slice_mask_for_free(mm);
521 or_mask(potential_mask, good_mask);
522 slice_print_mask(
" potential", potential_mask);
524 if ((addr != 0 || fixed) && slice_check_fit(mask, potential_mask)) {
539 addr = slice_find_area(mm, len, good_mask, psize, topdown,
542 slice_dbg(
" found area at 0x%lx\n", addr);
550 addr = slice_find_area(mm, len, potential_mask, psize, topdown,
/* 64K kernels: as a last resort also allow 4K-compatible slices. */
553 #ifdef CONFIG_PPC_64K_PAGES
554 if (addr == -
ENOMEM && psize == MMU_PAGE_64K) {
556 or_mask(potential_mask, compat_mask);
557 addr = slice_find_area(mm, len, potential_mask, psize,
/* Convert any slices the chosen range claims beyond the good mask. */
565 mask = slice_range_to_mask(addr, len);
566 slice_dbg(
" found potential area at 0x%lx\n", addr);
567 slice_print_mask(
" mask", mask);
572 if (mask.low_slices || mask.high_slices) {
573 slice_convert(mm, mask, psize);
/*
 * Fragments of arch_get_unmapped_area() / arch_get_unmapped_area_topdown().
 * Both pass the mm's default user_psize to a callee — presumably
 * slice_get_unmapped_area(); the surrounding signatures and call sites are
 * not visible in this chunk, so confirm against the full file.
 */
589 current->mm->context.user_psize,
594 const unsigned long addr0,
595 const unsigned long len,
596 const unsigned long pgoff,
597 const unsigned long flags)
600 current->mm->context.user_psize,
/*
 * get_slice_psize() — return the 4-bit page-size index recorded for the
 * slice containing @addr.
 * NOTE(review): fragmentary — the function signature and the 'lpsizes'
 * declaration are not visible in this chunk.
 */
606 unsigned char *hpsizes;
607 int index, mask_index;
/* Low area: one nibble per slice in the packed low_slices_psize word. */
609 if (addr < SLICE_LOW_TOP) {
611 lpsizes = mm->
context.low_slices_psize;
612 index = GET_LOW_SLICE_INDEX(addr);
613 return (lpsizes >> (index * 4)) & 0xf;
/* High area: two nibbles per byte; mask_index picks low or high nibble. */
615 hpsizes = mm->
context.high_slices_psize;
616 index = GET_HIGH_SLICE_INDEX(addr);
617 mask_index = index & 0x1;
618 return (hpsizes[index >> 1] >> (mask_index * 4)) & 0xf;
/*
 * slice_set_user_psize() — retag every slice still using the mm's old
 * default page size with the new @psize, using the same nibble packing as
 * slice_convert().
 * NOTE(review): fragmentary — 'old_psize' initialisation, the spinlock
 * acquisition, 'index = i >> 1' and the exit paths around the unlock are
 * not visible in this chunk.
 */
638 int index, mask_index;
639 unsigned char *hpsizes;
640 unsigned long flags, lpsizes;
641 unsigned int old_psize;
644 slice_dbg(
"slice_set_user_psize(mm=%p, psize=%d)\n", mm, psize);
/* Nothing to retag when the default size is unchanged. */
650 if (old_psize == psize)
/* Rewrite each low-slice nibble that still holds the old size. */
656 lpsizes = mm->
context.low_slices_psize;
657 for (i = 0; i < SLICE_NUM_LOW; i++)
658 if (((lpsizes >> (i * 4)) & 0xf) == old_psize)
659 lpsizes = (lpsizes & ~(0xful << (i * 4))) |
660 (((
unsigned long)psize) << (i * 4));
662 mm->
context.low_slices_psize = lpsizes;
/* Same for high slices: two nibbles per byte of high_slices_psize. */
664 hpsizes = mm->
context.high_slices_psize;
665 for (i = 0; i < SLICE_NUM_HIGH; i++) {
666 mask_index = i & 0x1;
668 if (((hpsizes[index] >> (mask_index * 4)) & 0xf) == old_psize)
669 hpsizes[index] = (hpsizes[index] &
670 ~(0xf << (mask_index * 4))) |
671 (((
unsigned long)psize) << (mask_index * 4));
679 mm->
context.high_slices_psize);
682 spin_unlock_irqrestore(&slice_convert_lock, flags);
/*
 * slice_set_psize() — set the page-size nibble for the single slice that
 * contains @address.
 * NOTE(review): fragmentary — the function signature, lock acquisition and
 * the left-hand side of the hpsizes[...] assignment are not visible in
 * this chunk.
 */
688 unsigned char *hpsizes;
/* Low area: update the nibble inside the packed low_slices_psize word. */
693 if (address < SLICE_LOW_TOP) {
694 i = GET_LOW_SLICE_INDEX(address);
695 lpsizes = &mm->
context.low_slices_psize;
696 *lpsizes = (*lpsizes & ~(0xful << (i * 4))) |
697 ((
unsigned long) psize << (i * 4));
/* High area: pick the byte (two slices per byte) and the nibble within. */
699 int index, mask_index;
700 i = GET_HIGH_SLICE_INDEX(address);
701 hpsizes = mm->
context.high_slices_psize;
702 mask_index = i & 0x1;
705 ~(0xf << (mask_index * 4))) |
706 (((
unsigned long)psize) << (mask_index * 4));
709 spin_unlock_irqrestore(&slice_convert_lock, flags);
/*
 * slice_set_range_psize() — convert every slice covering [start,
 * start + len) to @psize via slice_convert().
 * NOTE(review): fragmentary — the code guarded by CONFIG_SPU_BASE, the
 * signature's leading parameters and the braces are not visible in this
 * chunk.
 */
711 #ifdef CONFIG_SPU_BASE
717 unsigned long len,
unsigned int psize)
719 struct slice_mask mask = slice_range_to_mask(start, len);
721 slice_convert(mm, mask, psize);
/*
 * is_hugepage_only_range() — true when [addr, addr + len) is NOT fully
 * contained in slices matching the relevant page size (some covered slice
 * disagrees with @psize).
 * NOTE(review): fragmentary — the function signature and the 'psize'
 * initialisation are not visible in this chunk.
 */
746 struct slice_mask mask, available;
749 mask = slice_range_to_mask(addr, len);
750 available = slice_mask_for_size(mm, psize);
/* On 64K kernels, 4K slices are compatible and count as available too. */
751 #ifdef CONFIG_PPC_64K_PAGES
753 if (psize == MMU_PAGE_64K) {
754 struct slice_mask compat_mask;
755 compat_mask = slice_mask_for_size(mm, MMU_PAGE_4K);
756 or_mask(available, compat_mask);
761 slice_dbg(
"is_hugepage_only_range(mm=%p, addr=%lx, len=%lx)\n",
763 slice_print_mask(
" mask", mask);
764 slice_print_mask(
" available", available);
/* The range is hugepage-only iff it does NOT fit the available mask. */
766 return !slice_check_fit(mask, available);