#include <linux/capability.h>
#include <linux/errno.h>
#include <linux/sched.h>
#include <linux/sem.h>
#include <linux/msg.h>
#include <linux/shm.h>
#include <linux/stat.h>
#include <linux/mman.h>
#include <linux/ipc.h>

#include <asm/setup.h>
#include <asm/uaccess.h>
#include <asm/cachectl.h>
#include <asm/traps.h>
#include <asm/unistd.h>
#include <asm/cacheflush.h>
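
/*
 * sys_mmap2: like mmap(), but the last argument is a file offset counted in
 * 4096-byte units, which is what mmap64(3) in libc passes down.
 */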
asmlinkage long sys_mmap2(unsigned long addr, unsigned long len,
	unsigned long prot, unsigned long flags,
	unsigned long fd, unsigned long pgoff)
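
/*
 * Translate a user virtual address to a physical address on the 68040:
 * ptestr asks the MMU to probe the address and movec reads back %mmusr.
 * If the resident bit (MMU_R_040) is clear the page is unmapped and the
 * macro yields 0.
 */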
#define virt_to_phys_040(vaddr) ({				\
	unsigned long _mmusr, _paddr;				\
	__asm__ __volatile__ (".chip 68040\n\t"			\
			      "ptestr (%1)\n\t"			\
			      "movec %%mmusr,%0\n\t"		\
			      ".chip 68k"			\
			      : "=r" (_mmusr) : "a" (vaddr));	\
	_paddr = (_mmusr & MMU_R_040) ? (_mmusr & PAGE_MASK) : 0; \
	_paddr; })
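
/*
 * Flush the 68040 caches over a user address range.  Scope selects line,
 * page or whole-cache granularity; cache selects the data cache, the
 * instruction cache or both.  Line and page flushes operate on physical
 * addresses, so each page is translated first and unmapped pages are
 * skipped.
 */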
static inline int
cache_flush_040 (unsigned long addr, int scope, int cache, unsigned long len)
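      /* Line scope: convert the byte count into a count of 16-byte cache
	 lines, rounding outwards to cover partial lines at either end.  */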
      if ((paddr = virt_to_phys_040(addr))) {
	len = (len + (addr & 15) + 15) >> 4;

	    if ((paddr = virt_to_phys_040(addr)))
	      break;
	len = (len + 15) >> 4;
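	    /* cpushl pushes and invalidates a single 16-byte line: %dc
	       selects the data cache, %ic the instruction cache, %bc both.
	       The leading nop is needed for some broken versions of the
	       68040.  */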
	      __asm__ __volatile__ ("nop\n\t"
				    ".chip 68040\n\t"
				    "cpushl %%dc,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));

	      __asm__ __volatile__ ("nop\n\t"
				    ".chip 68040\n\t"
				    "cpushl %%ic,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));

	      __asm__ __volatile__ ("nop\n\t"
				    ".chip 68040\n\t"
				    "cpushl %%bc,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));
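	  /* When the flush crosses a page boundary the physical address is
	     looked up again; unmapped pages are skipped.  */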
	  if ((paddr = virt_to_phys_040(addr)))
	    break;
	  if (!(paddr = virt_to_phys_040(addr)))
	    continue;
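	  /* Page scope: cpushp pushes and invalidates all lines of one
	     physical page per iteration.  */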
	  __asm__ __volatile__ ("nop\n\t"
				".chip 68040\n\t"
				"cpushp %%dc,(%0)\n\t"
				".chip 68k" : : "a" (paddr));

	  __asm__ __volatile__ ("nop\n\t"
				".chip 68040\n\t"
				"cpushp %%ic,(%0)\n\t"
				".chip 68k" : : "a" (paddr));

	  __asm__ __volatile__ ("nop\n\t"
				".chip 68040\n\t"
				"cpushp %%bc,(%0)\n\t"
				".chip 68k" : : "a" (paddr));
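
/*
 * Translate a user virtual address to a physical address on the 68060 using
 * the plpa (load physical address) instruction, which rewrites the address
 * register in place.
 */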
#define virt_to_phys_060(vaddr) ({				\
	unsigned long paddr;					\
	__asm__ __volatile__ (".chip 68060\n\t"			\
			      "plpar (%0)\n\t"			\
			      ".chip 68k" : "=a" (paddr) : "0" (vaddr)); \
	paddr; })
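
/*
 * 68060 counterpart of cache_flush_040().  On the 68060, cpush flushes
 * (pushes) the data cache but invalidates the instruction cache.
 */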
static inline int
cache_flush_060 (unsigned long addr, int scope, int cache, unsigned long len)
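	  /* FLUSH_SCOPE_ALL: push the entire selected cache with cpusha.  */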
	  __asm__ __volatile__ (".chip 68060\n\t"
				"cpusha %dc\n\t"
				".chip 68k");

	  __asm__ __volatile__ (".chip 68060\n\t"
				"cpusha %ic\n\t"
				".chip 68k");

	  __asm__ __volatile__ (".chip 68060\n\t"
				"cpusha %bc\n\t"
				".chip 68k");
      if (!(paddr = virt_to_phys_060(addr))) {
	    if ((paddr = virt_to_phys_060(addr)))
	      break;
      len = (len + 15) >> 4;
	      __asm__ __volatile__ (".chip 68060\n\t"
				    "cpushl %%dc,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));

	      __asm__ __volatile__ (".chip 68060\n\t"
				    "cpushl %%ic,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));

	      __asm__ __volatile__ (".chip 68060\n\t"
				    "cpushl %%bc,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));
	          if ((paddr = virt_to_phys_060(addr)))
	            break;
	  if (!(paddr = virt_to_phys_060(addr)))
	    continue;
	      __asm__ __volatile__ (".chip 68060\n\t"
				    "cpushp %%dc,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));

	      __asm__ __volatile__ (".chip 68060\n\t"
				    "cpushp %%ic,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));

	      __asm__ __volatile__ (".chip 68060\n\t"
				    "cpushp %%bc,(%0)\n\t"
				    ".chip 68k" : : "a" (paddr));
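
/* sys_cacheflush -- flush (part of) the processor cache.  The caller gives a
   virtual address range (addr, len), a scope (line, page or all) and which
   cache to act on (data, instruction or both).  */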
asmlinkage int
sys_cacheflush (unsigned long addr, int scope, int cache, unsigned long len)
		if (addr + len < addr)
			goto out;
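	/*
	 * 68020/68030: there is no cpush instruction.  For small line-scope
	 * requests each address is loaded into %caar and %cacr is written
	 * with the clear-entry bits set (CEI and/or CED), invalidating one
	 * cache entry per iteration.
	 */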
			__asm__ ("movec %%cacr, %0" : "=r" (cacr));
				__asm__ __volatile__ ("movec %1, %%caar\n\t"
						      "movec %0, %%cacr"
						      : /* no outputs */
						      : "r" (cacr), "r" (addr));
			/* Larger requests simply clear the whole cache
			   (even if page granularity was asked for) via the
			   CI/CD bits in %cacr.  */
			__asm__ ("movec %%cacr, %0" : "=r" (cacr));
			if (cache & FLUSH_CACHE_INSN)
				cacr |= 8;
			if (cache & FLUSH_CACHE_DATA)
				cacr |= 0x800;
			__asm__ __volatile__ ("movec %0, %%cacr" : : "r" (cacr));
	    if (CPU_IS_040)
		ret = cache_flush_040 (addr, scope, cache, len);
	    else if (CPU_IS_060)
		ret = cache_flush_060 (addr, scope, cache, len);
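
/*
 * Illustration only, not part of the original file: a minimal sketch of how
 * user space might call the syscall above after writing code into a buffer
 * at run time.  It assumes the m68k uapi headers export __NR_cacheflush and
 * the FLUSH_* constants used here; it is kept under "#if 0" so it does not
 * affect this kernel file.
 */
#if 0
#include <unistd.h>
#include <sys/syscall.h>
#include <asm/cachectl.h>

/* Push dirty data lines and invalidate stale instruction lines covering a
   freshly generated code buffer before jumping into it. */
static int flush_jit_buffer(void *buf, unsigned long len)
{
	return syscall(__NR_cacheflush, (unsigned long)buf,
		       FLUSH_SCOPE_LINE, FLUSH_CACHE_BOTH, len);
}
#endif

/*
 * sys_atomic_cmpxchg_32: user space passes mem in %a0, oldval in %d2 and
 * newval in %d1; the unused d3-d5 parameters only pad the argument list so
 * that mem lands in the sixth syscall argument slot (%a0).  The value found
 * at *mem is returned, so the caller can tell whether the exchange took
 * place.
 */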
asmlinkage int
sys_atomic_cmpxchg_32(unsigned long newval, int oldval, int d3, int d4, int d5,
		      unsigned long __user * mem)
{
		unsigned long mem_value;
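		/*
		 * The page tables are walked by hand and the PTE lock is
		 * taken so the compare-and-exchange below cannot race other
		 * users of the page; the page must already be present,
		 * writable and dirty for the fast path to proceed.
		 */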
		pte = pte_offset_map_lock(mm, pmd, (unsigned long)mem, &ptl);
		if (!pte_present(*pte) || !pte_dirty(*pte)
		    || !pte_write(*pte)) {
			pte_unmap_unlock(pte, ptl);
			goto bad_access;
		}
		if (mem_value == oldval)
			__put_user(newval, mem);

		pte_unmap_unlock(pte, ptl);
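
/*
 * Variant of sys_atomic_cmpxchg_32 used on kernels built without an MMU:
 * there are no page tables to walk, so the word is read and conditionally
 * rewritten directly.
 */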
		      unsigned long __user * mem)
{
	unsigned long mem_value;
	mem_value = *mem;
	if (mem_value == oldval)
		*mem = newval;