/*
 * NOTE(review): garbled extraction — every line below still carries its
 * original source line number as a leading token, and many lines (e.g. the
 * #ifdef/#else directives that originally separated the duplicate
 * s_space/d_space definitions, and most macro bodies) are missing.  Content
 * appears to be the preamble of the PA-RISC optimized memcpy
 * (arch/parisc/lib/memcpy.c) — TODO confirm against the kernel tree.
 */
56 #include <linux/module.h>
57 #include <linux/compiler.h>
58 #include <asm/uaccess.h>
/* Space-register operands used in the inline asm: sr1 = source space,
 * sr2 = destination space.  The second pair (sr0/sr0) presumably comes from
 * a missing #else branch (kernel-only build) — verify. */
59 #define s_space "%%sr1"
60 #define d_space "%%sr2"
63 #define s_space "%%sr0"
64 #define d_space "%%sr0"
65 #define pa_memcpy new2_copy
/* preserve_branch: body only partially visible here (continuation lines
 * are missing from this chunk). */
70 #define preserve_branch(label) do { \
74 if (unlikely(dummy != dummy)) \
78 #define get_user_space() (segment_eq(get_fs(), KERNEL_DS) ? 0 : mfsp(3))
79 #define get_kernel_space() (0)
/* MERGE: combine two word reads of a misaligned source into one aligned
 * word using shrpw (shift-right pair, shift amount in %sar).  Macro body
 * partially missing. */
81 #define MERGE(w0, sh_1, w1, sh_2) ({ \
85 "shrpw %1, %2, %%sar, %0\n" \
87 : "r"(w0), "r"(w1), "r"(sh_2) \
94 #define DPRINTF(fmt, args...) do { printk(KERN_DEBUG "%s:%d:%s ", __FILE__, __LINE__, __func__ ); printk(KERN_DEBUG fmt, ##args ); } while (0)
/* Non-debug stub — originally under a (missing) #else. */
96 #define DPRINTF(fmt, args...)
/* Exception-table-protected load/store with post-modify (",ma" completer):
 * _insn = mnemonic, _sz = increment, _tt = output constraint, _s = space
 * register string, _a = address (updated), _t = target, _e = fixup label
 * jumped to if the access faults. */
99 #define def_load_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e) \
100 __asm__ __volatile__ ( \
101 "1:\t" #_insn ",ma " #_sz "(" _s ",%1), %0\n\t" \
102 ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
103 : _tt(_t), "+r"(_a) \
107 #define def_store_ai_insn(_insn,_sz,_tt,_s,_a,_t,_e) \
108 __asm__ __volatile__ ( \
109 "1:\t" #_insn ",ma %1, " #_sz "(" _s ",%0)\n\t" \
110 ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
115 #define ldbma(_s, _a, _t, _e) def_load_ai_insn(ldbs,1,"=r",_s,_a,_t,_e)
116 #define stbma(_s, _t, _a, _e) def_store_ai_insn(stbs,1,"r",_s,_a,_t,_e)
117 #define ldwma(_s, _a, _t, _e) def_load_ai_insn(ldw,4,"=r",_s,_a,_t,_e)
118 #define stwma(_s, _t, _a, _e) def_store_ai_insn(stw,4,"r",_s,_a,_t,_e)
119 #define flddma(_s, _a, _t, _e) def_load_ai_insn(fldd,8,"=f",_s,_a,_t,_e)
120 #define fstdma(_s, _t, _a, _e) def_store_ai_insn(fstd,8,"f",_s,_a,_t,_e)
/* Non-incrementing variants taking an explicit displacement _o. */
122 #define def_load_insn(_insn,_tt,_s,_o,_a,_t,_e) \
123 __asm__ __volatile__ ( \
124 "1:\t" #_insn " " #_o "(" _s ",%1), %0\n\t" \
125 ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
130 #define def_store_insn(_insn,_tt,_s,_t,_o,_a,_e) \
131 __asm__ __volatile__ ( \
132 "1:\t" #_insn " %0, " #_o "(" _s ",%1)\n\t" \
133 ASM_EXCEPTIONTABLE_ENTRY(1b,_e) \
138 #define ldw(_s,_o,_a,_t,_e) def_load_insn(ldw,"=r",_s,_o,_a,_t,_e)
139 #define stw(_s,_t,_o,_a,_e) def_store_insn(stw,"r",_s,_t,_o,_a,_e)
/* No-op prefetch stubs; the real CONFIG_PREFETCH implementations (lines
 * 142–151 of the original) are not visible in this chunk. */
141 #ifdef CONFIG_PREFETCH
152 #define prefetch_src(addr) do { } while(0)
153 #define prefetch_dst(addr) do { } while(0)
/*
 * copy_dstaligned — word-granular copy for a word-aligned destination and a
 * (possibly) misaligned source: src is rounded down to a word boundary and
 * successive words are merged with the shrpw-based MERGE macro using the
 * shift amounts sh_1/sh_2 computed below.  o_dst/o_src/o_len preserve the
 * original arguments for fault reporting in the exception fixup paths.
 *
 * NOTE(review): garbled extraction — the switch/case scaffolding, the
 * MERGE/ldw/stw calls and the return statements are missing, and the
 * surviving lines carry their original line numbers.  Comments below are
 * limited to what the visible fragments show.
 */
159 static inline unsigned long copy_dstaligned(
unsigned long dst,
unsigned long src,
unsigned long len,
unsigned long o_dst,
unsigned long o_src,
unsigned long o_len)
/* a2/a3 zero-initialized — presumably only to silence "may be used
 * uninitialized" warnings; TODO confirm against the full body. */
164 register unsigned int a0,
a1,
a2 = 0,
a3 = 0;
/* sh_1 = bit offset of src within a word; sh_2 = its complement.  These
 * drive the shrpw merge of adjacent aligned words. */
172 sh_1 = 8 * (src %
sizeof(
unsigned int));
173 sh_2 = 8 *
sizeof(
unsigned int) - sh_1;
/* Round src down to a word boundary. */
176 src &= -
sizeof(
unsigned int);
/* The differing pre-bias multiples below (-1/-3, 0/-2, +1/-1, +2/0 words)
 * suggest Duff's-device style entry points of a 4-words-per-iteration
 * unrolled loop, selected by the missing switch on len % 4 — verify. */
185 src -= 1 *
sizeof(
unsigned int);
186 dst -= 3 *
sizeof(
unsigned int);
194 src -= 0 *
sizeof(
unsigned int);
195 dst -= 2 *
sizeof(
unsigned int);
205 src -=-1 *
sizeof(
unsigned int);
206 dst -= 1 *
sizeof(
unsigned int);
214 src -=-2 *
sizeof(
unsigned int);
215 dst -= 0 *
sizeof(
unsigned int);
/* Main loop advance: four words per iteration. */
246 src += 4 *
sizeof(
unsigned int);
247 dst += 4 *
sizeof(
unsigned int);
/* Fault fixup entry for a faulting word load (target of the ldw macros'
 * exception-table entries); the missing lines presumably compute the
 * residual byte count before returning. */
262 __asm__ __volatile__ (
"cda_ldw_exc:\n");
264 DPRINTF(
"cda_ldw_exc: o_len=%lu fault_addr=%lu o_src=%lu ret=%lu\n",
/* Fault fixup entry for a faulting word store. */
269 __asm__ __volatile__ (
"cda_stw_exc:\n");
271 DPRINTF(
"cda_stw_exc: o_len=%lu fault_addr=%lu o_dst=%lu ret=%lu\n",
/*
 * pa_memcpy — the workhorse copy routine (aliased to new2_copy above):
 * copies len bytes from srcp to dstp, returning 0 on success; the fault
 * paths (mostly missing here) presumably return the number of bytes not
 * copied.  Visible strategy: align to double-word, bulk-copy 8 doubles per
 * iteration via FP registers, then 8 and 4 words per iteration, hand a
 * destination-aligned/source-misaligned tail to copy_dstaligned(), and
 * finish sub-word remainders byte-by-byte.
 *
 * NOTE(review): garbled extraction — most statements (including the
 * declarations of t1/t2 and all loop bodies) are missing, and the surviving
 * lines carry their original line numbers.  Comments below describe only
 * what is visible.
 */
278 static unsigned long pa_memcpy(
void *dstp,
const void *srcp,
unsigned long len)
/* Cursors over the same buffers at byte / word / double granularity. */
281 register unsigned char *pcs, *pcd;
282 register unsigned int *pws, *pwd;
283 register double *pds, *pdd;
284 unsigned long ret = 0;
285 unsigned long o_dst, o_src, o_len;
288 src = (
unsigned long)srcp;
289 dst = (
unsigned long)dstp;
290 pcs = (
unsigned char *)srcp;
291 pcd = (
unsigned char *)dstp;
/* Preserve the original arguments for fault reporting. */
293 o_dst =
dst; o_src =
src; o_len = len;
/* t1 is declared in a missing line — presumably some combination of the
 * addresses/length; this test rejects the 8-byte (double) fast path when
 * it is not double-word capable.  TODO confirm. */
302 if (
unlikely(t1 & (
sizeof(
double)-1)))
/* Bytes needed to bring src up to the next 8-byte boundary. */
308 t2 = src & (
sizeof(double) - 1);
310 t2 =
sizeof(double) - t2;
/* Bulk loop: 8 doubles (64 bytes) per iteration, using the fldd/fstd
 * macros (body missing). */
325 while (len >= 8*
sizeof(
double)) {
349 len -= 8*
sizeof(double);
/* Drop from double to word granularity for the remainder. */
353 pws = (
unsigned int *)pds;
354 pwd = (
unsigned int *)pdd;
/* 8 words (32 bytes) per iteration. */
357 while (len >= 8*
sizeof(
unsigned int)) {
377 len -= 8*
sizeof(
unsigned int);
/* 4 words (16 bytes) per iteration. */
380 while (len >= 4*
sizeof(
unsigned int)) {
390 len -= 4*
sizeof(
unsigned int);
/* Drop to byte granularity for what remains. */
393 pcs = (
unsigned char *)pws;
394 pcd = (
unsigned char *)pwd;
/* Word-path entry: taken when t1 (declaration not visible) shows the
 * operands are mutually word-capable. */
408 if (
likely((t1 & (
sizeof(
unsigned int)-1)) == 0)) {
/* Bytes to the next word boundary of src. */
409 t2 = src & (
sizeof(
unsigned int) - 1);
412 t2 =
sizeof(
unsigned int) - t2;
422 pws = (
unsigned int *)pcs;
423 pwd = (
unsigned int *)pcd;
/* Destination not word-aligned: copy t2 leading bytes to fix it up. */
428 if (
unlikely((dst & (
sizeof(
unsigned int) - 1)) != 0)) {
429 t2 =
sizeof(
unsigned int) - (dst & (
sizeof(
unsigned int) - 1));
437 dst = (
unsigned long)pcd;
438 src = (
unsigned long)pcs;
/* dst aligned, src misaligned: shift-merge copier; note len is passed in
 * words here, with the originals for fault reporting. */
441 ret = copy_dstaligned(dst, src, len /
sizeof(
unsigned int),
442 o_dst, o_src, o_len);
/* Advance byte cursors past the word-copied region; only the sub-word
 * tail (len % 4) remains for the byte loop. */
446 pcs += (len & -
sizeof(
unsigned int));
447 pcd += (len & -
sizeof(
unsigned int));
448 len %=
sizeof(
unsigned int);
/* Fault fixup entry for a faulting load anywhere above (target of the
 * load macros' exception-table entries). */
456 __asm__ __volatile__ (
"pmc_load_exc:\n");
458 DPRINTF(
"pmc_load_exc: o_len=%lu fault_addr=%lu o_src=%lu ret=%lu\n",
/* Fault fixup entry for a faulting store. */
463 __asm__ __volatile__ (
"pmc_store_exc:\n");
465 DPRINTF(
"pmc_store_exc: o_len=%lu fault_addr=%lu o_dst=%lu ret=%lu\n",
/*
 * Exported user-space copy wrappers.  Their bodies are not visible in this
 * chunk; presumably each selects the source/destination space registers via
 * get_user_space()/get_kernel_space() and defers to pa_memcpy(), returning
 * the number of bytes NOT copied — TODO confirm against the full file.
 */
/* Kernel -> user copy. */
471 unsigned long copy_to_user(
void __user *dst,
const void *src,
unsigned long len)
/* User -> kernel copy (no access_ok check at this __ level, presumably). */
479 unsigned long __copy_from_user(
void *dst,
const void __user *src,
unsigned long len)
/* User -> user copy. */
486 unsigned long copy_in_user(
void __user *dst,
const void __user *src,
unsigned long len)
494 void *
memcpy(
void * dst,
const void *src,
size_t count)