19 #if defined(__i386__) && !defined(__arch_um__)
25 const struct raid6_mmx_constants {
27 } raid6_mmx_constants = {
28 0x1d1d1d1d1d1d1d1dULL,
31 static int raid6_have_mmx(
void)
/*
 * raid6_mmx1_gen_syndrome() - compute the RAID-6 P (XOR parity) and Q
 * (Reed-Solomon syndrome) buffers, one MMX register wide: 8 bytes of
 * the stripe per loop iteration.
 *
 * NOTE(review): this view of the function is mangled/truncated by
 * extraction -- the declarations of d, z, z0, p and q, the FPU
 * begin/end bracketing and the closing braces are not visible, and
 * statements are wrapped mid-line.  Comments only were added here;
 * the code bytes are untouched.  The exact asm statement order is
 * load-bearing: MMX register state (mm0/mm2/mm4/mm5/mm6) is carried
 * between separate asm statements, so no reordering is safe.
 *
 * disks: total number of disks; bytes: bytes per disk to process;
 * ptrs: per-disk buffers (dptr[z0] presumably the highest data disk,
 * p/q the parity/syndrome buffers -- confirm against the unmangled
 * original).
 */
40 static void raid6_mmx1_gen_syndrome(
int disks,
size_t bytes,
void **
ptrs)
/* View the opaque pointer array as per-disk byte buffers. */
42 u8 **dptr = (
u8 **)ptrs;
/* mm0 = 0x1d replicated in all 8 bytes: GF(2^8) reduction constant. */
52 asm volatile(
"movq %0,%%mm0" : :
"m" (raid6_mmx_constants.x1d));
/* mm5 = 0; used below as the per-byte sign-mask scratch register. */
53 asm volatile(
"pxor %mm5,%mm5");
/* Walk the stripe 8 bytes at a time. */
55 for ( d = 0 ; d <
bytes ; d += 8 ) {
/* mm2 (P accumulator) starts as the highest data disk's chunk... */
56 asm volatile(
"movq %0,%%mm2" : :
"m" (dptr[z0][
d]));
/* ...and mm4 (Q accumulator) starts as a copy of it. */
57 asm volatile(
"movq %mm2,%mm4");
/* Fold in the remaining data disks, highest to lowest. */
58 for ( z = z0-1 ; z >= 0 ; z-- ) {
/* mm6 = this disk's 8-byte chunk. */
59 asm volatile(
"movq %0,%%mm6" : :
"m" (dptr[z][
d]));
/*
 * Bytewise multiply of mm4 by 2 in GF(2^8), four steps:
 * mm5 = 0xff in bytes where mm4's top bit is set (signed
 * byte compare against zero), ...
 */
60 asm volatile(
"pcmpgtb %mm4,%mm5");
/* ...double each byte (add to itself == shift left 1), ... */
61 asm volatile(
"paddb %mm4,%mm4");
/* ...keep 0x1d only in the bytes that overflowed, ... */
62 asm volatile(
"pand %mm0,%mm5");
/* ...and xor in the reduction. */
63 asm volatile(
"pxor %mm5,%mm4");
/* Re-zero mm5 for the next iteration's compare. */
64 asm volatile(
"pxor %mm5,%mm5");
/* P ^= data */
65 asm volatile(
"pxor %mm6,%mm2");
/* Q = 2*Q ^ data (Horner-style syndrome accumulation). */
66 asm volatile(
"pxor %mm6,%mm4");
/* Store the finished P chunk and clear its accumulator... */
68 asm volatile(
"movq %%mm2,%0" :
"=m" (p[
d]));
69 asm volatile(
"pxor %mm2,%mm2");
/* ...then the finished Q chunk, likewise cleared. */
70 asm volatile(
"movq %%mm4,%0" :
"=m" (q[
d]));
71 asm volatile(
"pxor %mm4,%mm4");
78 raid6_mmx1_gen_syndrome,
/*
 * raid6_mmx2_gen_syndrome() - same P/Q syndrome computation as the
 * mmx1 variant, but software-pipelined two MMX registers wide:
 * 16 bytes per loop iteration (mm2/mm4 hold P/Q for bytes d..d+7,
 * mm3/mm6 for bytes d+8..d+15).
 *
 * NOTE(review): mangled/truncated view -- the declarations of d, z,
 * z0, p and q, the FPU bracketing and the closing braces are not
 * visible, and statements are wrapped mid-line.  Comments only were
 * added; code bytes are untouched.  Unlike mmx1, mm5/mm7 double as
 * both the sign-mask scratch AND the data-load registers inside the
 * inner loop, so the statement order is doubly critical.
 */
87 static void raid6_mmx2_gen_syndrome(
int disks,
size_t bytes,
void **ptrs)
/* View the opaque pointer array as per-disk byte buffers. */
89 u8 **dptr = (
u8 **)ptrs;
/* mm0 = 0x1d in every byte: GF(2^8) reduction constant. */
99 asm volatile(
"movq %0,%%mm0" : :
"m" (raid6_mmx_constants.x1d));
/* mm5/mm7 = 0: sign-mask scratch for the two lanes. */
100 asm volatile(
"pxor %mm5,%mm5");
101 asm volatile(
"pxor %mm7,%mm7");
/* Two 8-byte lanes per iteration. */
103 for ( d = 0 ; d <
bytes ; d += 16 ) {
/* P accumulators (mm2/mm3) start as the highest data disk's data. */
104 asm volatile(
"movq %0,%%mm2" : :
"m" (dptr[z0][
d]));
105 asm volatile(
"movq %0,%%mm3" : :
"m" (dptr[z0][d+8]));
/* Q accumulators (mm4/mm6) start as copies of P. */
106 asm volatile(
"movq %mm2,%mm4");
107 asm volatile(
"movq %mm3,%mm6");
/* Fold in the remaining data disks, highest to lowest. */
108 for ( z = z0-1 ; z >= 0 ; z-- ) {
/*
 * Bytewise multiply-by-2 of both Q lanes in GF(2^8): mask
 * bytes whose top bit is set (signed compare vs zero),
 * double every byte, reduce the overflowed bytes by 0x1d.
 */
109 asm volatile(
"pcmpgtb %mm4,%mm5");
110 asm volatile(
"pcmpgtb %mm6,%mm7");
111 asm volatile(
"paddb %mm4,%mm4");
112 asm volatile(
"paddb %mm6,%mm6");
113 asm volatile(
"pand %mm0,%mm5");
114 asm volatile(
"pand %mm0,%mm7");
115 asm volatile(
"pxor %mm5,%mm4");
116 asm volatile(
"pxor %mm7,%mm6");
/* mm5/mm7 now reused as data-load registers for this disk. */
117 asm volatile(
"movq %0,%%mm5" : :
"m" (dptr[z][
d]));
118 asm volatile(
"movq %0,%%mm7" : :
"m" (dptr[z][d+8]));
/* P ^= data in both lanes... */
119 asm volatile(
"pxor %mm5,%mm2");
120 asm volatile(
"pxor %mm7,%mm3");
/* ...Q = 2*Q ^ data in both lanes... */
121 asm volatile(
"pxor %mm5,%mm4");
122 asm volatile(
"pxor %mm7,%mm6");
/* ...then re-zero mm5/mm7 for the next iteration's compare. */
123 asm volatile(
"pxor %mm5,%mm5");
124 asm volatile(
"pxor %mm7,%mm7");
/* Store the finished 16-byte P and Q chunks. */
126 asm volatile(
"movq %%mm2,%0" :
"=m" (p[
d]));
127 asm volatile(
"movq %%mm3,%0" :
"=m" (p[d+8]));
128 asm volatile(
"movq %%mm4,%0" :
"=m" (q[
d]));
129 asm volatile(
"movq %%mm6,%0" :
"=m" (q[d+8]));
136 raid6_mmx2_gen_syndrome,