#ifndef _ASM_X86_XOR_H
#define _ASM_X86_XOR_H

/*
 * Optimized RAID-5 checksumming functions for SSE.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * You should have received a copy of the GNU General Public License
 * (for example /usr/src/linux/COPYING); if not, write to the Free
 * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * Cache avoiding checksumming functions utilizing KNI instructions
 * Copyright (C) 1999 Zach Brown (with obvious credit due Ingo)
 */

/*
 * High-speed RAID5 checksumming functions utilizing SSE instructions.
 * Copyright (C) 1998 Ingo Molnar.
 */

/*
 * x86-64 changes / gcc fixes from Andi Kleen.
 * Copyright 2002 Andi Kleen, SuSE Labs.
 *
 * This hasn't been optimized for the hammer yet, but there are likely
 * no advantages to be gotten from x86-64 here anyways.
 */
#include <asm/fpu/api.h>
#ifdef CONFIG_X86_32
/* reduce register pressure */
# define XOR_CONSTANT_CONSTRAINT "i"
#else
# define XOR_CONSTANT_CONSTRAINT "re"
#endif
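/*
 * On 32-bit the 256-byte stride used below is forced into an immediate
 * operand ("i") so it does not tie up one of the few general-purpose
 * registers; on 64-bit, "re" lets the compiler pick either a register or
 * a sign-extended 32-bit immediate.
 */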
#define OFFS(x)		"16*("#x")"
#define PF_OFFS(x)	"256+16*("#x")"
#define PF0(x)		"	prefetchnta "PF_OFFS(x)"(%[p1])		;\n"
#define LD(x, y)	"	movaps "OFFS(x)"(%[p1]), %%xmm"#y"	;\n"
#define ST(x, y)	"	movaps %%xmm"#y", "OFFS(x)"(%[p1])	;\n"
#define PF1(x)		"	prefetchnta "PF_OFFS(x)"(%[p2])		;\n"
#define PF2(x)		"	prefetchnta "PF_OFFS(x)"(%[p3])		;\n"
#define PF3(x)		"	prefetchnta "PF_OFFS(x)"(%[p4])		;\n"
#define PF4(x)		"	prefetchnta "PF_OFFS(x)"(%[p5])		;\n"
#define XO1(x, y)	"	xorps "OFFS(x)"(%[p2]), %%xmm"#y"	;\n"
#define XO2(x, y)	"	xorps "OFFS(x)"(%[p3]), %%xmm"#y"	;\n"
#define XO3(x, y)	"	xorps "OFFS(x)"(%[p4]), %%xmm"#y"	;\n"
#define XO4(x, y)	"	xorps "OFFS(x)"(%[p5]), %%xmm"#y"	;\n"
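/*
 * Each helper expands to one instruction of inline-asm text.  OFFS(x) is
 * the byte offset of the x-th 16-byte (XMM-sized) chunk inside the current
 * 256-byte block, and PF_OFFS(x) is the same chunk one full iteration
 * (256 bytes) ahead, used by the non-temporal prefetches PF0-PF4 on
 * p1-p5.  LD/ST move chunks of p1 through the XMM registers, and XO1-XO4
 * XOR in the corresponding chunks of p2-p5.
 */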
#define BLK64(pf, op, i)			\
		pf(i)				\
		op(i, 0)			\
		op(i + 1, 1)			\
		op(i + 2, 2)			\
		op(i + 3, 3)
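/*
 * BLK64() covers one 64-byte span: a single prefetch followed by the given
 * operation on four consecutive 16-byte chunks held in %xmm0-%xmm3.
 */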
static void
xor_sse_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)
{
	unsigned long lines = bytes >> 8;
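	/*
	 * Each pass of the loop consumes 256 bytes (16 XMM chunks) per
	 * buffer, hence the conversion of the byte count into 256-byte
	 * lines up front.
	 */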
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	: [cnt] "+r" (lines),
	  [p1] "+r" (p1), [p2] "+r" (p2)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static void
xor_sse_2_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2)
{
	unsigned long lines = bytes >> 8;
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	: [cnt] "+r" (lines),
	  [p1] "+r" (p1), [p2] "+r" (p2)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static void
xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,
	  unsigned long *p3)
{
	unsigned long lines = bytes >> 8;
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	"       add %[inc], %[p3]       ;\n"
	: [cnt] "+r" (lines),
	  [p1] "+r" (p1), [p2] "+r" (p2), [p3] "+r" (p3)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static void
xor_sse_3_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2,
	       unsigned long *p3)
{
	unsigned long lines = bytes >> 8;
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	"       add %[inc], %[p3]       ;\n"
	: [cnt] "+r" (lines),
	  [p1] "+r" (p1), [p2] "+r" (p2), [p3] "+r" (p3)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static void
xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,
	  unsigned long *p3, unsigned long *p4)
{
	unsigned long lines = bytes >> 8;
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	"       add %[inc], %[p3]       ;\n"
	"       add %[inc], %[p4]       ;\n"
	: [cnt] "+r" (lines), [p1] "+r" (p1),
	  [p2] "+r" (p2), [p3] "+r" (p3), [p4] "+r" (p4)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static void
xor_sse_4_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2,
	       unsigned long *p3, unsigned long *p4)
{
	unsigned long lines = bytes >> 8;
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	"       add %[inc], %[p3]       ;\n"
	"       add %[inc], %[p4]       ;\n"
	: [cnt] "+r" (lines), [p1] "+r" (p1),
	  [p2] "+r" (p2), [p3] "+r" (p3), [p4] "+r" (p4)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static void
xor_sse_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
	  unsigned long *p3, unsigned long *p4, unsigned long *p5)
{
	unsigned long lines = bytes >> 8;
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	"       add %[inc], %[p3]       ;\n"
	"       add %[inc], %[p4]       ;\n"
	"       add %[inc], %[p5]       ;\n"
	: [cnt] "+r" (lines), [p1] "+r" (p1), [p2] "+r" (p2),
	  [p3] "+r" (p3), [p4] "+r" (p4), [p5] "+r" (p5)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static void
xor_sse_5_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2,
	       unsigned long *p3, unsigned long *p4, unsigned long *p5)
{
	unsigned long lines = bytes >> 8;
	"       add %[inc], %[p1]       ;\n"
	"       add %[inc], %[p2]       ;\n"
	"       add %[inc], %[p3]       ;\n"
	"       add %[inc], %[p4]       ;\n"
	"       add %[inc], %[p5]       ;\n"
	: [cnt] "+r" (lines), [p1] "+r" (p1), [p2] "+r" (p2),
	  [p3] "+r" (p3), [p4] "+r" (p4), [p5] "+r" (p5)
	: [inc] XOR_CONSTANT_CONSTRAINT (256UL)
static struct xor_block_template xor_block_sse_pf64 = {
	.name = "prefetch64-sse",
	.do_2 = xor_sse_2_pf64,
	.do_3 = xor_sse_3_pf64,
	.do_4 = xor_sse_4_pf64,
	.do_5 = xor_sse_5_pf64,
};
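/*
 * Illustrative usage (a sketch, not part of this file): the generic XOR
 * layer selects one template and calls its handlers with equally sized,
 * 16-byte-aligned buffers whose length is a multiple of 256, e.g.:
 *
 *	struct xor_block_template *tmpl = &xor_block_sse_pf64;
 *	unsigned long *dest, *src;	// two same-sized buffers
 *
 *	tmpl->do_2(PAGE_SIZE, dest, src);	// dest[i] ^= src[i]
 *
 * The result is always accumulated into the first buffer (p1).
 */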
#undef XOR_CONSTANT_CONSTRAINT
#ifdef CONFIG_X86_32
# include <asm/xor_32.h>
#else
# include <asm/xor_64.h>
#endif
#define XOR_SELECT_TEMPLATE(FASTEST) \
	AVX_SELECT(FASTEST)
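/*
 * XOR_SELECT_TEMPLATE() gives the architecture the final say over the
 * template chosen by the generic calibration code; here it defers to the
 * selector provided by the 32/64-bit headers included above.
 */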
#endif /* _ASM_X86_XOR_H */