// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Altivec XOR operations
 *
 * Copyright 2017 IBM Corp.
 */
#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/sched.h>
#include <asm/switch_to.h>
#include <asm/xor_altivec.h>
#include "xor_vmx.h"
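
/*
 * Thin glue around the VMX XOR helpers declared in xor_vmx.h: each
 * wrapper disables preemption, enables kernel Altivec, runs the
 * vectorised XOR, then restores the previous state so the VMX unit
 * is not left enabled across a context switch.
 */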

void xor_altivec_2(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_2(bytes, p1, p2);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_2);

void xor_altivec_3(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2,
		   const unsigned long * __restrict p3)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_3(bytes, p1, p2, p3);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_3);

void xor_altivec_4(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2,
		   const unsigned long * __restrict p3,
		   const unsigned long * __restrict p4)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_4(bytes, p1, p2, p3, p4);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_4);

void xor_altivec_5(unsigned long bytes, unsigned long * __restrict p1,
		   const unsigned long * __restrict p2,
		   const unsigned long * __restrict p3,
		   const unsigned long * __restrict p4,
		   const unsigned long * __restrict p5)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_5(bytes, p1, p2, p3, p4, p5);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_5);