/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2002 Paul Mackerras, IBM Corp.
 */
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/export.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>
#include <asm/kasan.h>
#ifndef SELFTEST_CASE
/* For big-endian, 0 == most CPUs, 1 == POWER6, 2 == Cell */
#define SELFTEST_CASE 0
#endif
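/*
 * SELFTEST_CASE feeds the test_feature assignments further down, each of
 * which forces one of the feature-fixup alternatives on.  The user-space
 * copy-loop selftests are assumed to build this file with SELFTEST_CASE
 * predefined (hence the #ifndef guard above); kernel builds use case 0.
 */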
_GLOBAL_TOC_KASAN(memcpy)
BEGIN_FTR_SECTION
#ifdef __LITTLE_ENDIAN__
	cmpdi	cr7,r5,0		/* len == 0? (tested by the byte loop below) */
#else
	std	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)	/* save destination pointer for return value */
#endif
FTR_SECTION_ELSE
#ifdef CONFIG_PPC_BOOK3S_64
	b	memcpy_power7		/* CPU_FTR_VMX_COPY: use the VMX-accelerated variant */
#endif
ALT_FTR_SECTION_END_IFCLR(CPU_FTR_VMX_COPY)
#ifdef __LITTLE_ENDIAN__
	/* dumb little-endian memcpy that will get replaced at runtime */
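	/*
	 * A minimal sketch, assuming the usual r3 = dest, r4 = src,
	 * r5 = len arguments, of the byte-at-a-time loop described above;
	 * cr7 holds the len == 0 test from the entry code.
	 */
	addi	r9,r3,-1		/* copy via r9 so r3 (the return value) survives */
	addi	r4,r4,-1
	beqlr	cr7			/* nothing to copy for len == 0 */
	mtctr	r5
1:	lbzu	r10,1(r4)		/* load next source byte */
	stbu	r10,1(r9)		/* store it to the destination */
	bdnz	1b
	blr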
#else	/* big-endian: the optimized main body follows */
	neg	r6,r3			# LS 3 bits = # bytes to 8-byte dest bdry
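	/*
	 * Worked example of the alignment arithmetic: for r3 = 0x1005,
	 * r6 = -0x1005 = 0x...ffffeffb, whose low 3 bits are 3, exactly the
	 * number of bytes needed to reach the next 8-byte boundary at
	 * 0x1008.  Only the low 3 bits of r6 are used afterwards.
	 */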
/* Below we want to nop out the bne if we're on a CPU that has the
   CPU_FTR_UNALIGNED_LD_STD bit set and the CPU_FTR_CP_USE_DCBTZ bit
   unset.
   At the time of writing the only CPU that has this combination of bits
   set is Power6. */
test_feature = (SELFTEST_CASE == 1)
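/*
 * Sketch of the alternative section that the comment above describes
 * (the .Ldst_unaligned label name is an assumption, it is not visible in
 * this excerpt): CPUs with CPU_FTR_UNALIGNED_LD_STD set and
 * CPU_FTR_CP_USE_DCBTZ clear end up with a nop here, everything else
 * takes the bne to the unaligned-destination path.
 */
BEGIN_FTR_SECTION
	nop
FTR_SECTION_ELSE
	bne	.Ldst_unaligned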
ALT_FTR_SECTION_END(CPU_FTR_UNALIGNED_LD_STD | CPU_FTR_CP_USE_DCBTZ, \
		    CPU_FTR_UNALIGNED_LD_STD)
test_feature = (SELFTEST_CASE == 0)
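/*
 * Sketch of the guarded check that the END_FTR_SECTION_IFCLR below
 * closes (the .Lsrc_unaligned label name is an assumption): test whether
 * the source is 8-byte aligned and branch to the unaligned-source code
 * if not.  On CPUs that handle unaligned ld/std cheaply
 * (CPU_FTR_UNALIGNED_LD_STD set) the check is nopped out.
 */
BEGIN_FTR_SECTION
	andi.	r0,r4,7			/* low 3 bits of the source address */
	bne	.Lsrc_unaligned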
END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
3:	ld	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)	/* return dest pointer */
	ld	r9,0(r4)	# 3+2n loads, 2+2n stores
	# s1<< in r8, d0=(s0<<|s1>>) in r7, s3 in r0, s2 in r9, nix in r6 & r12
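	/*
	 * Shift-and-merge scheme for a misaligned source (big-endian):
	 * the elided setup is assumed to round r4 down to an 8-byte
	 * boundary and to compute r10 = 8 * (source offset) and
	 * r11 = 64 - r10.  Each destination doubleword is then
	 * d(i) = (s(i) << r10) | (s(i+1) >> r11), built entirely from
	 * aligned loads; the "# s1<< in r8 ..." style comments nearby
	 * track which pieces are live in which registers.
	 */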
0:	ld	r0,0(r4)	# 4+2n loads, 3+2n stores
	# d0=(s0<<|s1>>) in r12, s1<< in r6, s2>> in r7, s2<< in r8, s3 in r9
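	/*
	 * This second entry point emits one extra destination doubleword
	 * before reaching the same steady state as the path above, which
	 * is where its extra load and store (4+2n / 3+2n) come from.
	 */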
3:	ld	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)	/* return dest pointer */
	PPC_MTOCRF(0x01,r6)		# put #bytes to 8B bdry into cr7
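	/*
	 * PPC_MTOCRF with mask 0x01 copies the low four bits of the source
	 * register into CR field 7, so the code below can test the 8-, 4-,
	 * 2- and 1-byte cases individually with bt/bf on cr7 bits.
	 */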
3:	PPC_MTOCRF(0x01,r5)		# low bits of the byte count into cr7, as above
4:	ld	r3,-STACKFRAMESIZE+STK_REG(R31)(r1)	/* return dest pointer */
#endif
EXPORT_SYMBOL(memcpy)
EXPORT_SYMBOL_KASAN(memcpy)