/* SPDX-License-Identifier: GPL-2.0+ WITH GCC-exception-2.0

   Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
   Free Software Foundation, Inc.  */
!! libgcc routines for the Renesas / SuperH SH CPUs.
!! Contributed by Steve Chamberlain.

!! ashiftrt_r4_x, ___ashrsi3, ___ashlsi3, ___lshrsi3 routines
!! recoded in assembly by Toshiyasu Morita
/* SH2 optimizations for ___ashrsi3, ___ashlsi3, ___lshrsi3 and
   ELF local label prefixes by Jörn Rennecke.  */
	! __movstr* are aliases kept for the older names of the
	! renamed __movmem* entry points.
	.set	__movstr, __movmem
	/* This would be a lot simpler if r6 contained the byte count
	   minus 64, and we wouldn't be called here for a byte count of 64.  */
movmem_loop: /* Reached with rts */
! Done with all the large 64-byte groups; now copy the remainder
! (see the C-level sketch after movmem_done below).
movmem_done: ! Shares the delay-slot insn; the alignment works out.
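/* A rough C-level sketch of the strategy above, for readers who do not
   read SH assembly.  This is illustrative only: the function name is
   made up, and it assumes (as the entry points suggest) a longword-
   aligned copy whose length is a multiple of 4, with r4 = destination,
   r5 = source and r6 derived from the byte count as set up by the
   compiler's block-move expansion.

	void movmem_sketch (int *dst, const int *src, int bytes)
	{
	  // movmem_loop: move 64-byte (16-longword) groups.
	  while (bytes > 64)
	    {
	      for (int i = 0; i < 16; i++)
		dst[i] = src[i];
	      dst += 16;
	      src += 16;
	      bytes -= 64;
	    }
	  // Remainder of 4..64 bytes: the assembly computes an address
	  // inside the __movmemSI4..__movmemSI64 fall-through chain and
	  // jumps into it; here we simply copy the leftover longwords.
	  for (int i = 0; i < bytes / 4; i++)
	    dst[i] = src[i];
	}
*/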
	.set	__movstrSI64, __movmemSI64
	.set	__movstrSI60, __movmemSI60
	.set	__movstrSI56, __movmemSI56
	.set	__movstrSI52, __movmemSI52
	.set	__movstrSI48, __movmemSI48
	.set	__movstrSI44, __movmemSI44
	.set	__movstrSI40, __movmemSI40
	.set	__movstrSI36, __movmemSI36
	.set	__movstrSI32, __movmemSI32
	.set	__movstrSI28, __movmemSI28
	.set	__movstrSI24, __movmemSI24
	.set	__movstrSI20, __movmemSI20
	.set	__movstrSI16, __movmemSI16
	.set	__movstrSI12, __movmemSI12
	.set	__movstrSI8, __movmemSI8
	.set	__movstrSI4, __movmemSI4
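/* The __movmemSI64 .. __movmemSI4 entry points aliased above form one
   fall-through chain: entering at __movmemSI<N> stores the longword at
   offset N-4, drops into __movmemSI<N-4>, and the final entry returns,
   so a call to __movmemSI<N> copies exactly N bytes.  A hedged C
   analogue in the style of Duff's device (the function name is made up;
   dst/src are assumed longword-aligned and n a multiple of 4 up to 64):

	void movmemSI_sketch (int *dst, const int *src, int n)
	{
	  switch (n)
	    {
	    case 64: dst[15] = src[15];	// __movmemSI64, falls through
	    case 60: dst[14] = src[14];	// __movmemSI60, falls through
	    case 56: dst[13] = src[13];
	    case 52: dst[12] = src[12];
	    case 48: dst[11] = src[11];
	    case 44: dst[10] = src[10];
	    case 40: dst[9]  = src[9];
	    case 36: dst[8]  = src[8];
	    case 32: dst[7]  = src[7];
	    case 28: dst[6]  = src[6];
	    case 24: dst[5]  = src[5];
	    case 20: dst[4]  = src[4];
	    case 16: dst[3]  = src[3];
	    case 12: dst[2]  = src[2];
	    case  8: dst[1]  = src[1];
	    case  4: dst[0]  = src[0];	// __movmemSI4, then return
	    }
	}
*/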
	.global	__movmem_i4_even
	.global	__movstr_i4_even
	.set	__movstr_i4_even, __movmem_i4_even

	.global	__movmem_i4_odd
	.global	__movstr_i4_odd
	.set	__movstr_i4_odd, __movmem_i4_odd

	.global	__movmemSI12_i4
	.global	__movstrSI12_i4
	.set	__movstrSI12_i4, __movmemSI12_i4
	bra	L_movmem_start_even	! unconditional branch, delayed
	bt/s	L_movmem_2mod4_end	! branch if T bit set, with delay slot