1 /* SPDX-License-Identifier: GPL-2.0+ */
2 /* Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
4 Free Software Foundation, Inc.
7 !! libgcc routines for the Renesas / SuperH SH CPUs.
8 !! Contributed by Steve Chamberlain.
11 !! ashiftrt_r4_x, ___ashrsi3, ___ashlsi3, ___lshrsi3 routines
12 !! recoded in assembly by Toshiyasu Morita
15 /* SH2 optimizations for ___ashrsi3, ___ashlsi3, ___lshrsi3 and
16 ELF local label prefixes by Jörn Rennecke
23 .set __movstr, __movmem
24 /* This would be a lot simpler if r6 contained the byte count
25 minus 64, and we wouldn't be called here for a byte count of 64. */
32 movmem_loop: /* Reached with rts */
46 ! done all the large groups, do the remainder
51 movmem_done: ! share slot insn, works out aligned.
! Legacy-name aliases: each __movstrSI<n> symbol is bound to the
! corresponding __movmemSI<n> entry point, so objects compiled against
! the old "movstr" libcall names still link against the "movmem"
! implementations.  (Presumably kept for the GCC movstr->movmem
! libcall rename -- TODO confirm against libgcc history.)
! <n> is the byte count handled by that specialized entry.
61 .set __movstrSI64, __movmemSI64
67 .set __movstrSI60, __movmemSI60
73 .set __movstrSI56, __movmemSI56
79 .set __movstrSI52, __movmemSI52
85 .set __movstrSI48, __movmemSI48
91 .set __movstrSI44, __movmemSI44
97 .set __movstrSI40, __movmemSI40
103 .set __movstrSI36, __movmemSI36
109 .set __movstrSI32, __movmemSI32
115 .set __movstrSI28, __movmemSI28
121 .set __movstrSI24, __movmemSI24
127 .set __movstrSI20, __movmemSI20
133 .set __movstrSI16, __movmemSI16
139 .set __movstrSI12, __movmemSI12
145 .set __movstrSI8, __movmemSI8
151 .set __movstrSI4, __movmemSI4
! Export the _i4 block-move variants under both the new __movmem* and
! the legacy __movstr* names (the .set directives make the old names
! synonyms of the new entry points).  The _even/_odd suffixes and the
! "_i4" tag presumably select on source/destination word alignment and
! the SH4 (-m4) code model respectively -- verify against the full
! lib1funcs source before relying on this.
157 .global __movmem_i4_even
158 .global __movstr_i4_even
159 .set __movstr_i4_even, __movmem_i4_even
161 .global __movmem_i4_odd
162 .global __movstr_i4_odd
163 .set __movstr_i4_odd, __movmem_i4_odd
165 .global __movmemSI12_i4
166 .global __movstrSI12_i4
167 .set __movstrSI12_i4, __movmemSI12_i4
179 bra L_movmem_start_even
194 bt/s L_movmem_2mod4_end