/*	$NetBSD: bzero.S,v 1.14 2013/09/12 15:36:15 joerg Exp $ */
/*-
 * Copyright (C) 2001	Martin J. Laubach <mjl@NetBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/*----------------------------------------------------------------------*/

#include <machine/asm.h>

#if defined(LIBC_SCCS) && !defined(lint)
__RCSID("$NetBSD: bzero.S,v 1.14 2013/09/12 15:36:15 joerg Exp $")
#endif /* LIBC_SCCS && !lint */
#define USE_STSWX 0	/* don't. slower than trivial copy loop */

/*----------------------------------------------------------------------*/
/*
     void bzero(void *b %r3, size_t len %r4);
     void * memset(void *b %r3, int c %r4, size_t len %r5);
*/
/*----------------------------------------------------------------------*/
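/*
 * Overview: the fill is staged as byte fill to word alignment, word fill
 * to cache-block alignment, dcbz over whole cache blocks, then a simple
 * word/byte fill for the tail.  Only a zero fill takes the cache-block
 * path; memset with a non-zero byte goes straight to simple_fill.  A
 * minimal illustrative C sketch of that staging, assuming a power-of-two
 * D-cache line size "cl" (function and variable names are hypothetical,
 * not part of this file's interfaces):
 *
 *	#include <stddef.h>
 *	#include <stdint.h>
 *	#include <string.h>
 *
 *	void
 *	zero_fill(unsigned char *dst, size_t len, size_t cl)
 *	{
 *		while (len > 0 && ((uintptr_t)dst & 3) != 0) {
 *			*dst++ = 0;		// byte fill to word alignment
 *			len--;
 *		}
 *		while (len >= 4 && ((uintptr_t)dst & (cl - 1)) != 0) {
 *			*(uint32_t *)dst = 0;	// word fill to cache-block alignment
 *			dst += 4;
 *			len -= 4;
 *		}
 *		while (len >= cl) {
 *			memset(dst, 0, cl);	// the asm uses dcbz here
 *			dst += cl;
 *			len -= cl;
 *		}
 *		while (len-- > 0)
 *			*dst++ = 0;		// trailing bytes
 *	}
 */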
		li	r_val, 0		/* Value to stuff in */

		beqlr-	%cr1			/* Nothing to do */

		rlwimi	%r0, %r4, 8, 16, 23	/* word extend fill value */
		rlwimi	%r0, %r0, 16, 0, 15
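/*
 * The two rlwimi above replicate the low byte of the fill value across
 * the word, e.g. 0x000000ab -> 0x0000abab -> 0xabababab, so that the
 * word stores below write the requested byte into every position.
 */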
		bne-	simple_fill		/* != 0, use trivial fill */
/*----------------------------------------------------------------------*/

		/* First get cache line size */

		addis	%r10,%r10,_libc_powerpc_cache_info+CACHE_INFO_DCACHE_LINE_SIZE-1b@ha
		lwz	%r9,_libc_powerpc_cache_info+CACHE_INFO_DCACHE_LINE_SIZE-1b@l(%r10)

		lis	%r10,_libc_powerpc_cache_info+CACHE_INFO_DCACHE_LINE_SIZE@ha
		lwz	%r9,_libc_powerpc_cache_info+CACHE_INFO_DCACHE_LINE_SIZE@l(%r10)

		cmplwi	%cr1, %r9, 0		/* Unknown? */
		beq-	simple_fill		/* a trivial fill routine */
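/*
 * The two load sequences above fetch the D-cache line size that libc
 * startup records in _libc_powerpc_cache_info; the first pair uses
 * PC-relative addressing, the second absolute addressing (presumably
 * selected by a PIC conditional in the full source).  A size of zero
 * means "unknown", in which case the plain fill is used instead.
 */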
		mfsprg	%r10, 0			/* Get cpu_info pointer */

		lis	%r10, cpu_info_store@ha
		addi	%r10, %r10, cpu_info_store@l

		lwz	%r9, CPU_CI+CACHE_INFO_DCACHE_LINE_SIZE(%r10)	/* Load D$ line size */
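/*
 * Kernel variant: the D-cache line size is read out of cpu_info, either
 * through the per-CPU pointer kept in SPRG0 or from the static
 * cpu_info_store (the two ways of setting %r10 above are presumably
 * alternative paths in the full source), so no libc-side lookup is
 * needed.
 */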
		cntlzw	%r10, %r9		/* Calculate shift.. */
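/*
 * For a power-of-two line size, cntlzw yields 31 - log2(size); e.g. a
 * 32-byte line gives 26, from which the shift count log2(32) = 5 used
 * to turn a byte count into a cache-block count can be derived.
 */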
		/* Back in memory filling business */

		cmplwi	%cr1, r_len, 0		/* Nothing to do? */
		cmplw	r_len, %r5		/* <= 2*CL bytes to move? */
		beqlr-	%cr1			/* then do nothing */

		blt+	simple_fill		/* a trivial fill routine */
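/*
 * Requests shorter than about two cache lines are not worth the
 * alignment and dcbz setup below, so they go to the trivial fill
 * (%r5 is assumed to hold twice the cache line size at this point,
 * matching the comparison comment above).
 */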
		/* Word align the block, fill bytewise until dst even */

		andi.	%r5, r_dst, 0x03
		beq+	cb_aligned_w		/* already aligned to word? */

		subf	%r5, %r5, %r6		/* bytes to fill to align4 */
		add	r_dst, %r5, r_dst

1:		stbu	r_val, 1(r_dst)		/* Fill bytewise */

		subf	r_len, %r5, r_len
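/*
 * The byte loop relies on the stbu (store byte with update) idiom: the
 * destination pointer is first biased down by one (as the simple-fill
 * path below does explicitly), so each "stbu r_val, 1(r_dst)" both
 * stores the next byte and advances r_dst.
 */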
cb_aligned_w:	/* Cache block align, fill wordwise until dst aligned */

		/* I know I have something to do since we had > 2*CL initially */
		/* so no need to check for r_len = 0 */

		subi	%r6, %r9, 1		/* CL mask */
		beq	cb_aligned_cb		/* already on CL boundary? */

		subf	%r5, %r5, %r6		/* words to fill to alignment */
		subf	r_len, %r5, r_len

1:		stwu	r_val, 4(r_dst)		/* Fill wordwise */

cb_aligned_cb:	/* no need to check r_len, see above */

		srw.	%r5, r_len, %r10	/* Number of cache blocks */
		subf	r_len, %r5, r_len

1:		dcbz	0, r_dst		/* Clear blockwise */
		add	r_dst, r_dst, %r9
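/*
 * dcbz zeroes a whole data cache block with a single instruction,
 * without fetching the old contents from memory first; that is why the
 * code insists on a known line size and on cache-block alignment.  With
 * a 32-byte line, one dcbz does the work of eight stw instructions.
 */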
cblocks_done:	/* still CL aligned, but less than CL bytes left */
		cmplwi	%cr1, r_len, 0
		blt-	sf_bytewise		/* < 8 remaining? */

/*----------------------------------------------------------------------*/

		beqlr-				/* Nothing to do */

		cmplwi	%cr1, r_len, 12		/* < 12 bytes to move? */
		cmplwi	%cr1, r_len, 8		/* < 8 bytes to move? */
		andi.	%r5, r_dst, 0x03	/* bytes to fill to align4 */
		blt	%cr1, sf_bytewise	/* trivial byte mover */

		beq+	sf_aligned_w		/* dest is word aligned */
		add	r_dst, %r5, r_dst
		mtctr	%r5			/* nope, then fill bytewise */
		subi	r_dst, r_dst, 1		/* until it is */
1:		stbu	r_val, 1(r_dst)

		subf	r_len, %r5, r_len
sf_aligned_w:	/* no need to check r_len since it was >= 8 bytes initially */
		slwi	%r5, %r5, 3		/* adjust len */
		subf.	r_len, %r5, r_len

1:		stswi	%r6, r_dst, 8
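/*
 * stswi (store string word immediate) writes 8 bytes from %r6/%r7 per
 * iteration; this string-instruction path is presumably guarded by
 * USE_STSWX in the full source and is compiled out by default, since
 * USE_STSWX is 0 above ("slower than trivial copy loop").
 */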
		srwi	%r5, r_len, 2		/* words to fill */
		subf.	r_len, %r5, r_len	/* adjust len for fill */

1:		stwu	r_val, 4(r_dst)

sf_word_done:	bne-	sf_bytewise

sf_return:	mr	%r3, %r8		/* restore orig ptr */
		blr				/* for memset functionality */
1:		stbu	r_val, 1(r_dst)

		mr	%r3, %r8		/* restore orig ptr */
		blr				/* for memset functionality */

/*----------------------------------------------------------------------*/
cache_info:	.long	-1, -1, -1, -1
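/*
 * Local storage for the cache parameters, initialised to -1 to mark
 * them as not yet determined; they are presumably filled in at run time
 * before the D-cache line size check above can succeed.
 */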
/*----------------------------------------------------------------------*/