/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
12 #include <asm/asm-offsets.h>
13 #include <asm/export.h>
14 #include <asm/regdef.h>
24 #ifdef CONFIG_CPU_MICROMIPS
/*
 * microMIPS can pair two long stores in one instruction, so a "store
 * unit" (STORSIZE) is two longs wide; STORMASK is the matching
 * alignment mask.
 */
25 #define STORSIZE (LONGSIZE * 2)
26 #define STORMASK (STORSIZE - 1)
/*
 * NOTE(review): the companion defines and the #else/#endif arms of this
 * conditional are not visible in this excerpt (embedded line numbers
 * jump 26 -> 30 -> 32) — confirm against the complete file before
 * editing.
 */
30 #define LONG_S LONG_SP
/* Non-microMIPS: one long per store. */
32 #define STORSIZE LONGSIZE
33 #define STORMASK LONGMASK
/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
/* Rewrite a plain store mnemonic into its EVA ("e"-suffixed) form. */
45 #define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
/* Extra indirection so 'insn' is macro-expanded before pasting. */
46 #define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
/*
 * EX(): emit a (possibly faulting) store together with an __ex_table
 * entry so that a fault inside it is redirected to 'handler'. Selects
 * the legacy or EVA encoding based on the assembler-time \mode symbol.
 * NOTE(review): several continuation lines of this #define (the
 * LEGACY_MODE emission, .else/.endif, and the exception-table PTR
 * entry) are missing from this excerpt — do not restructure this macro
 * without the complete definition in hand.
 */
48 #define EX(insn,reg,addr,handler) \
49 .if \mode == LEGACY_MODE; \
52 9: ___BUILD_EVA_INSN(insn, reg, addr); \
54 .section __ex_table,"a"; \
/*
 * f_fill64: store 64 bytes of \val starting at \offset(\dst).
 * Every store is wrapped in EX() so a fault branches to \fixup.
 * NOTE(review): the closing #endif lines and the .endm directive are
 * not visible in this excerpt — confirm against the complete file.
 */
58 .macro f_fill64 dst, offset, val, fixup, mode
59 EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
60 EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
61 EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
62 EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
/*
 * Stores 4..7 are needed unless microMIPS paired stores with
 * LONGSIZE == 8 already cover 64 bytes in four STORSIZE-wide stores.
 */
63 #if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
64 EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
65 EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
66 EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
67 EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
/*
 * Plain 32-bit (non-microMIPS, LONGSIZE == 4): sixteen 4-byte stores
 * are required to fill the full 64-byte block.
 */
69 #if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
70 EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
71 EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
72 EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
73 EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
74 EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
75 EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
76 EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
77 EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
/*
 * __BUILD_BZERO: generate one bzero-style fill body, either for normal
 * kernel addresses (LEGACY_MODE) or for EVA user addresses (EVA_MODE).
 * Inputs (per the memset header below): a0 = dst, a1 = fill byte
 * (already splatted), a2 = byte count.
 * NOTE(review): large runs of this macro are missing from this excerpt
 * (embedded line numbers jump repeatedly) — every claim below is keyed
 * to the visible lines only; confirm against the complete file.
 */
84 * Macro to generate the __bzero{,_user} symbol
86 * mode: LEGACY_MODE or EVA_MODE
88 .macro __BUILD_BZERO mode
89 /* Initialize __memset if this is the first time we call this macro */
92 .hidden __memset /* Make sure it does not leak */
/* Regions smaller than one store unit go to the bytewise tail loop. */
95 sltiu t0, a2, STORSIZE /* very small region? */
97 bnez t0, .Lsmall_memset\@
98 andi t0, a0, STORMASK /* aligned? */
101 #ifdef CONFIG_CPU_MICROMIPS
102 move t8, a1 /* used by 'swp' instruction */
106 #ifndef CONFIG_CPU_DADDI_WORKAROUNDS
108 PTR_SUBU t0, STORSIZE /* alignment in bytes */
113 PTR_SUBU t0, AT /* alignment in bytes */
/*
 * With unaligned stores available, a single LONG_S_L/LONG_S_R pair
 * (endianness-dependent — selecting #if/#else lines not visible here)
 * aligns the destination in one faultable store.
 */
118 #ifndef CONFIG_CPU_NO_LOAD_STORE_LR
121 EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
123 EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
125 PTR_SUBU a0, t0 /* long align ptr */
126 PTR_ADDU a2, t0 /* correct size */
/* No unaligned stores: align the head byte-by-byte instead. */
128 #else /* CONFIG_CPU_NO_LOAD_STORE_LR */
129 #define STORE_BYTE(N) \
130 EX(sb, a1, N(a0), .Lbyte_fixup\@); \
136 PTR_ADDU a2, t0 /* correct size */
141 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
147 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
152 PTR_ADDIU a0, STORSIZE
153 #endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
/* Main loop: fill full 64-byte blocks via f_fill64. */
154 1: ori t1, a2, 0x3f /* # of full blocks */
156 andi t0, a2, 0x40-STORSIZE
157 beqz t1, .Lmemset_partial\@ /* no block to fill */
159 PTR_ADDU t1, a0 /* end address */
162 f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
/*
 * Partial block: compute a jump offset into the f_fill64 expansion so
 * only the needed trailing stores execute (instruction size differs on
 * microMIPS, hence the #ifdef — scaling lines partly missing here).
 */
167 PTR_LA t1, 2f /* where to start */
168 #ifdef CONFIG_CPU_MICROMIPS
172 PTR_SUBU t1, FILLPTRG
175 LONG_SRL AT, FILLPTRG, 1
179 PTR_ADDU a0, t0 /* dest ptr */
182 /* ... but first do longs ... */
183 f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
184 2: andi a2, STORMASK /* At most one long to go */
188 #ifndef CONFIG_CPU_NO_LOAD_STORE_LR
189 PTR_ADDU a0, a2 /* What's left */
193 EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
195 EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
197 #else /* CONFIG_CPU_NO_LOAD_STORE_LR */
200 move a2, zero /* No remaining longs */
205 EX(sb, a1, 2(a0), .Lbyte_fixup\@)
211 EX(sb, a1, 6(a0), .Lbyte_fixup\@)
214 #endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
/* Small-region path: trivial bytewise fill. */
222 1: PTR_ADDIU a0, 1 /* fill bytewise */
226 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
/*
 * Fault fixups: each computes the number of bytes left unset into a2
 * for the caller. THREAD_BUADDR holds the faulting address saved by
 * the unaligned-access handler. Labels and several instructions are
 * missing from this excerpt.
 */
237 #ifdef CONFIG_CPU_NO_LOAD_STORE_LR
240 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
246 #endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
249 /* unset_bytes already in a2 */
254 * unset_bytes = partial_start_addr + #bytes - fault_addr
255 * a2 = t1 + (a2 & 3f) - $28->task->BUADDR
257 PTR_L t0, TI_TASK($28)
259 LONG_L t0, THREAD_BUADDR(t0)
266 * unset_bytes = partial_end_addr + #bytes - fault_addr
267 * a2 = a0 + (a2 & STORMASK) - $28->task->BUADDR
269 PTR_L t0, TI_TASK($28)
271 LONG_L t0, THREAD_BUADDR(t0)
277 /* unset_bytes already in a2 */
282 * unset_bytes = end_addr - current_addr + 1
/*
 * Entry points. memset() splats the fill byte across a1 and falls
 * through into the fill body; __bzero{,_kernel} enter with a1 already
 * zero. The fill body itself is stamped out by __BUILD_BZERO, once per
 * mode. NOTE(review): the ENTRY()/.set directives and the byte-splat
 * shifts between these lines are missing from this excerpt.
 */
292 * memset(void *s, int c, size_t n)
294 * a0: start of area to clear
295 * a1: char to fill with
296 * a2: size of area to clear
300 EXPORT_SYMBOL(memset)
301 move v0, a0 /* result */
304 andi a1, 0xff /* spread fillword */
316 EXPORT_SYMBOL(__bzero)
318 FEXPORT(__bzero_kernel)
319 EXPORT_SYMBOL(__bzero_kernel)
/* Kernel-address variant of the fill body. */
321 __BUILD_BZERO LEGACY_MODE
325 EXPORT_SYMBOL(__bzero)
/* EVA (user-address) variant — assembled only when EVA is enabled. */
326 __BUILD_BZERO EVA_MODE