2 * copy_page, __copy_user_page, __copy_user implementation for SuperH
4 * Copyright (C) 2001 Niibe Yutaka & Kaz Kojima
5 * Copyright (C) 2002 Toshinobu Sugioka
6 * Copyright (C) 2006 Paul Mundt
8 #include <linux/linkage.h>
16 * void copy_page_slow(void *to, void *from)
20 * r0, r1, r2, r3, r4, r5, r6, r7 --- scratch
21 * r8 --- from + PAGE_SIZE
! NOTE(review): the copy loop of copy_page_slow is not visible in this
! extract; the CPU-conditional sections below appear with their contents
! elided -- confirm against the full file before changing anything here.
44 #if defined(CONFIG_CPU_SH3)
46 #elif defined(CONFIG_CPU_SH4)
58 #if defined(CONFIG_CPU_SH4)
71 #if defined(CONFIG_CPU_SH4)
74 * @to: P1 address (with same color)
76 * @orig_to: P1 address
78 * void __copy_user_page(void *to, void *from, void *orig_to)
82 * r0, r1, r2, r3, r4, r5, r6, r7 --- scratch
83 * r8 --- from + PAGE_SIZE
88 ENTRY(__copy_user_page)
! NOTE(review): the instruction body of __copy_user_page is not visible in
! this extract -- only the entry label above and the PAGE_SIZE literal
! below survive. Do not edit this routine from this view alone.
133 .Lpsz: .long PAGE_SIZE
135 * __kernel_size_t __copy_user(void *to, const void *from, __kernel_size_t n);
136 * Return the number of bytes NOT copied
!
! EX(...): runs its argument instruction under local label 9999 and emits
! a __ex_table entry pairing 9999b with fixup label 6000f, so a fault in
! the guarded access jumps to the fixup code instead of faulting hard.
! NOTE(review): the "#define EX" opener line itself is missing from this
! extract (only the continuation lines remain) -- confirm upstream.
139 9999: __VA_ARGS__ ; \
140 .section __ex_table, "a"; \
141 .long 9999b, 6000f ; \
! __copy_user fragment: copies r6 bytes from src (r5) to dst (r4),
! returning in r0 the number of bytes NOT copied (0 on full success).
! NOTE(review): large portions of this routine are missing from this
! extract; the surviving lines are annotated in place below. Register
! roles are taken from the inline comments that survive (r6 = len,
! r3 = last destination address, r0 = compare threshold / return value).
144 ! Check if small number of bytes
147 cmp/gt r0,r6 ! r6 (len) > r0 (11)
! bf/s is branch-if-false with a delay slot: the add below executes
! before the branch to the small-copy cleanup is taken.
148 bf/s .L_cleanup_loop_no_pop
149 add r6,r3 ! last destination address
151 ! Calculate bytes needed to align to src
163 ! Copy bytes to long word align src
171 ! Jump to appropriate routine depending on dest
190 * Come here if there are less than 12 bytes to copy
192 * Keep the branch target close, so the bf/s callee doesn't overflow
193 * and result in a more expensive branch being inserted. This is the
194 * fast-path for small copies, the jump via the jump table will hit the
195 * default slow-path cleanup. -PFM.
197 .L_cleanup_loop_no_pop:
198 tst r6,r6 ! Check explicitly for zero
208 1: mov #0,r0 ! normal return
! Faulting accesses wrapped in EX() land in this .fixup section via the
! __ex_table entries; handler body not visible in this extract.
212 .section .fixup, "ax"
228 ! Skip the large copy for small transfers
230 cmp/gt r6, r0 ! r0 (60) > r6 (len)
233 ! Align dest to a 32 byte boundary
! 32-byte block store: eight longword stores to dst (r4), each guarded
! by EX() for user-space fault recovery.
260 EX( mov.l r1,@(4,r4) )
262 EX( mov.l r2,@(8,r4) )
263 cmp/gt r6, r0 ! r0 (32) > r6 (len)
264 EX( mov.l r7,@(12,r4) )
265 EX( mov.l r8,@(16,r4) )
266 EX( mov.l r9,@(20,r4) )
267 EX( mov.l r10,@(24,r4) )
268 EX( mov.l r11,@(28,r4) )
! Endian-dependent path: store ordering below is selected at build time.
298 #ifdef CONFIG_CPU_LITTLE_ENDIAN
312 EX( mov.l r1,@(4,r4) )
313 EX( mov.l r8,@(8,r4) )
314 EX( mov.l r9,@(12,r4) )
323 EX( mov.l r10,@(16,r4) )
324 EX( mov.l r1,@(20,r4) )
325 EX( mov.l r8,@(24,r4) )
326 EX( mov.w r0,@(28,r4) )
! Big-endian counterpart: loads from src (r5) high-to-low, then stores
! back to dst (r4) in the matching order.
330 EX( mov.l @(28,r5),r0 )
331 EX( mov.l @(24,r5),r8 )
332 EX( mov.l @(20,r5),r9 )
333 EX( mov.l @(16,r5),r10 )
334 EX( mov.w r0,@(30,r4) )
339 EX( mov.l r0,@(28,r4) )
340 EX( mov.l r8,@(24,r4) )
341 EX( mov.l r9,@(20,r4) )
343 EX( mov.l @(12,r5),r0 )
344 EX( mov.l @(8,r5),r8 )
346 EX( mov.l @(4,r5),r9 )
352 EX( mov.l r0,@(12,r4) )
353 EX( mov.l r8,@(8,r4) )
355 EX( mov.l r9,@(4,r4) )
356 EX( mov.w r0,@(2,r4) )
! Destination is halfword-aligned relative to source: read a longword,
! emit it as two word stores per iteration (per the comment below).
365 1: ! Read longword, write two words per iteration
368 #ifdef CONFIG_CPU_LITTLE_ENDIAN
371 EX( mov.w r0,@(2,r4) )
373 EX( mov.w r0,@(2,r4) )
383 ! Destination = 01 or 11
387 ! Read longword, write byte, word, byte per iteration
390 #ifdef CONFIG_CPU_LITTLE_ENDIAN
396 EX( mov.b r0,@(2,r4) )
400 EX( mov.b r0,@(3,r4) )
410 ! Cleanup last few bytes
426 mov #0,r0 ! normal return
! Second .fixup section for the main-copy EX() sites; handler body not
! visible in this extract.
431 .section .fixup, "ax"