2 * copy_page, __copy_user_page, __copy_user implementations for SuperH
4 * Copyright (C) 2001 Niibe Yutaka & Kaz Kojima
5 * Copyright (C) 2002 Toshinobu Sugioka
6 * Copyright (C) 2006 Paul Mundt
8 #include <linux/linkage.h>
16 * void copy_page_slow(void *to, void *from)
20 * r0, r1, r2, r3, r4, r5, r6, r7 --- scratch
21 * r8 --- from + PAGE_SIZE
44 #if defined(CONFIG_CPU_SH3)
46 #elif defined(CONFIG_CPU_SH4)
58 #if defined(CONFIG_CPU_SH4)
71 #if defined(CONFIG_CPU_SH4)
74 * @to: P1 address (with same color)
76 * @orig_to: P1 address
78 * void __copy_user_page(void *to, void *from, void *orig_to)
82 * r0, r1, r2, r3, r4, r5, r6, r7 --- scratch
83 * r8 --- from + PAGE_SIZE
88 ENTRY(__copy_user_page)
133 .Lpsz: .long PAGE_SIZE
135 * __kernel_size_t __copy_user(void *to, const void *from, __kernel_size_t n);
136 * Return the number of bytes NOT copied
139 9999: __VA_ARGS__ ; \
140 .section __ex_table, "a"; \
141 .long 9999b, 6000f ; \
144 ! Check if small number of bytes
147 cmp/gt r0,r6 ! r6 (len) > r0 (11)
148 bf/s .L_cleanup_loop_no_pop
149 add r6,r3 ! last destination address
151 ! Calculate bytes needed to align to src
163 ! Copy bytes to long word align src
171 ! Jump to appropriate routine depending on dest
190 * Come here if there are less than 12 bytes to copy
192 * Keep the branch target close, so the bf/s callee doesn't overflow
193 * and result in a more expensive branch being inserted. This is the
194 * fast-path for small copies, the jump via the jump table will hit the
195 * default slow-path cleanup. -PFM.
197 .L_cleanup_loop_no_pop:
198 tst r6,r6 ! Check explicitly for zero
208 1: mov #0,r0 ! normal return
212 .section .fixup, "ax"
228 ! Skip the large copy for small transfers
230 cmp/gt r6, r0 ! r0 (60) > r6 (len)
233 ! Align dest to a 32 byte boundary
258 #ifdef CONFIG_CPU_SH4
264 EX( mov.l r1,@(4,r4) )
266 EX( mov.l r2,@(8,r4) )
267 cmp/gt r6, r0 ! r0 (32) > r6 (len)
268 EX( mov.l r7,@(12,r4) )
269 EX( mov.l r8,@(16,r4) )
270 EX( mov.l r9,@(20,r4) )
271 EX( mov.l r10,@(24,r4) )
272 EX( mov.l r11,@(28,r4) )
302 #ifdef CONFIG_CPU_LITTLE_ENDIAN
316 EX( mov.l r1,@(4,r4) )
317 EX( mov.l r8,@(8,r4) )
318 EX( mov.l r9,@(12,r4) )
327 EX( mov.l r10,@(16,r4) )
328 EX( mov.l r1,@(20,r4) )
329 EX( mov.l r8,@(24,r4) )
330 EX( mov.w r0,@(28,r4) )
334 EX( mov.l @(28,r5),r0 )
335 EX( mov.l @(24,r5),r8 )
336 EX( mov.l @(20,r5),r9 )
337 EX( mov.l @(16,r5),r10 )
338 EX( mov.w r0,@(30,r4) )
343 EX( mov.l r0,@(28,r4) )
344 EX( mov.l r8,@(24,r4) )
345 EX( mov.l r9,@(20,r4) )
347 EX( mov.l @(12,r5),r0 )
348 EX( mov.l @(8,r5),r8 )
350 EX( mov.l @(4,r5),r9 )
356 EX( mov.l r0,@(12,r4) )
357 EX( mov.l r8,@(8,r4) )
359 EX( mov.l r9,@(4,r4) )
360 EX( mov.w r0,@(2,r4) )
369 1: ! Read longword, write two words per iteration
372 #ifdef CONFIG_CPU_LITTLE_ENDIAN
375 EX( mov.w r0,@(2,r4) )
377 EX( mov.w r0,@(2,r4) )
387 ! Destination = 01 or 11
391 ! Read longword, write byte, word, byte per iteration
394 #ifdef CONFIG_CPU_LITTLE_ENDIAN
400 EX( mov.b r0,@(2,r4) )
404 EX( mov.b r0,@(3,r4) )
414 ! Cleanup last few bytes
430 mov #0,r0 ! normal return
435 .section .fixup, "ax"