[linux-2.6/next.git] include/asm-m68k/uaccess.h
blob 605e6cb811f80847fc6646ace670d2312da7339e
#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/errno.h>
#include <linux/sched.h>
#include <asm/segment.h>

#define VERIFY_READ	0
#define VERIFY_WRITE	1

/* We let the MMU do all checking */
#define access_ok(type,addr,size) 1
/* this function will go away soon - use access_ok() instead */
static inline int __deprecated verify_area(int type, const void *addr, unsigned long size)
{
	return access_ok(type,addr,size) ? 0 : -EFAULT;
}
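
/*
 * Illustrative usage sketch (not part of the original header): callers
 * normally check the range before transferring data.  "buf", "kbuf" and
 * "len" are hypothetical names.
 *
 *	if (!access_ok(VERIFY_WRITE, buf, len))
 *		return -EFAULT;
 *	if (copy_to_user(buf, kbuf, len))
 *		return -EFAULT;
 *
 * On m68k access_ok() always succeeds; the real protection comes from the
 * MMU fault being resolved through the exception table described below.
 */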

/*
 * The exception table consists of pairs of addresses: the first is the
 * address of an instruction that is allowed to fault, and the second is
 * the address at which the program should continue. No registers are
 * modified, so it is entirely up to the continuation code to figure out
 * what to do.
 *
 * All the routines below use bits of fixup code that are out of line
 * with the main instruction path. This means when everything is well,
 * we don't even have to jump over them. Further, they do not intrude
 * on our cache or tlb entries.
 */

struct exception_table_entry
{
	unsigned long insn, fixup;
};
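
/*
 * Illustrative sketch (not part of the original header): each user access
 * below emits one table entry per potentially faulting instruction,
 * pairing the address of that instruction with its fixup address, e.g.
 *
 *	1:	movesl (%a0)+,%d0	| may fault on a user address
 *	...
 *	.section __ex_table,"a"
 *		.align 4
 *		.long 1b,3b		| a fault at 1: resumes at label 3:
 *	.previous
 *
 * The fault handler searches the table for the faulting PC and, on a
 * match, resumes execution at the recorded fixup address.
 */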

/*
 * These are the main single-value transfer routines. They automatically
 * use the right size if we just have the right pointer type.
 */

#define put_user(x, ptr)						\
({									\
	int __pu_err;							\
	typeof(*(ptr)) __pu_val = (x);					\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(__pu_err, __pu_val, ptr, b);		\
		break;							\
	case 2:								\
		__put_user_asm(__pu_err, __pu_val, ptr, w);		\
		break;							\
	case 4:								\
		__put_user_asm(__pu_err, __pu_val, ptr, l);		\
		break;							\
	case 8:								\
		__pu_err = __constant_copy_to_user(ptr, &__pu_val, 8);	\
		break;							\
	default:							\
		__pu_err = __put_user_bad();				\
		break;							\
	}								\
	__pu_err;							\
})
#define __put_user(x, ptr) put_user(x, ptr)

extern int __put_user_bad(void);

/*
 * Tell gcc we read from memory instead of writing: this is because
 * we do not write to any memory gcc knows about, so there are no
 * aliasing issues.
 */
#define __put_user_asm(err,x,ptr,bwl)			\
__asm__ __volatile__					\
	("21:moves" #bwl " %2,%1\n"			\
	 "1:\n"						\
	 ".section .fixup,\"ax\"\n"			\
	 " .even\n"					\
	 "2: movel %3,%0\n"				\
	 " jra 1b\n"					\
	 ".previous\n"					\
	 ".section __ex_table,\"a\"\n"			\
	 " .align 4\n"					\
	 " .long 21b,2b\n"				\
	 " .long 1b,2b\n"				\
	 ".previous"					\
	 : "=d"(err)					\
	 : "m"(*(ptr)), "r"(x), "i"(-EFAULT), "0"(0))

#define get_user(x, ptr)						\
({									\
	int __gu_err;							\
	typeof(*(ptr)) __gu_val;					\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(__gu_err, __gu_val, ptr, b, "=d");	\
		break;							\
	case 2:								\
		__get_user_asm(__gu_err, __gu_val, ptr, w, "=r");	\
		break;							\
	case 4:								\
		__get_user_asm(__gu_err, __gu_val, ptr, l, "=r");	\
		break;							\
	case 8:								\
		__gu_err = __constant_copy_from_user(&__gu_val, ptr, 8);	\
		break;							\
	default:							\
		__gu_val = 0;						\
		__gu_err = __get_user_bad();				\
		break;							\
	}								\
	(x) = __gu_val;							\
	__gu_err;							\
})
#define __get_user(x, ptr) get_user(x, ptr)

extern int __get_user_bad(void);
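
/*
 * Illustrative usage sketch (not part of the original header): a
 * hypothetical ioctl-style handler reading and writing a single int;
 * "argp" is an assumed user pointer.
 *
 *	int value;
 *
 *	if (get_user(value, (int *)argp))
 *		return -EFAULT;
 *	value *= 2;
 *	if (put_user(value, (int *)argp))
 *		return -EFAULT;
 *
 * The sizeof(*ptr) switch above picks the byte/word/long moves variant;
 * 8-byte objects fall back to the constant copy routines.
 */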

#define __get_user_asm(err,x,ptr,bwl,reg)		\
__asm__ __volatile__					\
	("1: moves" #bwl " %2,%1\n"			\
	 "2:\n"						\
	 ".section .fixup,\"ax\"\n"			\
	 " .even\n"					\
	 "3: movel %3,%0\n"				\
	 " sub" #bwl " %1,%1\n"				\
	 " jra 2b\n"					\
	 ".previous\n"					\
	 ".section __ex_table,\"a\"\n"			\
	 " .align 4\n"					\
	 " .long 1b,3b\n"				\
	 ".previous"					\
	 : "=d"(err), reg(x)				\
	 : "m"(*(ptr)), "i" (-EFAULT), "0"(0))

static inline unsigned long
__generic_copy_from_user(void *to, const void *from, unsigned long n)
{
	unsigned long tmp;
	__asm__ __volatile__
		(" tstl %2\n"
		 " jeq 2f\n"
		 "1: movesl (%1)+,%3\n"
		 " movel %3,(%0)+\n"
		 " subql #1,%2\n"
		 " jne 1b\n"
		 "2: movel %4,%2\n"
		 " bclr #1,%2\n"
		 " jeq 4f\n"
		 "3: movesw (%1)+,%3\n"
		 " movew %3,(%0)+\n"
		 "4: bclr #0,%2\n"
		 " jeq 6f\n"
		 "5: movesb (%1)+,%3\n"
		 " moveb %3,(%0)+\n"
		 "6:\n"
		 ".section .fixup,\"ax\"\n"
		 " .even\n"
		 "7: movel %2,%%d0\n"
		 "71:clrl (%0)+\n"
		 " subql #1,%%d0\n"
		 " jne 71b\n"
		 " lsll #2,%2\n"
		 " addl %4,%2\n"
		 " btst #1,%4\n"
		 " jne 81f\n"
		 " btst #0,%4\n"
		 " jne 91f\n"
		 " jra 6b\n"
		 "8: addql #2,%2\n"
		 "81:clrw (%0)+\n"
		 " btst #0,%4\n"
		 " jne 91f\n"
		 " jra 6b\n"
		 "9: addql #1,%2\n"
		 "91:clrb (%0)+\n"
		 " jra 6b\n"
		 ".previous\n"
		 ".section __ex_table,\"a\"\n"
		 " .align 4\n"
		 " .long 1b,7b\n"
		 " .long 3b,8b\n"
		 " .long 5b,9b\n"
		 ".previous"
		 : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp)
		 : "d"(n & 3), "0"(to), "1"(from), "2"(n/4)
		 : "d0", "memory");
	return n;
}

static inline unsigned long
__generic_copy_to_user(void *to, const void *from, unsigned long n)
{
	unsigned long tmp;
	__asm__ __volatile__
		(" tstl %2\n"
		 " jeq 3f\n"
		 "1: movel (%1)+,%3\n"
		 "22:movesl %3,(%0)+\n"
		 "2: subql #1,%2\n"
		 " jne 1b\n"
		 "3: movel %4,%2\n"
		 " bclr #1,%2\n"
		 " jeq 4f\n"
		 " movew (%1)+,%3\n"
		 "24:movesw %3,(%0)+\n"
		 "4: bclr #0,%2\n"
		 " jeq 5f\n"
		 " moveb (%1)+,%3\n"
		 "25:movesb %3,(%0)+\n"
		 "5:\n"
		 ".section .fixup,\"ax\"\n"
		 " .even\n"
		 "60:addql #1,%2\n"
		 "6: lsll #2,%2\n"
		 " addl %4,%2\n"
		 " jra 5b\n"
		 "7: addql #2,%2\n"
		 " jra 5b\n"
		 "8: addql #1,%2\n"
		 " jra 5b\n"
		 ".previous\n"
		 ".section __ex_table,\"a\"\n"
		 " .align 4\n"
		 " .long 1b,60b\n"
		 " .long 22b,6b\n"
		 " .long 2b,6b\n"
		 " .long 24b,7b\n"
		 " .long 3b,60b\n"
		 " .long 4b,7b\n"
		 " .long 25b,8b\n"
		 " .long 5b,8b\n"
		 ".previous"
		 : "=a"(to), "=a"(from), "=d"(n), "=&d"(tmp)
		 : "r"(n & 3), "0"(to), "1"(from), "2"(n / 4)
		 : "memory");
	return n;
}

#define __copy_from_user_big(to, from, n, fixup, copy)	\
	__asm__ __volatile__				\
		("10: movesl (%1)+,%%d0\n"		\
		 " movel %%d0,(%0)+\n"			\
		 " subql #1,%2\n"			\
		 " jne 10b\n"				\
		 ".section .fixup,\"ax\"\n"		\
		 " .even\n"				\
		 "11: movel %2,%%d0\n"			\
		 "13: clrl (%0)+\n"			\
		 " subql #1,%%d0\n"			\
		 " jne 13b\n"				\
		 " lsll #2,%2\n"			\
		 fixup "\n"				\
		 " jra 12f\n"				\
		 ".previous\n"				\
		 ".section __ex_table,\"a\"\n"		\
		 " .align 4\n"				\
		 " .long 10b,11b\n"			\
		 ".previous\n"				\
		 copy "\n"				\
		 "12:"					\
		 : "=a"(to), "=a"(from), "=d"(n)	\
		 : "0"(to), "1"(from), "2"(n/4)		\
		 : "d0", "memory")

static inline unsigned long
__constant_copy_from_user(void *to, const void *from, unsigned long n)
{
	switch (n) {
	case 0:
		break;
	case 1:
		__asm__ __volatile__
			("1: movesb (%1)+,%%d0\n"
			 " moveb %%d0,(%0)+\n"
			 "2:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "3: addql #1,%2\n"
			 " clrb (%0)+\n"
			 " jra 2b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 1b,3b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 2:
		__asm__ __volatile__
			("1: movesw (%1)+,%%d0\n"
			 " movew %%d0,(%0)+\n"
			 "2:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "3: addql #2,%2\n"
			 " clrw (%0)+\n"
			 " jra 2b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 1b,3b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 3:
		__asm__ __volatile__
			("1: movesw (%1)+,%%d0\n"
			 " movew %%d0,(%0)+\n"
			 "2: movesb (%1)+,%%d0\n"
			 " moveb %%d0,(%0)+\n"
			 "3:"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "4: addql #2,%2\n"
			 " clrw (%0)+\n"
			 "5: addql #1,%2\n"
			 " clrb (%0)+\n"
			 " jra 3b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 1b,4b\n"
			 " .long 2b,5b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 4:
		__asm__ __volatile__
			("1: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "2:"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "3: addql #4,%2\n"
			 " clrl (%0)+\n"
			 " jra 2b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 1b,3b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 8:
		__asm__ __volatile__
			("1: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "2: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "3:"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "4: addql #4,%2\n"
			 " clrl (%0)+\n"
			 "5: addql #4,%2\n"
			 " clrl (%0)+\n"
			 " jra 3b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 1b,4b\n"
			 " .long 2b,5b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 12:
		__asm__ __volatile__
			("1: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "2: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "3: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "4:"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "5: addql #4,%2\n"
			 " clrl (%0)+\n"
			 "6: addql #4,%2\n"
			 " clrl (%0)+\n"
			 "7: addql #4,%2\n"
			 " clrl (%0)+\n"
			 " jra 4b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 1b,5b\n"
			 " .long 2b,6b\n"
			 " .long 3b,7b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 16:
		__asm__ __volatile__
			("1: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "2: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "3: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "4: movesl (%1)+,%%d0\n"
			 " movel %%d0,(%0)+\n"
			 "5:"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "6: addql #4,%2\n"
			 " clrl (%0)+\n"
			 "7: addql #4,%2\n"
			 " clrl (%0)+\n"
			 "8: addql #4,%2\n"
			 " clrl (%0)+\n"
			 "9: addql #4,%2\n"
			 " clrl (%0)+\n"
			 " jra 5b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 1b,6b\n"
			 " .long 2b,7b\n"
			 " .long 3b,8b\n"
			 " .long 4b,9b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	default:
		switch (n & 3) {
		case 0:
			__copy_from_user_big(to, from, n, "", "");
			break;
		case 1:
			__copy_from_user_big(to, from, n,
					     /* fixup */
					     "1: addql #1,%2\n"
					     " clrb (%0)+",
					     /* copy */
					     "2: movesb (%1)+,%%d0\n"
					     " moveb %%d0,(%0)+\n"
					     ".section __ex_table,\"a\"\n"
					     " .long 2b,1b\n"
					     ".previous");
			break;
		case 2:
			__copy_from_user_big(to, from, n,
					     /* fixup */
					     "1: addql #2,%2\n"
					     " clrw (%0)+",
					     /* copy */
					     "2: movesw (%1)+,%%d0\n"
					     " movew %%d0,(%0)+\n"
					     ".section __ex_table,\"a\"\n"
					     " .long 2b,1b\n"
					     ".previous");
			break;
		case 3:
			__copy_from_user_big(to, from, n,
					     /* fixup */
					     "1: addql #2,%2\n"
					     " clrw (%0)+\n"
					     "2: addql #1,%2\n"
					     " clrb (%0)+",
					     /* copy */
					     "3: movesw (%1)+,%%d0\n"
					     " movew %%d0,(%0)+\n"
					     "4: movesb (%1)+,%%d0\n"
					     " moveb %%d0,(%0)+\n"
					     ".section __ex_table,\"a\"\n"
					     " .long 3b,1b\n"
					     " .long 4b,2b\n"
					     ".previous");
			break;
		}
		break;
	}
	return n;
}

#define __copy_to_user_big(to, from, n, fixup, copy)	\
	__asm__ __volatile__				\
		("10: movel (%1)+,%%d0\n"		\
		 "31: movesl %%d0,(%0)+\n"		\
		 "11: subql #1,%2\n"			\
		 " jne 10b\n"				\
		 "41:\n"				\
		 ".section .fixup,\"ax\"\n"		\
		 " .even\n"				\
		 "22: addql #1,%2\n"			\
		 "12: lsll #2,%2\n"			\
		 fixup "\n"				\
		 " jra 13f\n"				\
		 ".previous\n"				\
		 ".section __ex_table,\"a\"\n"		\
		 " .align 4\n"				\
		 " .long 10b,22b\n"			\
		 " .long 31b,12b\n"			\
		 " .long 11b,12b\n"			\
		 " .long 41b,22b\n"			\
		 ".previous\n"				\
		 copy "\n"				\
		 "13:"					\
		 : "=a"(to), "=a"(from), "=d"(n)	\
		 : "0"(to), "1"(from), "2"(n/4)		\
		 : "d0", "memory")

#define __copy_to_user_inatomic __copy_to_user
#define __copy_from_user_inatomic __copy_from_user

static inline unsigned long
__constant_copy_to_user(void *to, const void *from, unsigned long n)
{
	switch (n) {
	case 0:
		break;
	case 1:
		__asm__ __volatile__
			(" moveb (%1)+,%%d0\n"
			 "21:movesb %%d0,(%0)+\n"
			 "1:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "2: addql #1,%2\n"
			 " jra 1b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 21b,2b\n"
			 " .long 1b,2b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 2:
		__asm__ __volatile__
			(" movew (%1)+,%%d0\n"
			 "21:movesw %%d0,(%0)+\n"
			 "1:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "2: addql #2,%2\n"
			 " jra 1b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 21b,2b\n"
			 " .long 1b,2b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 3:
		__asm__ __volatile__
			(" movew (%1)+,%%d0\n"
			 "21:movesw %%d0,(%0)+\n"
			 "1: moveb (%1)+,%%d0\n"
			 "22:movesb %%d0,(%0)+\n"
			 "2:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "3: addql #2,%2\n"
			 "4: addql #1,%2\n"
			 " jra 2b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 21b,3b\n"
			 " .long 1b,3b\n"
			 " .long 22b,4b\n"
			 " .long 2b,4b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 4:
		__asm__ __volatile__
			(" movel (%1)+,%%d0\n"
			 "21:movesl %%d0,(%0)+\n"
			 "1:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "2: addql #4,%2\n"
			 " jra 1b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 21b,2b\n"
			 " .long 1b,2b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 8:
		__asm__ __volatile__
			(" movel (%1)+,%%d0\n"
			 "21:movesl %%d0,(%0)+\n"
			 "1: movel (%1)+,%%d0\n"
			 "22:movesl %%d0,(%0)+\n"
			 "2:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "3: addql #4,%2\n"
			 "4: addql #4,%2\n"
			 " jra 2b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 21b,3b\n"
			 " .long 1b,3b\n"
			 " .long 22b,4b\n"
			 " .long 2b,4b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 12:
		__asm__ __volatile__
			(" movel (%1)+,%%d0\n"
			 "21:movesl %%d0,(%0)+\n"
			 "1: movel (%1)+,%%d0\n"
			 "22:movesl %%d0,(%0)+\n"
			 "2: movel (%1)+,%%d0\n"
			 "23:movesl %%d0,(%0)+\n"
			 "3:\n"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "4: addql #4,%2\n"
			 "5: addql #4,%2\n"
			 "6: addql #4,%2\n"
			 " jra 3b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 21b,4b\n"
			 " .long 1b,4b\n"
			 " .long 22b,5b\n"
			 " .long 2b,5b\n"
			 " .long 23b,6b\n"
			 " .long 3b,6b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	case 16:
		__asm__ __volatile__
			(" movel (%1)+,%%d0\n"
			 "21:movesl %%d0,(%0)+\n"
			 "1: movel (%1)+,%%d0\n"
			 "22:movesl %%d0,(%0)+\n"
			 "2: movel (%1)+,%%d0\n"
			 "23:movesl %%d0,(%0)+\n"
			 "3: movel (%1)+,%%d0\n"
			 "24:movesl %%d0,(%0)+\n"
			 "4:"
			 ".section .fixup,\"ax\"\n"
			 " .even\n"
			 "5: addql #4,%2\n"
			 "6: addql #4,%2\n"
			 "7: addql #4,%2\n"
			 "8: addql #4,%2\n"
			 " jra 4b\n"
			 ".previous\n"
			 ".section __ex_table,\"a\"\n"
			 " .align 4\n"
			 " .long 21b,5b\n"
			 " .long 1b,5b\n"
			 " .long 22b,6b\n"
			 " .long 2b,6b\n"
			 " .long 23b,7b\n"
			 " .long 3b,7b\n"
			 " .long 24b,8b\n"
			 " .long 4b,8b\n"
			 ".previous"
			 : "=a"(to), "=a"(from), "=d"(n)
			 : "0"(to), "1"(from), "2"(0)
			 : "d0", "memory");
		break;
	default:
		switch (n & 3) {
		case 0:
			__copy_to_user_big(to, from, n, "", "");
			break;
		case 1:
			__copy_to_user_big(to, from, n,
					   /* fixup */
					   "1: addql #1,%2",
					   /* copy */
					   " moveb (%1)+,%%d0\n"
					   "22:movesb %%d0,(%0)+\n"
					   "2:"
					   ".section __ex_table,\"a\"\n"
					   " .long 22b,1b\n"
					   " .long 2b,1b\n"
					   ".previous");
			break;
		case 2:
			__copy_to_user_big(to, from, n,
					   /* fixup */
					   "1: addql #2,%2",
					   /* copy */
					   " movew (%1)+,%%d0\n"
					   "22:movesw %%d0,(%0)+\n"
					   "2:"
					   ".section __ex_table,\"a\"\n"
					   " .long 22b,1b\n"
					   " .long 2b,1b\n"
					   ".previous");
			break;
		case 3:
			__copy_to_user_big(to, from, n,
					   /* fixup */
					   "1: addql #2,%2\n"
					   "2: addql #1,%2",
					   /* copy */
					   " movew (%1)+,%%d0\n"
					   "23:movesw %%d0,(%0)+\n"
					   "3: moveb (%1)+,%%d0\n"
					   "24:movesb %%d0,(%0)+\n"
					   "4:"
					   ".section __ex_table,\"a\"\n"
					   " .long 23b,1b\n"
					   " .long 3b,1b\n"
					   " .long 24b,2b\n"
					   " .long 4b,2b\n"
					   ".previous");
			break;
		}
		break;
	}
	return n;
}

#define copy_from_user(to, from, n)			\
	(__builtin_constant_p(n) ?			\
	 __constant_copy_from_user(to, from, n) :	\
	 __generic_copy_from_user(to, from, n))

#define copy_to_user(to, from, n)			\
	(__builtin_constant_p(n) ?			\
	 __constant_copy_to_user(to, from, n) :		\
	 __generic_copy_to_user(to, from, n))

#define __copy_from_user(to, from, n) copy_from_user(to, from, n)
#define __copy_to_user(to, from, n) copy_to_user(to, from, n)
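
/*
 * Illustrative usage sketch (not part of the original header): copying a
 * small structure in and out; "ubuf" and "struct req" are hypothetical.
 *
 *	struct req r;
 *
 *	if (copy_from_user(&r, ubuf, sizeof(r)))
 *		return -EFAULT;
 *	...
 *	if (copy_to_user(ubuf, &r, sizeof(r)))
 *		return -EFAULT;
 *
 * Both macros return the number of bytes that could not be copied, so any
 * non-zero result indicates a fault.  A compile-time constant size selects
 * the unrolled __constant_* variants; otherwise the __generic_* loops run.
 */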

/*
 * Copy a null terminated string from userspace.
 */

static inline long
strncpy_from_user(char *dst, const char *src, long count)
{
	long res;
	if (count == 0) return count;
	__asm__ __volatile__
		("1: movesb (%2)+,%%d0\n"
		 "12:moveb %%d0,(%1)+\n"
		 " jeq 2f\n"
		 " subql #1,%3\n"
		 " jne 1b\n"
		 "2: subl %3,%0\n"
		 "3:\n"
		 ".section .fixup,\"ax\"\n"
		 " .even\n"
		 "4: movel %4,%0\n"
		 " jra 3b\n"
		 ".previous\n"
		 ".section __ex_table,\"a\"\n"
		 " .align 4\n"
		 " .long 1b,4b\n"
		 " .long 12b,4b\n"
		 ".previous"
		 : "=d"(res), "=a"(dst), "=a"(src), "=d"(count)
		 : "i"(-EFAULT), "0"(count), "1"(dst), "2"(src), "3"(count)
		 : "d0", "memory");
	return res;
}
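
/*
 * Illustrative usage sketch (not part of the original header): fetching a
 * user-supplied name into a fixed buffer; "uname" is a hypothetical user
 * pointer.
 *
 *	char name[32];
 *	long len = strncpy_from_user(name, uname, sizeof(name));
 *
 *	if (len < 0)
 *		return len;
 *
 * A negative return (-EFAULT here) signals a faulting access; otherwise
 * the result is the string length excluding the NUL, or count when no
 * terminator was found within count bytes.
 */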

/*
 * Return the size of a string (including the ending 0)
 *
 * Return 0 on exception, a value greater than N if too long
 */
static inline long strnlen_user(const char *src, long n)
{
	long res;

	res = -(long)src;
	__asm__ __volatile__
		("1:\n"
		 " tstl %2\n"
		 " jeq 3f\n"
		 "2: movesb (%1)+,%%d0\n"
		 "22:\n"
		 " subql #1,%2\n"
		 " tstb %%d0\n"
		 " jne 1b\n"
		 " jra 4f\n"
		 "3:\n"
		 " addql #1,%0\n"
		 "4:\n"
		 " addl %1,%0\n"
		 "5:\n"
		 ".section .fixup,\"ax\"\n"
		 " .even\n"
		 "6: moveq %3,%0\n"
		 " jra 5b\n"
		 ".previous\n"
		 ".section __ex_table,\"a\"\n"
		 " .align 4\n"
		 " .long 2b,6b\n"
		 " .long 22b,6b\n"
		 ".previous"
		 : "=d"(res), "=a"(src), "=d"(n)
		 : "i"(0), "0"(res), "1"(src), "2"(n)
		 : "d0");
	return res;
}

#define strlen_user(str) strnlen_user(str, 32767)

/*
 * Zero Userspace
 */

static inline unsigned long
clear_user(void *to, unsigned long n)
{
	__asm__ __volatile__
854 (" tstl %1\n"
855 " jeq 3f\n"
856 "1: movesl %3,(%0)+\n"
857 "2: subql #1,%1\n"
858 " jne 1b\n"
859 "3: movel %2,%1\n"
860 " bclr #1,%1\n"
861 " jeq 4f\n"
862 "24:movesw %3,(%0)+\n"
863 "4: bclr #0,%1\n"
864 " jeq 5f\n"
865 "25:movesb %3,(%0)+\n"
866 "5:\n"
867 ".section .fixup,\"ax\"\n"
868 " .even\n"
869 "61:addql #1,%1\n"
870 "6: lsll #2,%1\n"
871 " addl %2,%1\n"
872 " jra 5b\n"
873 "7: addql #2,%1\n"
874 " jra 5b\n"
875 "8: addql #1,%1\n"
876 " jra 5b\n"
877 ".previous\n"
878 ".section __ex_table,\"a\"\n"
879 " .align 4\n"
880 " .long 1b,61b\n"
881 " .long 2b,6b\n"
882 " .long 3b,61b\n"
883 " .long 24b,7b\n"
884 " .long 4b,7b\n"
885 " .long 25b,8b\n"
886 " .long 5b,8b\n"
887 ".previous"
888 : "=a"(to), "=d"(n)
889 : "r"(n & 3), "r"(0), "0"(to), "1"(n/4));
890 return n;
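
/*
 * Illustrative usage sketch (not part of the original header): zeroing a
 * user buffer; "ubuf" and "len" are hypothetical.
 *
 *	if (clear_user(ubuf, len))
 *		return -EFAULT;
 *
 * Like the copy routines, clear_user() returns the number of bytes that
 * could not be cleared, so any non-zero result indicates a fault.
 */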

#endif /* __M68K_UACCESS_H */