#ifndef __ASM_SH_CMPXCHG_GRB_H
#define __ASM_SH_CMPXCHG_GRB_H
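
/*
 * Note on the LOGIN/LOGOUT markers below (a sketch of the SH gUSA
 * convention as I understand it, not authoritative): r0 is loaded with
 * the address of the "1:" label that ends the critical sequence, and
 * r15 (the stack pointer) is temporarily loaded with the negative byte
 * length of that sequence (LOGIN).  While r15 is negative the kernel
 * treats the sequence as atomic and restarts it from the beginning if
 * it is interrupted; restoring the saved sp from r1 (LOGOUT) closes
 * the region.
 */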
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-4,   r15     \n\t" /* LOGIN */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   mov.l   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN */
		"   mov.w  @%1,   %0      \n\t" /* load old value */
		"   extu.w  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.w   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory" , "r0", "r1");

	return retval;
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN */
		"   mov.b  @%1,   %0      \n\t" /* load old value */
		"   extu.b  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.b   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory" , "r0", "r1");

	return retval;
}

static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-8,   r15     \n\t" /* LOGIN */
		"   mov.l  @%3,   %0      \n\t" /* load old value */
		"   cmp/eq  %0,   %1      \n\t" /* compare with expected */
		"   bf            1f      \n\t" /* if not equal */
		"   mov.l   %2,   @%3     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (old), "+r"  (new)	/* old or new can be r15 */
		: "r" (m)
		: "memory" , "r0", "r1", "t");

	return retval;
}

#endif /* __ASM_SH_CMPXCHG_GRB_H */