#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H
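
/*
 * Atomic exchange and compare-and-swap built on the SH-4A LL/SC
 * pair: movli.l loads a longword and opens a reservation, and
 * movco.l stores only if the reservation is still intact, setting
 * the T bit on success.  Each sequence below loops ("bf 1b"
 * branches while T is clear) until the store-conditional lands,
 * then issues synco as a completion barrier.
 */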
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u32	\n\t"
		"mov		%0, %1			\n\t"
		"mov		%3, %0			\n\t"
		"movco.l	%0, @%2			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (val)
		: "t", "memory"
	);

	return retval;
}
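
/*
 * Byte exchange.  Note that movli.l/movco.l operate on a whole
 * aligned longword, so as written this replaces the entire longword
 * at m with (val & 0xff); it is only safe where the caller owns the
 * full word at that address.
 */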
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u8	\n\t"
		"mov		%0, %1			\n\t"
		"mov		%3, %0			\n\t"
		"movco.l	%0, @%2			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (val & 0xff)
		: "t", "memory"
	);

	return retval;
}
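
/*
 * Atomically replace *m with new, but only if *m still equals old.
 * Returns the value read from *m; the swap happened iff that value
 * equals old.  When the comparison fails ("bf 2f"), the unmodified
 * value is stored back so the movco.l still closes the LL/SC pair.
 */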
static inline unsigned long
__cmpxchg_u32(volatile int *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! __cmpxchg_u32		\n\t"
		"mov		%0, %1				\n\t"
		"cmp/eq		%1, %3				\n\t"
		"bf		2f				\n\t"
		"mov		%4, %0				\n\t"
		"2:						\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (old), "r" (new)
		: "t", "memory"
	);

	return retval;
}

#endif /* __ASM_SH_CMPXCHG_LLSC_H */