#ifndef __ASM_SH_CMPXCHG_XCHG_H
#define __ASM_SH_CMPXCHG_XCHG_H

/*
 * Copyright (C) 2016 Red Hat, Inc.
 * Author: Michael S. Tsirkin <mst@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See the
 * file "COPYING" in the main directory of this archive for more details.
 */
#include <linux/bitops.h>
#include <asm/byteorder.h>

/*
 * Portable implementations of 1 and 2 byte xchg using a 4 byte cmpxchg.
 * Note: this header isn't self-contained: before including it, __cmpxchg_u32
 * must be defined first.
 */
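
/*
 * A minimal sketch of the contract assumed of __cmpxchg_u32 (the exact
 * prototype belongs to whichever header defines it before this one is
 * included; the types shown here are an assumption for illustration only):
 *
 *	u32 __cmpxchg_u32(volatile u32 *p, u32 old, u32 new);
 *
 * It must atomically store new in *p only if *p still equals old, and return
 * the value previously held in *p -- that return value is what the retry
 * loop below compares against.
 */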

static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
{
	int off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *p = ptr - off;	/* u32 containing the 1/2 byte operand */
#ifdef __BIG_ENDIAN
	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 oldv, newv;
	u32 ret;

	/* Retry until the containing 32 bit word is swapped atomically. */
	do {
		oldv = READ_ONCE(*p);
		ret = (oldv & bitmask) >> bitoff;
		newv = (oldv & ~bitmask) | (x << bitoff);
	} while (__cmpxchg_u32(p, oldv, newv) != oldv);

	return ret;
}
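
/*
 * Worked example (illustrative only): a 1 byte xchg at byte offset 1 of its
 * aligned u32, i.e. off = 1 and size = 1.  Little-endian: bitoff = 8,
 * bitmask = 0x0000ff00.  Big-endian: bitoff = (4 - 1 - 1) * 8 = 16,
 * bitmask = 0x00ff0000.  Either way the loop above extracts the old byte as
 * (oldv & bitmask) >> bitoff and splices x into the same field before
 * attempting the 4 byte cmpxchg.
 */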

static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}

#endif /* __ASM_SH_CMPXCHG_XCHG_H */