/*	$NetBSD: lock.h,v 1.11 2007/10/17 19:56:41 garbled Exp $	*/

/*-
 * Copyright (c) 2000, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations.
 */
#ifndef _POWERPC_LOCK_H_
#define _POWERPC_LOCK_H_
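
/*
 * Added note (not in the original header): the lock routines below use
 * the PowerPC lwarx/stwcx. load-reserve and store-conditional pair to
 * update the lock word atomically.  Acquire paths end with "isync" so
 * later instructions do not execute ahead of the lock acquisition; the
 * release path issues "sync" before clearing the lock word so that
 * stores made inside the critical section are visible first.
 */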
static __inline int
__SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_LOCKED;
}

static __inline int
__SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_UNLOCKED;
}
static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_LOCKED;
}
static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{
	*alp = __SIMPLELOCK_UNLOCKED;
	__asm volatile ("sync");
}
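
/*
 * Added note (not in the original header): __cpu_simple_lock() spins
 * until the lock is acquired.  It reserves the lock word with lwarx and,
 * if the word is free, claims it with stwcx.; while the lock is held it
 * polls with plain lwzx loads instead of re-establishing a reservation
 * on every iteration.
 */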
static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	int old;

	__asm volatile ("			\
						\n\
1:	lwarx	%0,0,%1			\n\
	cmpwi	%0,%2			\n\
	beq+	3f			\n\
2:	lwzx	%0,0,%1			\n\
	cmpwi	%0,%2			\n\
	beq+	1b			\n\
	b	2b			\n\
3:	stwcx.	%3,0,%1			\n\
	bne-	1b			\n\
	isync				\n\
						\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED)
	: "memory");
}
static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	int old, dummy;

	__asm volatile ("			\
						\n\
1:	lwarx	%0,0,%1			\n\
	cmpwi	%0,%2			\n\
	bne	2f			\n\
	stwcx.	%3,0,%1			\n\
	bne-	1b			\n\
2:	stwcx.	%3,0,%4			\n\
	isync				\n\
						\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED),
	  "r"(&dummy)
	: "memory");

	return (old == __SIMPLELOCK_UNLOCKED);
}
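
/*
 * Added note (not in the original header): on release, "sync" orders
 * every store made while the lock was held before the store that
 * clears the lock word, so other CPUs never see the lock free while
 * the protected data is still stale.
 */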
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	__asm volatile ("sync");
	*alp = __SIMPLELOCK_UNLOCKED;
}
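
/*
 * Added note (not in the original header): memory barrier helpers.
 * mb_read() issues "isync"; mb_write() and mb_memory() issue a full
 * "sync".  All three also act as compiler barriers via the "memory"
 * clobber.
 */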
static __inline void
mb_read(void)
{
	__asm volatile ("isync" ::: "memory");
}

static __inline void
mb_write(void)
{
	__asm volatile ("sync" ::: "memory");
}

static __inline void
mb_memory(void)
{
	__asm volatile ("sync" ::: "memory");
}
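
/*
 * Added note (not in the original header): a minimal usage sketch of
 * how a caller would typically pair these operations.  The names
 * example_lock and example_count are hypothetical and illustrative
 * only; they are not defined by this header.
 *
 *	static __cpu_simple_lock_t example_lock;
 *	static int example_count;
 *
 *	__cpu_simple_lock_init(&example_lock);
 *	...
 *	__cpu_simple_lock(&example_lock);
 *	example_count++;			(critical section)
 *	__cpu_simple_unlock(&example_lock);
 */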
#endif /* _POWERPC_LOCK_H_ */