1 /* $NetBSD: macros.h,v 1.43 2007/11/20 13:37:37 yamt Exp $ */
4 * Copyright (c) 1994, 1998, 2000 Ludd, University of Lule}, Sweden.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. All advertising materials mentioning features or use of this software
16 * must display the following acknowledgement:
17 * This product includes software developed at Ludd, University of Lule}.
18 * 4. The name of the author may not be used to endorse or promote products
19 * derived from this software without specific prior written permission
21 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
22 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
23 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
24 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
25 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
26 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
30 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 /* All bugs are subject to removal without further notice */
35 #if !defined(_VAX_MACROS_H_) && !defined(__lint__)
36 #define _VAX_MACROS_H_
/*
 * External bulk-fill / bulk-copy primitives (implemented elsewhere).
 * NOTE(review): argument order inferred from the call sites below --
 * __blkset(block, c, len) and __blkcpy(from, to, len); confirm against
 * the actual implementation.
 */
38 void __blkset(void *, int, size_t);
39 void __blkcpy(const void *, void *, size_t);
41 #if !__GNUC_PREREQ__(4, 1)
42 /* Here general macros are supposed to be stored */
/*
 * NOTE(review): this definition is truncated in this copy -- the function
 * name, body braces, asm operand lists and return are missing; code kept
 * byte-for-byte.  The visible mnemonic is the VAX FFS instruction (find
 * first set bit in a 32-bit field), so this presumably inlines an
 * ffs()-style routine -- confirm against the original file.
 */
44 static __inline
int __attribute__((__unused__
))
49 __asm
volatile ("ffs $0,$32,%1,%0;"
/*
 * NOTE(review): truncated -- name, braces and operand lists missing; kept
 * verbatim.  Uses the VAX REMQUE instruction to unlink an entry from a
 * doubly linked queue; the following "clrl 4(%0)" zeroes the entry's
 * second longword (presumably its backward link -- confirm).
 */
61 static __inline
void __attribute__((__unused__
))
64 __asm
volatile ("remque (%0),%0;clrl 4(%0)"
/*
 * vax_insque(p, q): insert queue element p directly after element q using
 * the VAX INSQUE instruction.
 * NOTE(review): body braces and the asm operand/clobber lists are missing
 * from this truncated copy; code kept byte-for-byte.
 */
70 static __inline
void __attribute__((__unused__
))
71 vax_insque(void *p
, void *q
)
73 __asm
volatile ("insque (%0),(%1)"
/*
 * vax_memcpy(to, from, len): inline memcpy replacement.
 * Copies with the VAX MOVC3 instruction, which clobbers r0-r5 (hence the
 * clobber list).  The __blkcpy() helper is the fallback path -- presumably
 * for len above the 65535-byte MOVC3 limit; the selecting branch is
 * missing from this truncated copy.
 * NOTE(review): braces, output operands and the return statement are
 * missing here; code kept byte-for-byte.
 */
80 static __inline
void *__attribute__((__unused__
))
81 vax_memcpy(void *to
, const void *from
, size_t len
)
/* fallback bulk copy -- note argument order: (from, to, len) */
84 __blkcpy(from
, to
, len
);
86 __asm
volatile ("movc3 %1,(%2),%0"
88 : "g" (len
), "r" (*(const char *)from
)
89 :"r0","r1","r2","r3","r4","r5","memory","cc");
/* Route all memcpy() calls in this compilation unit to the inline. */
93 #define memcpy vax_memcpy
/*
 * vax_memmove(to, from, len): inline memmove replacement, also built on
 * the VAX MOVC3 instruction (clobbers r0-r5).  __blkcpy() is again the
 * helper path for cases MOVC3 cannot handle in one instruction.
 * NOTE(review): truncated -- braces, output operands and return missing;
 * code kept byte-for-byte.  Differs from vax_memcpy only in constraint
 * spelling ("mo" input, "__volatile") in what is visible here.
 */
95 static __inline
void *__attribute__((__unused__
))
96 vax_memmove(void *to
, const void *from
, size_t len
)
/* fallback bulk copy -- note argument order: (from, to, len) */
99 __blkcpy(from
, to
, len
);
101 __asm
__volatile ("movc3 %1,%2,%0"
103 : "g" (len
), "mo" (*(const char *)from
)
104 :"r0","r1","r2","r3","r4","r5","memory","cc");
/* Route all memmove() calls in this compilation unit to the inline. */
108 #define memmove vax_memmove
/*
 * vax_memset(block, c, len): inline memset replacement.
 * Uses the VAX MOVC5 instruction with a zero-length source, which fills
 * the destination with the fill byte (operand %2, i.e. c); MOVC5 clobbers
 * r0-r5.  __blkset() is the helper/fallback fill path.
 * NOTE(review): truncated -- braces, input operand list and return are
 * missing; code kept byte-for-byte.
 */
111 static __inline
void *__attribute__((__unused__
))
112 vax_memset(void *block
, int c
, size_t len
)
/* fallback bulk fill: (block, c, len) */
115 __blkset(block
, c
, len
);
117 __asm
__volatile ("movc5 $0,(%%sp),%2,%1,%0"
118 : "=m" (*(char *)block
)
120 :"r0","r1","r2","r3","r4","r5","memory","cc");
/* Route all memset() calls in this compilation unit to the inline. */
124 #define memset vax_memset
/*
 * memcmp via the VAX CMPC3 instruction (compares len bytes at b1 and b2).
 * Per the pre-existing XXX below, the value returned does NOT follow the
 * standard signed negative/zero/positive memcmp contract -- callers may
 * only rely on zero vs. nonzero.
 * NOTE(review): truncated -- braces, output operands and return missing;
 * code kept byte-for-byte.
 */
127 /* XXX - the return syntax of memcmp is wrong */
128 static __inline
int __attribute__((__unused__
))
129 memcmp(const void *b1
, const void *b2
, size_t len
)
133 __asm
volatile("cmpc3 %3,(%1),(%2);"
136 : "r" (b1
), "r" (b2
), "r" (len
)
137 : "r0","r1","r2","r3" );
/*
 * bcmp via the VAX CMPC3 instruction, same pattern as memcmp above:
 * compares len bytes at b1 and b2; presumably returns zero on equality,
 * nonzero otherwise (which is all bcmp() guarantees anyway).
 * NOTE(review): truncated -- braces, output operands and return missing;
 * code kept byte-for-byte.
 */
141 static __inline
int __attribute__((__unused__
))
142 bcmp(const void *b1
, const void *b2
, size_t len
)
146 __asm
volatile("cmpc3 %3,(%1),(%2);"
149 : "r" (b1
), "r" (b2
), "r" (len
)
150 : "r0","r1","r2","r3" );
/*
 * strlen via VAX LOCC: locate the NUL byte within at most 65535 bytes,
 * then SUBL3 converts LOCC's residual count (r0) into the string length.
 * NOTE(review): this caps the measurable length at 65535 bytes -- longer
 * strings would be mis-measured.  Truncated copy: braces, operand lists
 * and return are missing; code kept byte-for-byte.
 */
155 static __inline
size_t __attribute__((__unused__
))
156 strlen(const char *cp
)
160 __asm
volatile("locc $0,$65535,(%1);"
161 "subl3 %%r0,$65535,%0"
/*
 * strcat: measure the source string c2 with LOCC/SUBL3 (length into r2),
 * find the terminating NUL of cp with a second LOCC (address left in r1),
 * then append with MOVC3.  Same 65535-byte LOCC limit as strlen above.
 * NOTE(review): truncated -- braces, input operand list and return are
 * missing; code kept byte-for-byte.
 */
168 static __inline
char * __attribute__((__unused__
))
169 strcat(char *cp
, const char *c2
)
171 __asm
volatile("locc $0,$65535,(%1);"
172 "subl3 %%r0,$65535,%%r2;"
174 "locc $0,$65535,(%0);"
175 "movc3 %%r2,(%1),(%%r1)"
178 : "r0","r1","r2","r3","r4","r5","memory","cc");
/*
 * strncat: like strcat above, but the first LOCC scan of c2 is bounded by
 * count instead of 65535; the copy length (r2) is thus min(strlen(c2),
 * count).  The appended region's NUL termination lines are missing from
 * this truncated copy.
 * NOTE(review): braces, output operands and return are missing; code kept
 * byte-for-byte.
 */
182 static __inline
char * __attribute__((__unused__
))
183 strncat(char *cp
, const char *c2
, size_t count
)
185 __asm
volatile("locc $0,%2,(%1);"
186 "subl3 %%r0,%2,%%r2;"
187 "locc $0,$65535,(%0);"
188 "movc3 %%r2,(%1),(%%r1);"
191 : "r" (cp
), "r" (c2
), "g"(count
)
192 : "r0","r1","r2","r3","r4","r5","memory","cc");
/*
 * strcpy: LOCC/SUBL3 measure the source string c2 (length into r2), then
 * MOVC3 copies it into cp.  Same 65535-byte LOCC limit as strlen above;
 * the line that copies the trailing NUL is missing from this truncated
 * copy.
 * NOTE(review): braces, input operand list and return are missing; code
 * kept byte-for-byte.
 */
196 static __inline
char * __attribute__((__unused__
))
197 strcpy(char *cp
, const char *c2
)
199 __asm
volatile("locc $0,$65535,(%1);"
200 "subl3 %%r0,$65535,%%r2;"
201 "movc3 %%r2,(%1),(%0);"
205 : "r0","r1","r2","r3","r4","r5","memory","cc");
/*
 * strncpy: loads len into r2, and (in lines missing from this copy)
 * presumably bounds the LOCC scan of c2 by len before SUBL3 computes the
 * byte count for the MOVC3 copy into cp.
 * NOTE(review): heavily truncated -- the LOCC scan, branch, braces and
 * return are missing; code kept byte-for-byte.  Also remember strncpy's
 * usual caveat: it need not NUL-terminate the destination.
 */
209 static __inline
char * __attribute__((__unused__
))
210 strncpy(char *cp
, const char *c2
, size_t len
)
212 __asm
volatile("movl %2,%%r2;"
215 "subl3 %%r0,%2,%%r2;"
218 "movc3 %%r2,(%1),(%0)"
220 : "r" (cp
), "r" (c2
), "g"(len
)
221 : "r0","r1","r2","r3","r4","r5","memory","cc");
/*
 * memchr: VAX LOCC locates byte c within the first len bytes at cp; the
 * lines that translate LOCC's result registers into the returned pointer
 * (or NULL) are missing from this truncated copy.
 * NOTE(review): braces, output operands, clobber list and return are
 * missing; code kept byte-for-byte.
 */
225 static __inline
void *__attribute__((__unused__
))
226 memchr(const void *cp
, int c
, size_t len
)
229 __asm
volatile("locc %2,%3,(%1);"
235 : "r" (cp
), "r" (c
), "g"(len
)
/*
 * strcmp: LOCC/SUBL3 measure cp (length into r0, 65535-byte limit), CMPC3
 * compares that many bytes of the two strings, and CMPB compares the
 * bytes at the first mismatch (r1/r3 point there after CMPC3).  The
 * branches and the lines producing the -1/0/1 result are missing from
 * this truncated copy.
 * NOTE(review): braces, operand lists and return missing; code kept
 * byte-for-byte.
 */
240 static __inline
int __attribute__((__unused__
))
241 strcmp(const char *cp
, const char *c2
)
244 __asm
volatile("locc $0,$65535,(%1);"
245 "subl3 %%r0,$65535,%%r0;"
247 "cmpc3 %%r0,(%1),(%2);"
250 "cmpb (%%r1),(%%r3);"
257 : "r0","r1","r2","r3","cc");
/*
 * locc(): thin wrapper over the VAX LOCC instruction -- locate the byte
 * `mask` within `size` bytes starting at cp.  Compiled out via #if 0 (per
 * the existing comment, kept only for reference).
 * NOTE(review): braces, output operands and return are missing from this
 * truncated copy; code kept byte-for-byte.
 */
262 #if 0 /* unused, but no point in deleting it since it _is_ an instruction */
263 static __inline
int __attribute__((__unused__
))
264 locc(int mask
, char *cp
, size_t size
){
267 __asm
volatile("locc %1,%2,(%3);"
270 : "r" (mask
),"r"(size
),"r"(cp
)
/*
 * vax_scanc(): wrapper over the VAX SCANC instruction -- scan `size`
 * bytes at cp, indexing each byte into `table` and ANDing the table entry
 * with `mask`; the scan stops at the first nonzero result.  The lines
 * producing the return value are missing from this truncated copy.
 * NOTE(review): braces, output operands and return missing; code kept
 * byte-for-byte.
 */
276 static __inline
int __attribute__((__unused__
))
277 vax_scanc(u_int size
, const u_char
*cp
, const u_char
*table
, int mask
)
281 __asm
volatile("scanc %1,(%2),(%3),%4;"
284 : "r"(size
),"r"(cp
),"r"(table
),"r"(mask
)
285 : "r0","r1","r2","r3" );
/* Route scanc() calls in this compilation unit to the inline. */
288 #define scanc vax_scanc
/*
 * vax_skpc(): wrapper over the VAX SKPC instruction -- skip leading bytes
 * equal to `mask` within `size` bytes at cp.  The lines producing the
 * return value are missing from this truncated copy.
 * NOTE(review): braces, output operands, clobber list and return missing;
 * code kept byte-for-byte.
 */
290 static __inline
int __attribute__((__unused__
))
291 vax_skpc(int mask
, size_t size
, u_char
*cp
)
295 __asm
volatile("skpc %1,%2,(%3);"
298 : "r"(mask
),"r"(size
),"r"(cp
)
/* Route skpc() calls in this compilation unit to the inline. */
302 #define skpc vax_skpc
/*
 * NOTE(review): the comment delimiters below were lost in truncation;
 * restored so the fragment stays a comment.
 */
/*
305 * Set/clear a bit at a memory position; interlocked.
306 * Return 0 if already set, 1 otherwise.
 */
/*
 * bbssi(): built on the VAX BBSSI instruction (branch on bit set and set,
 * interlocked) -- atomically test-and-set bit `bitnr` of *addr.  Result
 * is staged in r0 (cleared first); the branch/set lines and return are
 * missing from this truncated copy.  Code kept byte-for-byte.
 */
308 static __inline
int __attribute__((__unused__
))
309 bbssi(int bitnr
, long *addr
)
313 __asm
volatile("clrl %%r0;"
319 : "g"(bitnr
),"m"(*addr
)
320 : "r0","cc","memory");
/*
 * bbcci(): counterpart of bbssi above, built on the VAX BBCCI instruction
 * (branch on bit clear and clear, interlocked) -- atomically
 * test-and-clear bit `bitnr` of *addr.  Result staged in r0; the
 * branch/clear lines and return are missing from this truncated copy.
 * Code kept byte-for-byte.
 */
324 static __inline
int __attribute__((__unused__
))
325 bbcci(int bitnr
, long *addr
)
329 __asm
volatile("clrl %%r0;"
335 : "g"(bitnr
),"m"(*addr
)
336 : "r0","cc","memory");
/*
 * cpu_switchto(oldlwp, newlwp, returning): kernel context switch between
 * lwps (NetBSD lightweight processes).
 * NOTE(review): almost the entire body (braces, the asm template, output
 * operands, return) is missing from this truncated copy -- only the
 * signature and two input operands survive.  Code kept byte-for-byte;
 * recover the body from the original file before use.
 */
340 static inline struct lwp
*
341 cpu_switchto(struct lwp
*oldlwp
, struct lwp
*newlwp
, bool returning
)
351 : "g" (oldlwp
), "g" (newlwp
)
/*
 * NOTE(review): the comment delimiters below were lost in truncation;
 * restored so the fragments stay comments.
 */
/*
357 * Interlock instructions. Used both in multiprocessor environments to
358 * lock between CPUs and in uniprocessor systems when locking is required
359 * between I/O devices and the master CPU.
 *
362 * Insqti() locks and inserts an element into the end of a queue.
363 * Returns -1 if interlock failed, 1 if inserted OK and 0 if first in queue.
 */
/*
 * insqti(): built on the VAX INSQTI instruction (insert at queue tail,
 * interlocked).  Per the instruction's condition codes: C set => the
 * interlock failed (bcs), Z set => the entry is the first in the queue
 * (beql).  NOTE(review): the asm opening, the result-setting lines,
 * braces and return are missing from this truncated copy; code kept
 * byte-for-byte.
 */
365 static __inline
int __attribute__((__unused__
))
366 insqti(void *entry
, void *header
) {
372 " bcs 1f;" /* failed insert */
373 " beql 2f;" /* jump if first entry */
379 : "r"(entry
), "r"(header
)
/*
 * NOTE(review): the comment delimiters below were lost in truncation;
 * restored so the fragment stays a comment.
 */
/*
386 * Remqhi() removes an element from the head of the queue.
387 * Returns -1 if interlock failed, 0 if queue empty, address of the
388 * removed element otherwise.
 */
/*
 * remqhi(): built on the VAX REMQHI instruction (remove from queue head,
 * interlocked).  Condition codes: C set => interlock failed (bcs), V set
 * => queue was empty, nothing removed (bvs).  NOTE(review): the asm
 * opening, result handling, braces and return are missing from this
 * truncated copy; code kept byte-for-byte.
 */
390 static __inline
void *__attribute__((__unused__
))
391 remqhi(void *header
) {
396 " bcs 1f;" /* failed interlock */
397 " bvs 2f;" /* nothing was removed */
/* Shared status values for the interlocked-queue inlines above. */
409 #define ILCK_FAILED -1 /* Interlock failed */
410 #define Q_EMPTY 0 /* Queue is/was empty */
411 #define Q_OK 1 /* Inserted OK */
413 #endif /* !_VAX_MACROS_H_ && !__lint__ */