alpha: Add hidden alias for fegetenv.
[glibc-ports.git] / sysdeps / hppa / linuxthreads / pspinlock.c
blob e5a5545227b6a7bb97a4b13fb5cf124d8eaa807c
1 /* POSIX spinlock implementation. hppa version.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3 This file is part of the GNU C Library.
5 The GNU C Library is free software; you can redistribute it and/or
6 modify it under the terms of the GNU Lesser General Public License as
7 published by the Free Software Foundation; either version 2.1 of the
8 License, or (at your option) any later version.
10 The GNU C Library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
15 You should have received a copy of the GNU Lesser General Public
16 License along with the GNU C Library; see the file COPYING.LIB. If not,
17 write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA. */
20 #include <errno.h>
21 #include <pthread.h>
22 #include "internals.h"
24 int
25 __pthread_spin_lock (pthread_spinlock_t *lock)
27 volatile unsigned int *addr = __ldcw_align (lock);
29 while (__ldcw (addr) == 0)
30 while (*addr == 0) ;
32 return 0;
34 weak_alias (__pthread_spin_lock, pthread_spin_lock)
37 int
38 __pthread_spin_trylock (pthread_spinlock_t *lock)
40 volatile unsigned int *a = __ldcw_align (lock);
42 return __ldcw (a) ? 0 : EBUSY;
44 weak_alias (__pthread_spin_trylock, pthread_spin_trylock)
47 int
48 __pthread_spin_unlock (pthread_spinlock_t *lock)
50 volatile unsigned int *a = __ldcw_align (lock);
51 int tmp = 1;
52 /* This should be a memory barrier to newer compilers */
53 __asm__ __volatile__ ("stw,ma %1,0(%0)"
54 : : "r" (a), "r" (tmp) : "memory");
55 return 0;
57 weak_alias (__pthread_spin_unlock, pthread_spin_unlock)
60 int
61 __pthread_spin_init (pthread_spinlock_t *lock, int pshared)
63 /* We can ignore the `pshared' parameter. Since we are busy-waiting
64 all processes which can access the memory location `lock' points
65 to can use the spinlock. */
66 volatile unsigned int *a = __ldcw_align (lock);
67 int tmp = 1;
68 /* This should be a memory barrier to newer compilers */
69 __asm__ __volatile__ ("stw,ma %1,0(%0)"
70 : : "r" (a), "r" (tmp) : "memory");
71 return 0;
73 weak_alias (__pthread_spin_init, pthread_spin_init)
76 int
77 __pthread_spin_destroy (pthread_spinlock_t *lock)
79 /* Nothing to do. */
80 return 0;
82 weak_alias (__pthread_spin_destroy, pthread_spin_destroy)