[binutils-gdb.git] / gprofng / libcollector / libcol_util.h
/* Copyright (C) 2021-2023 Free Software Foundation, Inc.
   Contributed by Oracle.

   This file is part of GNU Binutils.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, 51 Franklin Street - Fifth Floor, Boston,
   MA 02110-1301, USA.  */

#ifndef _LIBCOL_UTIL_H
#define _LIBCOL_UTIL_H

#include <stdarg.h>
#include <pthread.h>
#include <signal.h>

// LIBCOLLECTOR NOT I18N
#define NTXT(x) x
#define STXT(x) x

extern int __collector_tracelevel;

/* Initialization function */
extern int __collector_util_init();
extern void __collector_libkstat_funcs_init();
extern void __collector_libscf_funcs_init();

/* ------- functions from libcol_util.c ----------------- */
extern void * __collector_memcpy (void *s1, const void *s2, size_t n);
extern int (*__collector_sscanfp)(const char *restrict s, const char *restrict fmt, ...);
extern char * __collector_strcat (char *s1, const char *s2);
extern char * __collector_strchr (const char *s1, int chr);
extern size_t __collector_strlcpy (char *dst, const char *src, size_t dstsize);
extern char* __collector_strrchr (const char *str, int chr);
extern size_t __collector_strlen (const char *s);
extern size_t __collector_strlcat (char *dst, const char *src, size_t dstsize);
extern int __collector_strcmp (const char *s1, const char *s2);
extern int __collector_strncmp (const char *s1, const char *s2, size_t n);
extern char * __collector_strstr (const char *s1, const char *s2);
extern size_t __collector_strncpy (char *dst, const char *src, size_t dstsize);
extern size_t __collector_strncat (char *dst, const char *src, size_t dstsize);
extern void * __collector_malloc (size_t size);
extern void * __collector_calloc (size_t nelem, size_t elsize);
extern char * __collector_strdup (const char * str);
extern int __collector_strStartWith (const char *s1, const char *s2);
extern int __collector_xml_snprintf (char *s, size_t n, const char *format, ...) __attribute__ ((format (printf, 3, 4)));
extern int __collector_xml_vsnprintf (char *s, size_t n, const char *format, va_list args);

/* ------- collector_thread ----------------- */
extern pid_t __collector_gettid ();
extern void __collector_ext_gettid_tsd_create_key ();
typedef pthread_t collector_thread_t;
#define __collector_lwp_self() ((collector_thread_t) ((unsigned long) __collector_gettid()))
#define __collector_thr_self() ((collector_thread_t) ((unsigned long) __collector_gettid()))

/* ------- collector_mutex ----------------- */
/*
 * mutex_init is defined in libthread.  If we don't want to interact
 * with libthread we should use memset to initialize mutexes.
 */
typedef volatile int collector_mutex_t;
#define COLLECTOR_MUTEX_INITIALIZER 0
extern int __collector_mutex_lock (collector_mutex_t *mp);
extern int __collector_mutex_unlock (collector_mutex_t *mp);
extern int __collector_mutex_trylock (collector_mutex_t *mp);

#define __collector_mutex_init(xx) \
  do { collector_mutex_t tmp=COLLECTOR_MUTEX_INITIALIZER; *(xx)=tmp; } while(0)
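
/* Usage sketch (illustrative only; the lock name and the guarded update
   are hypothetical):

     static collector_mutex_t dump_lock = COLLECTOR_MUTEX_INITIALIZER;

     __collector_mutex_lock (&dump_lock);
     // ... update shared collector state ...
     __collector_mutex_unlock (&dump_lock);

   __collector_mutex_init (&dump_lock) re-initializes the lock with a plain
   store, without calling into libthread, as noted above.  */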

void __collector_sample (char *name);
void __collector_terminate_expt ();
void __collector_pause ();
void __collector_pause_m ();
void __collector_resume ();

struct DT_lineno;

typedef enum
{
  DFUNC_API = 1,	/* dynamic function declared with API */
  DFUNC_JAVA,		/* dynamically compiled java method */
  DFUNC_KERNEL		/* dynamic code mapped by the kernel (Linux) */
} dfunc_mode_t;

extern void __collector_int_func_load (dfunc_mode_t mode, char *name,
				       char *sourcename, void *vaddr,
				       int size, int lntsize,
				       struct DT_lineno *lntable);
extern void __collector_int_func_unload (dfunc_mode_t mode, void *vaddr);
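
/* Call sketch (illustrative only; code_buf, code_size and the name
   "generated_stub" are hypothetical):

     __collector_int_func_load (DFUNC_API, "generated_stub", NULL,
                                code_buf, code_size, 0, NULL);

   registers a dynamically generated code range with the collector so that
   addresses falling inside it can be attributed to "generated_stub";
   __collector_int_func_unload (DFUNC_API, code_buf) withdraws the mapping.  */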

extern int __collector_sigaction (int sig, const struct sigaction *nact,
				  struct sigaction *oact);
extern void __collector_SIGDFL_handler (int sig);
extern int __collector_ext_itimer_set (int period);
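
/* Installation sketch (illustrative only; prof_handler is hypothetical):

     struct sigaction act = { 0 };
     act.sa_handler = prof_handler;
     sigemptyset (&act.sa_mask);
     act.sa_flags = SA_RESTART;
     __collector_sigaction (SIGPROF, &act, NULL);

   The wrapper takes the same arguments as sigaction(2); passing NULL for
   oact discards the previous disposition.  */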

#if ARCH(Intel)
/* Atomic functions on x86/x64 */

/*
 * This function enables the incrementing (by one) of the value stored in target
 * to occur in an atomic manner.
 */
static __attribute__ ((always_inline)) inline void
__collector_inc_32 (uint32_t *ptr)
{
  __asm__ __volatile__("lock; incl %0"
		       : // "=m" (*ptr) // output
		       : "m" (*ptr)); // input
}

/*
 * This function enables the decrementing (by one) of the value stored in target
 * to occur in an atomic manner.
 */
static __attribute__ ((always_inline)) inline void
__collector_dec_32 (volatile uint32_t *ptr)
{
  __asm__ __volatile__("lock; decl %0"
		       : // "=m" (*ptr) // output
		       : "m" (*ptr)); // input
}

/*
 * This function atomically subtracts the value "off" from the value stored
 * in target and returns the new value stored in target.
 */
static __attribute__ ((always_inline)) inline uint32_t
__collector_subget_32 (uint32_t *ptr, uint32_t off)
{
  uint32_t r;
  uint32_t offset = off;
  __asm__ __volatile__("movl %2, %0; negl %0; lock; xaddl %0, %1"
		       : "=r" (r), "=m" (*ptr) /* output */
		       : "a" (off), "r" (*ptr) /* input */
		       );
  return (r - offset);
}

/*
 * This function returns the value of the stack pointer register
 */
static __attribute__ ((always_inline)) inline void *
__collector_getsp ()
{
  void *r;
#if WSIZE(32) || defined(__ILP32__)
  __asm__ __volatile__("movl %%esp, %0"
#else
  __asm__ __volatile__("movq %%rsp, %0"
#endif
		       : "=r" (r)); // output
  return r;
}

/*
 * This function returns the value of the frame pointer register
 */
static __attribute__ ((always_inline)) inline void *
__collector_getfp ()
{
  void *r;
#if WSIZE(32) || defined(__ILP32__)
  __asm__ __volatile__("movl %%ebp, %0"
#else
  __asm__ __volatile__("movq %%rbp, %0"
#endif
		       : "=r" (r)); // output
  return r;
}

/*
 * This function returns the value of the program counter register
 */
static __attribute__ ((always_inline)) inline void *
__collector_getpc ()
{
  void *r;
#if defined(__x86_64)
  __asm__ __volatile__("lea (%%rip), %0" : "=r" (r));
#else
  __asm__ __volatile__("call 1f \n"
		       "1: popl %0" : "=r" (r));
#endif
  return r;
}

/*
 * This function enables a compare and swap operation to occur atomically.
 * The 32-bit value stored in target is compared with "old". If these values
 * are equal, the value stored in target is replaced with "new". The old
 * 32-bit value stored in target is returned by the function whether or not
 * the replacement occurred.
 */
static __attribute__ ((always_inline)) inline uint32_t
__collector_cas_32 (volatile uint32_t *pdata, uint32_t old, uint32_t new)
{
  uint32_t r;
  __asm__ __volatile__("lock; cmpxchgl %2, %1"
		       : "=a" (r), "=m" (*pdata) : "r" (new),
		       "a" (old), "m" (*pdata));
  return r;
}
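
/* Retry-loop sketch (illustrative only; the counter name is hypothetical):

     static volatile uint32_t events = 0;
     uint32_t old, new;
     do
       {
         old = events;
         new = old + 1;
       }
     while (__collector_cas_32 (&events, old, new) != old);

   The swap took effect only if the returned value equals the "old" value
   passed in; otherwise another thread updated the target first and the
   loop retries with the fresh value.  */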

/*
 * This function enables a compare and swap operation to occur atomically.
 * The 64-bit value stored in target is compared with "old". If these values
 * are equal, the value stored in target is replaced with "new". The old
 * 64-bit value stored in target is returned by the function whether or not
 * the replacement occurred.
 */
static __attribute__ ((always_inline)) inline uint64_t
__collector_cas_64p (volatile uint64_t *mem, uint64_t *old, uint64_t *new)
{
  uint64_t r;
#if WSIZE(32)
  uint32_t old1 = (uint32_t) (*old & 0xFFFFFFFFL);
  uint32_t old2 = (uint32_t) ((*old >> 32) & 0xFFFFFFFFL);
  uint32_t new1 = (uint32_t) (*new & 0xFFFFFFFFL);
  uint32_t new2 = (uint32_t) ((*new >> 32) & 0xFFFFFFFFL);
  uint32_t res1 = 0;
  uint32_t res2 = 0;
  __asm__ __volatile__(
	   "movl %3, %%esi; lock; cmpxchg8b (%%esi); movl %%edx, %2; movl %%eax, %1"
	   : "=m" (r), "=m" (res1), "=m" (res2) /* output */
	   : "m" (mem), "a" (old1), "d" (old2), "b" (new1), "c" (new2) /* input */
	   : "memory", "cc", "esi" //, "edx", "ecx", "ebx", "eax" /* clobbered register */
	   );
  r = (((uint64_t) res2) << 32) | ((uint64_t) res1);
#else
  __asm__ __volatile__("lock; cmpxchgq %2, %1"
		       : "=a" (r), "=m" (*mem) /* output */
		       : "r" (*new), "a" (*old), "m" (*mem) /* input */
		       : "%rcx", "rdx" /* clobbered register */
		       );
#endif
  return r;
}

/*
 * This function enables a compare and swap operation to occur atomically.
 * The 32-/64-bit value stored in target is compared with "cmp". If these values
 * are equal, the value stored in target is replaced with "new".
 * The old value stored in target is returned by the function whether or not
 * the replacement occurred.
 */
static __attribute__ ((always_inline)) inline void *
__collector_cas_ptr (void *mem, void *cmp, void *new)
{
  void *r;
#if WSIZE(32) || defined(__ILP32__)
  r = (void *) __collector_cas_32 ((volatile uint32_t *) mem, (uint32_t) cmp, (uint32_t) new);
#else
  __asm__ __volatile__("lock; cmpxchgq %2, (%1)"
		       : "=a" (r), "=b" (mem) /* output */
		       : "r" (new), "a" (cmp), "b" (mem) /* input */
		       );
#endif
  return r;
}

#elif ARCH(Aarch64)
static __attribute__ ((always_inline)) inline uint32_t
__collector_inc_32 (volatile uint32_t *ptr)
{
  return __sync_add_and_fetch (ptr, 1);
}

static __attribute__ ((always_inline)) inline uint32_t
__collector_dec_32 (volatile uint32_t *ptr)
{
  return __sync_sub_and_fetch (ptr, 1);
}

static __attribute__ ((always_inline)) inline uint32_t
__collector_subget_32 (volatile uint32_t *ptr, uint32_t off)
{
  return __sync_sub_and_fetch (ptr, off);
}

static __attribute__ ((always_inline)) inline uint32_t
__collector_cas_32 (volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
  return __sync_val_compare_and_swap (ptr, old, new);
}

static __attribute__ ((always_inline)) inline uint64_t
__collector_cas_64p (volatile uint64_t *ptr, uint64_t *old, uint64_t *new)
{
  return __sync_val_compare_and_swap (ptr, *old, *new);
}

static __attribute__ ((always_inline)) inline void *
__collector_cas_ptr (void *ptr, void *old, void *new)
{
  return (void *) __sync_val_compare_and_swap ((unsigned long *) ptr,
					       (unsigned long) old,
					       (unsigned long) new);
}

#else
extern void __collector_flushw ();	/* defined for SPARC only */
extern void* __collector_getpc ();
extern void* __collector_getsp ();
extern void* __collector_getfp ();
extern void __collector_inc_32 (volatile uint32_t *);
extern void __collector_dec_32 (volatile uint32_t *);
extern void* __collector_cas_ptr (volatile void *, void *, void *);
extern uint32_t __collector_cas_32 (volatile uint32_t *, uint32_t, uint32_t);
extern uint32_t __collector_subget_32 (volatile uint32_t *, uint32_t);
extern uint64_t __collector_cas_64p (volatile uint64_t *, uint64_t *, uint64_t *);
#endif /* ARCH() */
#endif /* _LIBCOL_UTIL_H */