/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */

#ifndef AJLA_REFCOUNT_H
#define AJLA_REFCOUNT_H

#include "addrlock.h"
#include "thread.h"
#include "asm.h"
#include "ptrcomp.h"
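
/*
 * Usage overview. refcount_t is an intrusive reference counter; the stored
 * word is biased so that a freshly initialized counter represents one
 * reference. A minimal sketch of a counted object follows; struct obj and
 * the obj_new/obj_get/obj_put helpers are illustrative, not part of Ajla:
 *
 *	struct obj {
 *		refcount_t refcount;
 *		int payload;
 *	};
 *
 *	struct obj *obj_new(void)
 *	{
 *		struct obj *o = malloc(sizeof(struct obj));
 *		refcount_init(&o->refcount);	(count is now one)
 *		return o;
 *	}
 *
 *	void obj_get(struct obj *o)
 *	{
 *		refcount_inc(&o->refcount);
 *	}
 *
 *	void obj_put(struct obj *o)
 *	{
 *		if (refcount_dec(&o->refcount))
 *			free(o);
 *	}
 */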

#if defined(REFCOUNT_WIN32)

#if !defined(_WIN64)
typedef ULONG refcount_int_t;
#define REFCOUNT_STEP 1
#define RefcountDec(x) ((LONG)InterlockedDecrement((/*volatile*/ LONG *)(x)) < 0)
#define RefcountAdd(x, v) \
do { \
	LONG v_ = (v); \
	while (v_ > 0) \
		InterlockedIncrement((/*volatile*/ LONG *)(x)), v_--; \
	while (v_ < 0) \
		InterlockedDecrement((/*volatile*/ LONG *)(x)), v_++; \
} while (0)
#define RefcountXchg(x, v) InterlockedExchange((/*volatile*/ LONG *)(x), v)
#else
typedef ULONGLONG refcount_int_t;
#define REFCOUNT_STEP 256
#define RefcountDec(x) ((ULONGLONG)InterlockedExchangeAdd64((/*volatile*/ LONGLONG *)(x), -REFCOUNT_STEP) < REFCOUNT_STEP)
#define RefcountAdd(x, v) InterlockedAdd64((/*volatile*/ LONGLONG *)(x), v)
#define RefcountXchg(x, v) InterlockedExchange64((/*volatile*/ LONGLONG *)(x), v)
#endif

#else

#if defined(SIZEOF_VOID_P) && SIZEOF_VOID_P >= 8 && !defined(DEBUG)
#define REFCOUNT_STEP 256
#else
#define REFCOUNT_STEP 1
#endif

typedef uintptr_t refcount_int_t;

#endif

#if REFCOUNT_STEP >= 32
#define REFCOUNT_TAG
#endif
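
/*
 * With REFCOUNT_STEP 256, the low 8 bits of the word are free to carry a
 * small tag and the count advances in multiples of 256. Worked example:
 * a stored value of 0x203 encodes tag 3 and (0x200 / 256) + 1 = 3 references
 * (see refcount_tag_get and refcount_get_nonatomic_ below).
 */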

#if defined(REFCOUNT_ASM_X86) || defined(REFCOUNT_ASM_X86_LABELS)
#if !defined(INLINE_ASM_GCC_X86_64)
#define REFCOUNT_ASM_X86_SUFFIX "l"
#else
#define REFCOUNT_ASM_X86_SUFFIX "q"
#endif
#endif

typedef struct {
#if !defined(REFCOUNT_ATOMIC) || defined(HAVE_REAL_GNUC)
	refcount_int_t refcount;
#define refcount_const const
#else
	/* we don't really want this because on parisc it causes unwanted locking during initialization */
	atomic_uintptr_t refcount;
#define refcount_const
#endif
#ifdef DEBUG_REFCOUNTS
	char position[20];
#endif
} refcount_t;
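
/*
 * refcount_const expands to const only when the counter is a plain integer;
 * the C11 atomic accessors were not const-correct before C17, so the atomic
 * variant has to drop the qualifier.
 */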

#if !defined(REFCOUNT_ATOMIC)
#define MEMORY_ORDER_DEC 0
#define MEMORY_ORDER_TEST_1 0
#define memory_order_relaxed 0
#elif defined(THREAD_SANITIZER)
#define MEMORY_ORDER_DEC memory_order_acq_rel
#define MEMORY_ORDER_TEST_1 memory_order_acquire
#else
#define MEMORY_ORDER_DEC memory_order_release
#define MEMORY_ORDER_TEST_1 memory_order_relaxed
#endif
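
/*
 * The decrement uses release ordering so that every write to the object
 * happens-before the final reference drop, and the "is the count one?" test
 * can be relaxed because only the sole remaining owner can observe one.
 * ThreadSanitizer does not recognize this idiom, so stronger orderings are
 * substituted under THREAD_SANITIZER to avoid false positives.
 */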

#ifdef REFCOUNTS_ARE_ATOMIC
#ifdef REFCOUNT_ATOMIC
#define REFCOUNT_VOLATILE(r, order) (atomic_load_explicit(&(r)->refcount, order))
#else
#define REFCOUNT_VOLATILE(r, order) (*cast_ptr(volatile refcount_const refcount_int_t *, &(r)->refcount))
#endif
#endif

#if defined(SIZEOF_VOID_P) && defined(POINTER_IGNORE_BITS)
#define REFCOUNT_LIMIT (((uintptr_t)1 << (SIZEOF_VOID_P * 8 - 1 - POINTER_IGNORE_BITS)) / (SIZEOF_VOID_P / 2))
#elif defined(SIZEOF_VOID_P)
#define REFCOUNT_LIMIT (((uintptr_t)1 << (SIZEOF_VOID_P * 8 - 1)) / (SIZEOF_VOID_P / 2))
#else
#define REFCOUNT_LIMIT ((refcount_int_t)-(2 * REFCOUNT_STEP))
#endif
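
/*
 * REFCOUNT_LIMIT bounds how large a legitimate count can get: every live
 * reference occupies at least a pointer-sized slot somewhere in memory, so
 * the count can never plausibly approach the address-space size. On a
 * 64-bit target, for example, the limit is (2^63) / 4 = 2^61. Anything at
 * or above the limit is treated as corruption by refcount_is_invalid below.
 */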

static attr_always_inline void refcount_set_position(refcount_t attr_unused *r, const char attr_unused *position)
{
#ifdef DEBUG_REFCOUNTS
	size_t l = strlen(position);
	if (likely(l < sizeof(r->position)))
		strcpy(r->position, position);
	else
		strcpy(r->position, position + (l - sizeof(r->position)) + 1);
#endif
}

static attr_always_inline void refcount_init_raw_(refcount_t *r, refcount_int_t val, const char *position)
{
#if !defined(REFCOUNT_ATOMIC) || defined(HAVE_REAL_GNUC)
	r->refcount = val;
#else
	atomic_init(&r->refcount, val);
#endif
	refcount_set_position(r, position);
}

static attr_always_inline void refcount_init_(refcount_t *r, const char *position)
{
	refcount_init_raw_(r, 0, position);
}

#ifdef REFCOUNT_TAG
static attr_always_inline void refcount_init_tag_(refcount_t *r, unsigned char tag, const char *position)
{
#ifdef DEBUG_REFCOUNTS
	unsigned t = tag;
	if (unlikely(t >= REFCOUNT_STEP))
		internal(file_line, "refcount_init_tag: invalid tag %u", tag);
#endif
	refcount_init_raw_(r, tag, position);
}
#endif

static attr_always_inline void refcount_init_val_(refcount_t *r, refcount_int_t val, const char *position)
{
	refcount_init_raw_(r, (val - 1) * REFCOUNT_STEP, position);
}
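
/*
 * The bias in action: refcount_init() stores 0, which means one reference,
 * and refcount_init_val(r, n) stores (n - 1) * REFCOUNT_STEP. For example,
 * refcount_init_val(r, 3) stores 2 * REFCOUNT_STEP; the third refcount_dec()
 * then sees a stored value below REFCOUNT_STEP and reports the final release.
 */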

#if defined(REFCOUNTS_ARE_ATOMIC) && !defined(THREAD_NONE)
#define refcount_get_(r, order) REFCOUNT_VOLATILE(r, order)
#elif defined(REFCOUNT_SYNC)
static attr_always_inline refcount_int_t refcount_get_(refcount_const refcount_t *r, int attr_unused order)
{
	return __sync_add_and_fetch(cast_ptr(refcount_int_t *, &r->refcount), 0);
}
#else
static attr_always_inline refcount_int_t refcount_get_(refcount_const refcount_t *r, int attr_unused order)
{
	refcount_int_t result;
	address_lock(r, DEPTH_REFCOUNT);
	result = r->refcount;
	address_unlock(r, DEPTH_REFCOUNT);
	return result;
}
#endif
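
/*
 * The legacy __sync builtins provide no plain atomic load, so adding zero
 * with __sync_add_and_fetch doubles as one, at the cost of a full barrier
 * (the order argument is necessarily ignored on the fallback paths).
 */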

static attr_always_inline bool refcount_is_read_only(refcount_const refcount_t *r)
{
	refcount_int_t v = refcount_get_(r, memory_order_relaxed);
#ifdef DEBUG_REFCOUNTS
	return v >= (refcount_int_t)-(REFCOUNT_STEP * 2) && v < (refcount_int_t)-REFCOUNT_STEP;
#else
	return v >= (refcount_int_t)-REFCOUNT_STEP;
#endif
}

static attr_always_inline void refcount_set_read_only(refcount_t *r)
{
#ifdef DEBUG_REFCOUNTS
	refcount_int_t val = (refcount_int_t)-(REFCOUNT_STEP * 2);
#else
	refcount_int_t val = (refcount_int_t)-REFCOUNT_STEP;
#endif
#ifdef REFCOUNT_TAG
	val |= refcount_get_(r, memory_order_relaxed) & (REFCOUNT_STEP - 1);
#endif
	refcount_init_raw_(r, val, file_line);
}
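
/*
 * Read-only (immortal) objects park the counter near the top of the integer
 * range. Debug builds use -(REFCOUNT_STEP * 2) instead of -REFCOUNT_STEP so
 * that a stray increment or decrement moves the value out of the read-only
 * band while keeping it above REFCOUNT_LIMIT, where refcount_validate will
 * flag it.
 */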

static attr_always_inline bool refcount_is_invalid(refcount_const refcount_t attr_unused *r)
{
	return !refcount_is_read_only(r) && refcount_get_(r, memory_order_relaxed) >= REFCOUNT_LIMIT;
}

static attr_always_inline void refcount_validate(refcount_const refcount_t attr_unused *r, const char attr_unused *msg, const char attr_unused *position)
{
#ifdef DEBUG_REFCOUNTS
	if (unlikely(!r))
		internal(position, "%s: refcount is NULL", msg);
	if (unlikely(refcount_is_invalid(r))) {
		internal(position, "%s: refcount %p is invalid: %"PRIxMAX" (%s)", msg, r, (uintmax_t)refcount_get_(r, memory_order_relaxed), r->position);
	}
#endif
}

static attr_always_inline bool refcount_is_one_(refcount_const refcount_t *r, const char *position)
{
	refcount_validate(r, "refcount_is_one", position);
	return refcount_get_(r, MEMORY_ORDER_TEST_1) < REFCOUNT_STEP;
}

#ifdef REFCOUNT_TAG
static attr_always_inline unsigned char refcount_tag_get(refcount_const refcount_t *r)
{
#if defined(C_LITTLE_ENDIAN) && !defined(__alpha__) && !defined(THREAD_SANITIZER)
	return ((volatile unsigned char *)&r->refcount)[0] & (REFCOUNT_STEP - 1);
#elif defined(C_BIG_ENDIAN) && !defined(THREAD_SANITIZER)
	return ((volatile unsigned char *)&r->refcount)[sizeof(refcount_int_t) - 1] & (REFCOUNT_STEP - 1);
#else
	return refcount_get_(r, memory_order_relaxed) & (REFCOUNT_STEP - 1);
#endif
}
#endif
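
/*
 * The tag occupies the least significant bits, so on most targets it can be
 * read with a single narrow load: the lowest-addressed byte on little-endian
 * machines, the highest-addressed byte on big-endian ones. Alpha is excluded
 * (early Alphas lack byte loads) and so is ThreadSanitizer, which would
 * report the mixed-size access as a data race.
 */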

static attr_always_inline void refcount_add_raw_(refcount_t *r, refcount_int_t add, const char *position)
{
	refcount_int_t attr_unused value;
	if (position)
		refcount_validate(r, "refcount_add_raw", position);
#if defined(REFCOUNT_ASM_X86) || defined(REFCOUNT_ASM_X86_LABELS)
	if (is_constant(add) && add == 1)
		__asm__ volatile ("lock; inc"REFCOUNT_ASM_X86_SUFFIX" %0"::"m"(r->refcount):"cc","memory");
	else if (is_constant(add) && add == (refcount_int_t)-1)
		__asm__ volatile ("lock; dec"REFCOUNT_ASM_X86_SUFFIX" %0"::"m"(r->refcount):"cc","memory");
	else
		__asm__ volatile ("lock; add"REFCOUNT_ASM_X86_SUFFIX" %1, %0"::"m"(r->refcount),"ir"(add):"cc","memory");
#if defined(DEBUG_REFCOUNTS) && defined(REFCOUNTS_ARE_ATOMIC)
	value = REFCOUNT_VOLATILE(r, memory_order_relaxed);
#else
	value = REFCOUNT_STEP;
#endif
#elif defined(REFCOUNT_ATOMIC)
	value = atomic_fetch_add_explicit(&r->refcount, add, memory_order_relaxed);
	value += add;
#elif defined(REFCOUNT_SYNC)
	value = __sync_add_and_fetch(&r->refcount, add);
#elif defined(REFCOUNT_WIN32)
	RefcountAdd(&r->refcount, add);
#if defined(DEBUG_REFCOUNTS) && defined(REFCOUNTS_ARE_ATOMIC)
	value = REFCOUNT_VOLATILE(r, memory_order_relaxed);
#else
	value = REFCOUNT_STEP;
#endif
#elif defined(REFCOUNT_LOCK)
	address_lock(r, DEPTH_REFCOUNT);
	value = r->refcount += add;
	address_unlock(r, DEPTH_REFCOUNT);
#else
	error - no refcount method
#endif
#ifdef DEBUG_REFCOUNTS
	if (position) {
		refcount_t test;
		test.refcount = value;
		if (unlikely(refcount_is_invalid(&test)) || (unlikely(refcount_is_one_(&test, position)) && unlikely(add >= REFCOUNT_STEP) && likely(add < sign_bit(refcount_int_t))))
			internal(position, "refcount_add_raw_: refcount overflow: %"PRIuMAX" (adding %"PRIuMAX")", (uintmax_t)value, (uintmax_t)add);
	}
#endif
}
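
/*
 * On x86 the "lock inc/dec/add" forms are used instead of xadd because the
 * updated value is not needed on the fast path; it is only read back (or
 * faked as REFCOUNT_STEP) to feed the debug overflow check above.
 */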

#ifdef REFCOUNT_TAG
static attr_always_inline void refcount_tag_set_(refcount_t *r, unsigned char old_tag, unsigned char new_tag, const char attr_unused *position)
{
#ifdef DEBUG_REFCOUNTS
	if (unlikely(refcount_tag_get(r) != old_tag))
		internal(position, "refcount_tag_set: tag does not match: %u != %u; new tag %u", refcount_tag_get(r), old_tag, new_tag);
#endif
	refcount_add_raw_(r, (refcount_int_t)new_tag - (refcount_int_t)old_tag, NULL);
}
#endif
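
/*
 * The tag is changed with a single atomic add of (new_tag - old_tag): both
 * tags are below REFCOUNT_STEP, so the difference only touches the tag bits
 * and the count itself is preserved. The caller must know the current tag;
 * there is no atomic read-modify-write of the tag alone.
 */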

static attr_always_inline void refcount_poison_(refcount_t attr_unused *r, const char attr_unused *position)
{
#ifdef DEBUG_REFCOUNTS
#ifndef REFCOUNT_TAG
	refcount_init_raw_(r, -1, position);
#else
	refcount_int_t add = (refcount_int_t)-REFCOUNT_STEP - (refcount_int_t)((refcount_int_t)refcount_get_(r, memory_order_relaxed) & (refcount_int_t)-REFCOUNT_STEP);
	refcount_add_raw_(r, add, NULL);
#endif
#endif
}

static attr_always_inline void refcount_poison_tag_(refcount_t attr_unused *r, const char attr_unused *position)
{
#ifdef DEBUG_REFCOUNTS
	refcount_init_raw_(r, -1, position);
#endif
}

#if !defined(THREAD_NONE) && defined(REFCOUNTS_ARE_ATOMIC) && !defined(UNUSUAL_ARITHMETICS)
#define refcount_one_fastpath \
do { \
	if (likely(REFCOUNT_VOLATILE(r, MEMORY_ORDER_TEST_1) < REFCOUNT_STEP)) {\
		refcount_poison_(r, position); \
		return true; \
	} \
} while (0)
#else
#define refcount_one_fastpath do { } while (0)
#endif
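
/*
 * Fast path for the common final-release case: if a plain load already shows
 * a count of one, the calling thread holds the only reference, nobody else
 * can concurrently change the counter, and the atomic read-modify-write can
 * be skipped altogether.
 */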

static attr_always_inline bool refcount_dec_(refcount_t *r, const char *position)
{
	bool result;
	refcount_validate(r, "refcount_dec", position);
	refcount_set_position(r, position);
#if defined(REFCOUNT_ASM_X86_LABELS)
	refcount_one_fastpath;
	__asm__ goto ("lock; sub"REFCOUNT_ASM_X86_SUFFIX" %1, %0; jc %l[x86_ret_true]"::"m"(r->refcount),"i"(REFCOUNT_STEP):"cc","memory":x86_ret_true);
	return false;
x86_ret_true:
	refcount_poison_(r, position);
	return true;
#elif defined(REFCOUNT_ASM_X86)
	refcount_one_fastpath;
	if (sizeof(bool) == 1) {
		__asm__ volatile ("lock; sub"REFCOUNT_ASM_X86_SUFFIX" %2, %1; setc %0":"=q"(result):"m"(r->refcount),"i"(REFCOUNT_STEP):"cc","memory");
	} else {
		unsigned char res;
		__asm__ volatile ("lock; sub"REFCOUNT_ASM_X86_SUFFIX" %2, %1; setc %0":"=q"(res):"m"(r->refcount),"i"(REFCOUNT_STEP):"cc","memory");
		result = res;
	}
	return result;
#elif defined(REFCOUNT_ATOMIC)
	refcount_one_fastpath;
	result = atomic_fetch_sub_explicit(&r->refcount, REFCOUNT_STEP, MEMORY_ORDER_DEC) < REFCOUNT_STEP;
#elif defined(REFCOUNT_SYNC)
	refcount_one_fastpath;
	result = (refcount_int_t)(__sync_sub_and_fetch(&r->refcount, REFCOUNT_STEP) + REFCOUNT_STEP) < REFCOUNT_STEP;
#elif defined(REFCOUNT_WIN32)
	refcount_one_fastpath;
	result = RefcountDec(&r->refcount);
#elif defined(REFCOUNT_LOCK)
	refcount_one_fastpath;
	address_lock(r, DEPTH_REFCOUNT);
	result = (refcount_int_t)(r->refcount -= REFCOUNT_STEP) >= (refcount_int_t)-REFCOUNT_STEP;
	address_unlock(r, DEPTH_REFCOUNT);
#else
	error - no refcount method
#endif
	if (result)
		refcount_poison_(r, position);
	return result;
}

#undef refcount_one_fastpath
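
/*
 * Every variant above tests the same condition in a different guise: the
 * stored word was below REFCOUNT_STEP before the subtraction (x86 checks the
 * carry flag, the atomic variant compares the fetched value, the others test
 * for wrap-around after subtracting), i.e. the caller has just dropped the
 * final reference.
 */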

static attr_always_inline bool refcount_xchgcmp(refcount_t *r, refcount_int_t val, refcount_int_t cmp)
{
	refcount_int_t result;
	val = (val - 1) * REFCOUNT_STEP;
#if defined(REFCOUNT_ASM_X86) || defined(REFCOUNT_ASM_X86_LABELS)
	__asm__ ("xchg"REFCOUNT_ASM_X86_SUFFIX" %1, %0":"=r"(result):"m"(r->refcount),"0"(val):"memory");
#elif defined(REFCOUNT_ATOMIC)
	result = atomic_exchange_explicit(&r->refcount, val, memory_order_acquire);
#elif defined(REFCOUNT_SYNC)
	do {
		result = refcount_get_(r, memory_order_relaxed);
	} while (unlikely(!__sync_bool_compare_and_swap(&r->refcount, result, val)));
#elif defined(REFCOUNT_WIN32)
	result = RefcountXchg(&r->refcount, val);
#elif defined(REFCOUNT_LOCK)
	address_lock(r, DEPTH_REFCOUNT);
	result = r->refcount;
	refcount_init_raw_(r, val, file_line);
	address_unlock(r, DEPTH_REFCOUNT);
#else
	error - no refcount method
#endif
	return result == (cmp - 1) * REFCOUNT_STEP;
}
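
/*
 * refcount_xchgcmp(r, val, cmp) atomically sets the count to val and reports
 * whether the old count was exactly cmp. Both values use the usual
 * (n - 1) * REFCOUNT_STEP encoding, so with REFCOUNT_TAG the comparison only
 * matches when the tag bits are zero as well.
 */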

static attr_always_inline void refcount_set(refcount_t *r, refcount_int_t val)
{
	val = (val - 1) * REFCOUNT_STEP;
#if defined(REFCOUNT_ASM_X86) || defined(REFCOUNT_ASM_X86_LABELS)
	__asm__ ("mov"REFCOUNT_ASM_X86_SUFFIX" %1, %0":"=m"(r->refcount):"ri"(val));
#elif defined(REFCOUNT_ATOMIC)
	atomic_store_explicit(&r->refcount, val, memory_order_relaxed);
#elif defined(REFCOUNT_SYNC)
	{
		refcount_int_t result;
		do {
			result = refcount_get_(r, memory_order_relaxed);
		} while (unlikely(!__sync_bool_compare_and_swap(&r->refcount, result, val)));
	}
#elif defined(REFCOUNT_WIN32)
	*(volatile refcount_int_t *)&r->refcount = val;
#elif defined(REFCOUNT_LOCK)
	address_lock(r, DEPTH_REFCOUNT);
	refcount_init_raw_(r, val, file_line);
	address_unlock(r, DEPTH_REFCOUNT);
#else
	error - no refcount method
#endif
}

static attr_always_inline bool refcount_is_one_nonatomic_(refcount_const refcount_t *r, const char *position)
{
	refcount_validate(r, "refcount_is_one_nonatomic", position);
	return r->refcount < REFCOUNT_STEP;
}

static attr_always_inline void refcount_inc_nonatomic_(refcount_t *r, const char *position)
{
	refcount_validate(r, "refcount_inc_nonatomic", position);
	r->refcount += REFCOUNT_STEP;
#ifdef DEBUG_REFCOUNTS
	if (unlikely(refcount_is_invalid(r)) || unlikely(refcount_is_one_(r, position)))
		internal(position, "refcount_inc_nonatomic: refcount overflow: %"PRIuMAX"", (uintmax_t)r->refcount);
#endif
}

static attr_always_inline bool refcount_dec_nonatomic_(refcount_t *r, const char *position)
{
	bool result;
	refcount_validate(r, "refcount_dec_nonatomic", position);
	refcount_init_raw_(r, r->refcount - REFCOUNT_STEP, position);
	result = (refcount_int_t)r->refcount >= (refcount_int_t)-REFCOUNT_STEP;
	if (result)
		refcount_poison_(r, position);
	return result;
}

static attr_always_inline refcount_int_t refcount_get_nonatomic_(refcount_t *r, const char *position)
{
	refcount_validate(r, "refcount_get_nonatomic", position);
	return (r->refcount / REFCOUNT_STEP) + 1;
}
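
/*
 * The _nonatomic variants trade the atomic operation for a plain
 * read-modify-write; they are only safe while the caller otherwise
 * guarantees exclusive access to the object (during construction, or under
 * an external lock). refcount_get_nonatomic_ undoes the bias and returns the
 * real reference count.
 */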

#define refcount_init(r) refcount_init_(r, file_line)
#define refcount_init_tag(r, tag) refcount_init_tag_(r, tag, file_line)
#define refcount_init_val(r, val) refcount_init_val_(r, val, file_line)
#define refcount_is_one(r) refcount_is_one_(r, file_line)
#define refcount_inc(r) refcount_add_raw_(r, REFCOUNT_STEP, file_line)
#define refcount_inc_(r, position) refcount_add_raw_(r, REFCOUNT_STEP, position)
#define refcount_add(r, val) refcount_add_raw_(r, (refcount_int_t)(val) * REFCOUNT_STEP, file_line)
#define refcount_add_(r, val, position) refcount_add_raw_(r, (refcount_int_t)(val) * REFCOUNT_STEP, position)
#define refcount_poison(r) refcount_poison_(r, file_line)
#define refcount_poison_tag(r) refcount_poison_tag_(r, file_line)
#define refcount_dec(r) refcount_dec_(r, file_line)
#define refcount_is_one_nonatomic(r) refcount_is_one_nonatomic_(r, file_line)
#define refcount_inc_nonatomic(r) refcount_inc_nonatomic_(r, file_line)
#define refcount_dec_nonatomic(r) refcount_dec_nonatomic_(r, file_line)
#define refcount_get_nonatomic(r) refcount_get_nonatomic_(r, file_line)

#ifdef REFCOUNT_TAG
#define refcount_tag_set(r, old_tag, new_tag) refcount_tag_set_(r, old_tag, new_tag, file_line)
#endif
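
/*
 * All public entry points capture file_line at the call site; debug builds
 * record it in refcount_t.position via refcount_set_position and print it
 * when refcount_validate detects a NULL or corrupted counter.
 */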

#endif