#define NR_STEPS ((uint32_t)UINT16_MAX * 4)

#define BUG_ON(COND) \
        /* ... */
/* Each thread bumps the shared counter NR_STEPS times with a relaxed fetch-add. */
void *adder_simple(void *arg)
{
        atomic_size_t *counter = arg;
        uint32_t step;

        for (step = 0; step < NR_STEPS; ++step)
                atomic_fetch_add_explicit(counter, 1, memory_order_relaxed);

        return NULL;
}
/* The same increment, expressed as a load plus a compare-exchange retry loop. */
void *adder_cmpxchg(void *arg)
{
        atomic_size_t *counter = arg;
        uint32_t step;

        for (step = 0; step < NR_STEPS; ++step) {
                size_t cmp = atomic_load_explicit(counter, memory_order_relaxed);
                size_t xchg;

                do {
                        /* On failure cmp is refreshed with the current value, so recompute. */
                        xchg = cmp + 1;
                } while (!atomic_compare_exchange_strong_explicit(counter,
                                &cmp, xchg,
                                memory_order_relaxed, memory_order_relaxed));
        }

        return NULL;
}
/* Run NR_THREADS copies of the given adder on one shared counter and check the total. */
void atomic_counter_test(void *(*adder)(void *arg))
{
        atomic_size_t counter;
        pthread_t thread[NR_THREADS];
        size_t index;

        atomic_init(&counter, 0);

        for (index = 0; index < NR_THREADS; ++index)
                BUG_ON(pthread_create(&thread[index], NULL,
                                      adder, (void *)&counter));

        for (index = 0; index < NR_THREADS; ++index)
                BUG_ON(pthread_join(thread[index], NULL));

        if (atomic_load(&counter) == (NR_THREADS * NR_STEPS)) {
                /* ... report success; otherwise some increments were lost ... */
        }
}
int main(void)
{
        atomic_counter_test(adder_simple);
        atomic_counter_test(adder_cmpxchg);
        return 0;
}
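The excerpt begins at the NR_STEPS definition, so the headers, the NR_THREADS constant, and the body of BUG_ON are not part of it. A minimal prelude along the following lines makes the listing build as a standalone program; the NR_THREADS value and the exact BUG_ON implementation shown here are assumptions, not the original definitions.

#include <pthread.h>
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define NR_THREADS 4    /* assumed thread count; defined earlier in the real file */

/* Assumed BUG_ON: abort loudly if a pthread call returns an error. */
#define BUG_ON(COND)                                            \
        do {                                                    \
                if (COND) {                                     \
                        fprintf(stderr, "BUG: %s\n", #COND);    \
                        abort();                                \
                }                                               \
        } while (0)

Built with -pthread (as pthread programs typically are), both runs should finish with the counter equal to NR_THREADS * NR_STEPS, since every increment is applied atomically in both adders.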