static struct mp_counts mp;
static volatile uint32_t spinlock = 0; /* volatile so the compiler cannot optimize the spin loop in smp_lock() away */

// defined in criticala.asm
int critical_enter_internal(void);

static void smp_lock(void)
{
    memory_barrier(); /* synchronize */
    asm volatile(
        "1: pause; "             /* spin-loop hint for P4/Xeon CPUs */
        " lock btsl $0x00, %0; " /* atomically test bit 0 of the lock and set it (bit test and set, long) */
        " jc 1b; "               /* still locked? then spin again */
        : "+m" (spinlock) : : "cc", "memory");
}

static void smp_unlock(void)
{
    spinlock = 0; /* unlock */
}

int critical_enter(void)
{
    int state = critical_enter_internal();

    // interrupts *were* enabled
    if(mp.processor_count > 1)
        smp_lock();

    return state;
}

void critical_exit(int state)
{
    if(mp.processor_count > 1){
        smp_unlock();
    }

    /* ... restore the interrupt state recorded in `state` ... */
}
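
/*
 * Usage sketch (illustrative; `shared_thing` is just a placeholder name):
 *
 *     int state = critical_enter();   // lock out other CPUs and interrupts
 *     shared_thing++;                 // touch data shared across CPUs
 *     critical_exit(state);           // release the lock, restore `state`
 */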

/* mb = memory barrier, used for code synchronization */
void memory_barrier(void)
{
    if(cpuid.features_edx & (1 << 26)) /* test for SSE2 */
        asm volatile("mfence");
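    /* mfence was introduced with SSE2 (hence the feature check above); it
       forces all earlier loads and stores to complete before later ones. */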