/* This code runs in userspace. */

#define DISABLE_BRANCH_PROFILING
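/*
 * (Added note: the likely()/unlikely() branch profiler stores its hit
 * counts in kernel-side tables that code running from the userspace-mapped
 * vsyscall page cannot reach, so the instrumentation is compiled out.)
 */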
/*
 * Kernel-internal headers this snippet relies on; the exact set and paths
 * are an assumption, reconstructed from the identifiers used below.
 */
#include <linux/clocksource.h>	/* cycle_t */
#include <asm/vsyscall.h>	/* __vsyscall_fn */
#include <asm/vgtod.h>		/* vsyscall_gtod_data */
#include <asm/vvar.h>		/* VVAR() */
#include <asm/tsc.h>		/* vget_cycles() */
#include <asm/system.h>		/* rdtsc_barrier() */

notrace cycle_t __vsyscall_fn vread_tsc(void)
{
	cycle_t ret;
	u64 last;

	/*
	 * Empirically, a fence (of type that depends on the CPU)
	 * before rdtsc is enough to ensure that rdtsc is ordered
	 * with respect to loads.  The various CPU manuals are unclear
	 * as to whether rdtsc can be reordered with later loads,
	 * but no one has ever seen it happen.
	 */
	rdtsc_barrier();
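	/*
	 * (Added note, believed accurate: rdtsc_barrier() is patched at boot
	 * into the lfence or mfence the running CPU needs, via the kernel's
	 * alternatives mechanism; see the standalone sketch after this
	 * function.)
	 */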
	ret = (cycle_t)vget_cycles();

	last = VVAR(vsyscall_gtod_data).clock.cycle_last;
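	/*
	 * (Added note: cycle_last is the TSC value sampled at the most recent
	 * timekeeping update; a read racing with that update, or taken on a
	 * CPU whose TSC lags slightly, can come back smaller, and the check
	 * below clamps it so the clock never appears to run backwards.)
	 */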

	if (likely(ret >= last))
		return ret;

	/*
	 * GCC likes to generate cmov here, but this branch is extremely
	 * predictable (it's just a function of time and the likely is
	 * very likely) and there's a data dependence, so force GCC
	 * to generate a branch instead.  I don't barrier() because
	 * we don't actually need a barrier, and if this function
	 * ever gets inlined it will generate worse code.
	 */
	asm volatile ("");
	return last;
}
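
/*
 * Two standalone sketches follow. They are illustrations added alongside
 * the file above, not part of it, and the helper names are made up.
 *
 * First, the fence idiom from vread_tsc() in plain userspace terms: a
 * fence issued before rdtsc keeps the timestamp read from drifting ahead
 * of earlier loads. This hard-codes lfence where the kernel patches in
 * the right fence per CPU.
 */
#include <stdint.h>

static inline uint64_t fenced_rdtsc(void)
{
	uint32_t lo, hi;

	/* rdtsc returns the counter in edx:eax. */
	asm volatile ("lfence; rdtsc" : "=a" (lo), "=d" (hi));
	return ((uint64_t)hi << 32) | lo;
}

/*
 * Second, the cmov-defeating idiom in isolation: without the empty asm,
 * GCC likes to compile the select below to cmov, which must wait on both
 * inputs; with it, GCC emits a conditional branch that predicts almost
 * always taken.
 */
static inline uint64_t clamp_monotonic(uint64_t ret, uint64_t last)
{
	if (ret >= last)
		return ret;

	asm volatile ("");	/* compiler-only; emits no instructions */
	return last;
}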