2 * IPRT - AMD64 and x86 Specific Assembly Functions.
6 * Copyright (C) 2006-2024 Oracle and/or its affiliates.
8 * This file is part of VirtualBox base platform packages, as
9 * available from https://www.virtualbox.org.
11 * This program is free software; you can redistribute it and/or
12 * modify it under the terms of the GNU General Public License
13 * as published by the Free Software Foundation, in version 3 of the
16 * This program is distributed in the hope that it will be useful, but
17 * WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * General Public License for more details.
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 * The contents of this file may alternatively be used under the terms
25 * of the Common Development and Distribution License Version 1.0
26 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
27 * in the VirtualBox distribution, in which case the provisions of the
28 * CDDL are applicable instead of those of the GPL.
30 * You may elect to license modified versions of this file under the
31 * terms and conditions of either the GPL or the CDDL or both.
33 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
36 #ifndef IPRT_INCLUDED_asm_amd64_x86_h
37 #define IPRT_INCLUDED_asm_amd64_x86_h
38 #ifndef RT_WITHOUT_PRAGMA_ONCE
42 #include <iprt/types.h>
43 #include <iprt/assert.h>
44 #include <iprt/x86-helpers.h>
45 #if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
46 # error "Not on AMD64 or x86"
49 #if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
50 /* Emit the intrinsics at all optimization levels. */
51 # include <iprt/sanitized/intrin.h>
52 # pragma intrinsic(_ReadWriteBarrier)
53 # pragma intrinsic(__cpuid)
54 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/
55 # pragma intrinsic(__cpuidex)
57 # pragma intrinsic(_enable)
58 # pragma intrinsic(_disable)
59 # pragma intrinsic(__rdtsc)
60 # pragma intrinsic(__readmsr)
61 # pragma intrinsic(__writemsr)
62 # pragma intrinsic(__outbyte)
63 # pragma intrinsic(__outbytestring)
64 # pragma intrinsic(__outword)
65 # pragma intrinsic(__outwordstring)
66 # pragma intrinsic(__outdword)
67 # pragma intrinsic(__outdwordstring)
68 # pragma intrinsic(__inbyte)
69 # pragma intrinsic(__inbytestring)
70 # pragma intrinsic(__inword)
71 # pragma intrinsic(__inwordstring)
72 # pragma intrinsic(__indword)
73 # pragma intrinsic(__indwordstring)
74 # pragma intrinsic(__invlpg)
75 # pragma intrinsic(__wbinvd)
76 # pragma intrinsic(__readcr0)
77 # pragma intrinsic(__readcr2)
78 # pragma intrinsic(__readcr3)
79 # pragma intrinsic(__readcr4)
80 # pragma intrinsic(__writecr0)
81 # pragma intrinsic(__writecr3)
82 # pragma intrinsic(__writecr4)
83 # pragma intrinsic(__readdr)
84 # pragma intrinsic(__writedr)
86 # pragma intrinsic(__readcr8)
87 # pragma intrinsic(__writecr8)
89 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2005
90 # pragma intrinsic(__halt)
92 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
93 /*# pragma intrinsic(__readeflags) - buggy intrinsics in VC++ 2010, reordering/optimizers issues
94 # pragma intrinsic(__writeeflags) */
95 # pragma intrinsic(__rdtscp)
97 # if defined(RT_ARCH_AMD64) && RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015 /*?*/
98 # pragma intrinsic(_readfsbase_u64)
99 # pragma intrinsic(_readgsbase_u64)
100 # pragma intrinsic(_writefsbase_u64)
101 # pragma intrinsic(_writegsbase_u64)
103 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
104 # pragma intrinsic(__lidt)
105 # pragma intrinsic(__sidt)
106 # pragma intrinsic(_lgdt)
107 # pragma intrinsic(_sgdt)
113 * Undefine all symbols we have Watcom C/C++ #pragma aux'es for.
115 #if defined(__WATCOMC__) && ARCH_BITS == 16
116 # include "asm-amd64-x86-watcom-16.h"
117 #elif defined(__WATCOMC__) && ARCH_BITS == 32
118 # include "asm-amd64-x86-watcom-32.h"
122 /** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
123 * @ingroup grp_rt_asm
127 /** @todo find a more proper place for these structures? */
131 typedef struct RTIDTR
133 /** Size of the IDT. */
135 /** Address of the IDT. */
141 } RTIDTR
, RT_FAR
*PRTIDTR
;
146 typedef struct RTIDTRALIGNEDINT
148 /** Alignment padding. */
149 uint16_t au16Padding
[ARCH_BITS
== 64 ? 3 : 1];
150 /** The IDTR structure. */
155 /** Wrapped RTIDTR for preventing misalignment exceptions. */
156 typedef union RTIDTRALIGNED
158 /** Try make sure this structure has optimal alignment. */
159 uint64_t auAlignmentHack
[ARCH_BITS
== 64 ? 2 : 1];
160 /** Aligned structure. */
163 AssertCompileSize(RTIDTRALIGNED
, ((ARCH_BITS
== 64) + 1) * 8);
164 /** Pointer to an RTIDTR alignment wrapper. */
165 typedef RTIDTRALIGNED RT_FAR
*PRIDTRALIGNED
;
170 typedef struct RTGDTR
172 /** Size of the GDT. */
174 /** Address of the GDT. */
180 } RTGDTR
, RT_FAR
*PRTGDTR
;
185 typedef struct RTGDTRALIGNEDINT
187 /** Alignment padding. */
188 uint16_t au16Padding
[ARCH_BITS
== 64 ? 3 : 1];
189 /** The GDTR structure. */
194 /** Wrapped RTGDTR for preventing misalignment exceptions. */
195 typedef union RTGDTRALIGNED
197 /** Try make sure this structure has optimal alignment. */
198 uint64_t auAlignmentHack
[ARCH_BITS
== 64 ? 2 : 1];
199 /** Aligned structure. */
202 AssertCompileSize(RTIDTRALIGNED
, ((ARCH_BITS
== 64) + 1) * 8);
203 /** Pointer to an RTGDTR alignment wrapper. */
204 typedef RTGDTRALIGNED RT_FAR
*PRGDTRALIGNED
;
208 * Gets the content of the IDTR CPU register.
209 * @param pIdtr Where to store the IDTR contents.
211 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
212 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetIDTR(PRTIDTR pIdtr
);
214 DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr
)
216 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
218 # elif RT_INLINE_ASM_GNU_STYLE
219 __asm__
__volatile__("sidt %0" : "=m" (*pIdtr
));
223 # ifdef RT_ARCH_AMD64
237 * Gets the content of the IDTR.LIMIT CPU register.
238 * @returns IDTR limit.
240 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
241 RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMGetIdtrLimit(void);
243 DECLINLINE(uint16_t) ASMGetIdtrLimit(void)
245 RTIDTRALIGNED TmpIdtr
;
246 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
248 # elif RT_INLINE_ASM_GNU_STYLE
249 __asm__
__volatile__("sidt %0" : "=m" (TmpIdtr
.s
.Idtr
));
253 sidt
[TmpIdtr
.s
.Idtr
]
256 return TmpIdtr
.s
.Idtr
.cbIdt
;
262 * Sets the content of the IDTR CPU register.
263 * @param pIdtr Where to load the IDTR contents from
265 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
266 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetIDTR(const RTIDTR RT_FAR
*pIdtr
);
268 DECLINLINE(void) ASMSetIDTR(const RTIDTR RT_FAR
*pIdtr
)
270 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
271 __lidt((void *)pIdtr
);
272 # elif RT_INLINE_ASM_GNU_STYLE
273 __asm__
__volatile__("lidt %0" : : "m" (*pIdtr
));
277 # ifdef RT_ARCH_AMD64
291 * Gets the content of the GDTR CPU register.
292 * @param pGdtr Where to store the GDTR contents.
294 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
295 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetGDTR(PRTGDTR pGdtr
);
297 DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr
)
299 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
301 # elif RT_INLINE_ASM_GNU_STYLE
302 __asm__
__volatile__("sgdt %0" : "=m" (*pGdtr
));
306 # ifdef RT_ARCH_AMD64
320 * Sets the content of the GDTR CPU register.
321 * @param pGdtr Where to load the GDTR contents from
323 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
324 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetGDTR(const RTGDTR RT_FAR
*pGdtr
);
326 DECLINLINE(void) ASMSetGDTR(const RTGDTR RT_FAR
*pGdtr
)
328 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
329 _lgdt((void *)pGdtr
);
330 # elif RT_INLINE_ASM_GNU_STYLE
331 __asm__
__volatile__("lgdt %0" : : "m" (*pGdtr
));
335 # ifdef RT_ARCH_AMD64
350 * Get the cs register.
353 #if RT_INLINE_ASM_EXTERNAL
354 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetCS(void);
356 DECLINLINE(RTSEL
) ASMGetCS(void)
359 # if RT_INLINE_ASM_GNU_STYLE
360 __asm__
__volatile__("movw %%cs, %0\n\t" : "=r" (SelCS
));
374 * Get the DS register.
377 #if RT_INLINE_ASM_EXTERNAL
378 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetDS(void);
380 DECLINLINE(RTSEL
) ASMGetDS(void)
383 # if RT_INLINE_ASM_GNU_STYLE
384 __asm__
__volatile__("movw %%ds, %0\n\t" : "=r" (SelDS
));
398 * Get the ES register.
401 #if RT_INLINE_ASM_EXTERNAL
402 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetES(void);
404 DECLINLINE(RTSEL
) ASMGetES(void)
407 # if RT_INLINE_ASM_GNU_STYLE
408 __asm__
__volatile__("movw %%es, %0\n\t" : "=r" (SelES
));
422 * Get the FS register.
425 #if RT_INLINE_ASM_EXTERNAL
426 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetFS(void);
428 DECLINLINE(RTSEL
) ASMGetFS(void)
431 # if RT_INLINE_ASM_GNU_STYLE
432 __asm__
__volatile__("movw %%fs, %0\n\t" : "=r" (SelFS
));
447 * Get the FS base register.
448 * @returns FS base address.
450 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
451 DECLASM(uint64_t) ASMGetFSBase(void);
453 DECLINLINE(uint64_t) ASMGetFSBase(void)
455 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
456 return (uint64_t)_readfsbase_u64();
457 # elif RT_INLINE_ASM_GNU_STYLE
459 __asm__
__volatile__("rdfsbase %0\n\t" : "=r" (uFSBase
));
467 * Set the FS base register.
468 * @param uNewBase The new base value.
470 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
471 DECLASM(void) ASMSetFSBase(uint64_t uNewBase
);
473 DECLINLINE(void) ASMSetFSBase(uint64_t uNewBase
)
475 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
476 _writefsbase_u64(uNewBase
);
477 # elif RT_INLINE_ASM_GNU_STYLE
478 __asm__
__volatile__("wrfsbase %0\n\t" : : "r" (uNewBase
));
483 #endif /* RT_ARCH_AMD64 */
486 * Get the GS register.
489 #if RT_INLINE_ASM_EXTERNAL
490 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetGS(void);
492 DECLINLINE(RTSEL
) ASMGetGS(void)
495 # if RT_INLINE_ASM_GNU_STYLE
496 __asm__
__volatile__("movw %%gs, %0\n\t" : "=r" (SelGS
));
511 * Get the GS base register.
512 * @returns GS base address.
514 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
515 DECLASM(uint64_t) ASMGetGSBase(void);
517 DECLINLINE(uint64_t) ASMGetGSBase(void)
519 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
520 return (uint64_t)_readgsbase_u64();
521 # elif RT_INLINE_ASM_GNU_STYLE
523 __asm__
__volatile__("rdgsbase %0\n\t" : "=r" (uGSBase
));
531 * Set the GS base register.
532 * @param uNewBase The new base value.
534 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
535 DECLASM(void) ASMSetGSBase(uint64_t uNewBase
);
537 DECLINLINE(void) ASMSetGSBase(uint64_t uNewBase
)
539 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
540 _writegsbase_u64(uNewBase
);
541 # elif RT_INLINE_ASM_GNU_STYLE
542 __asm__
__volatile__("wrgsbase %0\n\t" : : "r" (uNewBase
));
547 #endif /* RT_ARCH_AMD64 */
551 * Get the SS register.
554 #if RT_INLINE_ASM_EXTERNAL
555 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetSS(void);
557 DECLINLINE(RTSEL
) ASMGetSS(void)
560 # if RT_INLINE_ASM_GNU_STYLE
561 __asm__
__volatile__("movw %%ss, %0\n\t" : "=r" (SelSS
));
575 * Get the TR register.
578 #if RT_INLINE_ASM_EXTERNAL
579 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetTR(void);
581 DECLINLINE(RTSEL
) ASMGetTR(void)
584 # if RT_INLINE_ASM_GNU_STYLE
585 __asm__
__volatile__("str %w0\n\t" : "=r" (SelTR
));
599 * Get the LDTR register.
602 #if RT_INLINE_ASM_EXTERNAL
603 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL
) ASMGetLDTR(void);
605 DECLINLINE(RTSEL
) ASMGetLDTR(void)
608 # if RT_INLINE_ASM_GNU_STYLE
609 __asm__
__volatile__("sldt %w0\n\t" : "=r" (SelLDTR
));
623 * Get the access rights for the segment selector.
625 * @returns The access rights on success or UINT32_MAX on failure.
626 * @param uSel The selector value.
628 * @remarks Using UINT32_MAX for failure is chosen because valid access rights
629 * always have bits 0:7 as 0 (on both Intel & AMD).
631 #if RT_INLINE_ASM_EXTERNAL
632 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetSegAttr(uint32_t uSel
);
634 DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel
)
637 /* LAR only accesses 16-bit of the source operand, but eax for the
638 destination operand is required for getting the full 32-bit access rights. */
639 # if RT_INLINE_ASM_GNU_STYLE
640 __asm__
__volatile__("lar %1, %%eax\n\t"
642 "movl $0xffffffff, %%eax\n\t"
664 * Get the [RE]FLAGS register.
665 * @returns [RE]FLAGS.
667 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
668 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG
) ASMGetFlags(void);
670 DECLINLINE(RTCCUINTREG
) ASMGetFlags(void)
673 # if RT_INLINE_ASM_GNU_STYLE
674 # ifdef RT_ARCH_AMD64
675 __asm__
__volatile__("pushfq\n\t"
679 __asm__
__volatile__("pushfl\n\t"
683 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
684 uFlags
= __readeflags();
688 # ifdef RT_ARCH_AMD64
703 * Set the [RE]FLAGS register.
704 * @param uFlags The new [RE]FLAGS value.
706 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - see __readeflags() above. */
707 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetFlags(RTCCUINTREG uFlags
);
709 DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags
)
711 # if RT_INLINE_ASM_GNU_STYLE
712 # ifdef RT_ARCH_AMD64
713 __asm__
__volatile__("pushq %0\n\t"
717 __asm__
__volatile__("pushl %0\n\t"
721 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
722 __writeeflags(uFlags
);
726 # ifdef RT_ARCH_AMD64
740 * Modifies the [RE]FLAGS register.
741 * @returns Original value.
742 * @param fAndEfl Flags to keep (applied first).
743 * @param fOrEfl Flags to be set.
745 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
746 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG
) ASMChangeFlags(RTCCUINTREG fAndEfl
, RTCCUINTREG fOrEfl
);
748 DECLINLINE(RTCCUINTREG
) ASMChangeFlags(RTCCUINTREG fAndEfl
, RTCCUINTREG fOrEfl
)
751 # if RT_INLINE_ASM_GNU_STYLE
752 # ifdef RT_ARCH_AMD64
753 __asm__
__volatile__("pushfq\n\t"
754 "movq (%%rsp), %0\n\t"
757 "mov %1, (%%rsp)\n\t"
764 __asm__
__volatile__("pushfl\n\t"
765 "movl (%%esp), %0\n\t"
766 "andl %1, (%%esp)\n\t"
767 "orl %2, (%%esp)\n\t"
773 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
774 fOldEfl
= __readeflags();
775 __writeeflags((fOldEfl
& fAndEfl
) | fOrEfl
);
779 # ifdef RT_ARCH_AMD64
808 * Modifies the [RE]FLAGS register by ORing in one or more flags.
809 * @returns Original value.
810 * @param fOrEfl The flags to be set (ORed in).
812 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
813 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG
) ASMAddFlags(RTCCUINTREG fOrEfl
);
815 DECLINLINE(RTCCUINTREG
) ASMAddFlags(RTCCUINTREG fOrEfl
)
818 # if RT_INLINE_ASM_GNU_STYLE
819 # ifdef RT_ARCH_AMD64
820 __asm__
__volatile__("pushfq\n\t"
821 "movq (%%rsp), %0\n\t"
822 "orq %1, (%%rsp)\n\t"
827 __asm__
__volatile__("pushfl\n\t"
828 "movl (%%esp), %0\n\t"
829 "orl %1, (%%esp)\n\t"
834 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
835 fOldEfl
= __readeflags();
836 __writeeflags(fOldEfl
| fOrEfl
);
840 # ifdef RT_ARCH_AMD64
863 * Modifies the [RE]FLAGS register by AND'ing out one or more flags.
864 * @returns Original value.
865 * @param fAndEfl The flags to keep.
867 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
868 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG
) ASMClearFlags(RTCCUINTREG fAndEfl
);
870 DECLINLINE(RTCCUINTREG
) ASMClearFlags(RTCCUINTREG fAndEfl
)
873 # if RT_INLINE_ASM_GNU_STYLE
874 # ifdef RT_ARCH_AMD64
875 __asm__
__volatile__("pushfq\n\t"
876 "movq (%%rsp), %0\n\t"
877 "andq %1, (%%rsp)\n\t"
882 __asm__
__volatile__("pushfl\n\t"
883 "movl (%%esp), %0\n\t"
884 "andl %1, (%%esp)\n\t"
889 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
890 fOldEfl
= __readeflags();
891 __writeeflags(fOldEfl
& fAndEfl
);
895 # ifdef RT_ARCH_AMD64
918 * Gets the content of the CPU timestamp counter register.
922 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
923 RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTSC(void);
925 DECLINLINE(uint64_t) ASMReadTSC(void)
928 # if RT_INLINE_ASM_GNU_STYLE
929 __asm__
__volatile__("rdtsc\n\t" : "=a" (u
.s
.Lo
), "=d" (u
.s
.Hi
));
931 # if RT_INLINE_ASM_USES_INTRIN
948 * Gets the content of the CPU timestamp counter register and the
949 * associated AUX value.
952 * @param puAux Where to store the AUX value.
954 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2008
955 RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR
*puAux
);
957 DECLINLINE(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR
*puAux
)
960 # if RT_INLINE_ASM_GNU_STYLE
961 /* rdtscp is not supported by ancient linux build VM of course :-( */
962 /*__asm__ __volatile__("rdtscp\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux)); */
963 __asm__
__volatile__(".byte 0x0f,0x01,0xf9\n\t" : "=a" (u
.s
.Lo
), "=d" (u
.s
.Hi
), "=c" (*puAux
));
965 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
966 u
.u
= __rdtscp(puAux
);
984 * Performs the cpuid instruction returning all registers.
986 * @param uOperator CPUID operation (eax).
987 * @param pvEAX Where to store eax.
988 * @param pvEBX Where to store ebx.
989 * @param pvECX Where to store ecx.
990 * @param pvEDX Where to store edx.
991 * @remark We're using void pointers to ease the use of special bitfield structures and such.
993 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
994 DECLASM(void) ASMCpuId(uint32_t uOperator
, void RT_FAR
*pvEAX
, void RT_FAR
*pvEBX
, void RT_FAR
*pvECX
, void RT_FAR
*pvEDX
);
996 DECLINLINE(void) ASMCpuId(uint32_t uOperator
, void RT_FAR
*pvEAX
, void RT_FAR
*pvEBX
, void RT_FAR
*pvECX
, void RT_FAR
*pvEDX
)
998 # if RT_INLINE_ASM_GNU_STYLE
999 # ifdef RT_ARCH_AMD64
1000 RTCCUINTREG uRAX
, uRBX
, uRCX
, uRDX
;
1001 __asm__
__volatile__ ("cpuid\n\t"
1006 : "0" (uOperator
), "2" (0));
1007 *(uint32_t RT_FAR
*)pvEAX
= (uint32_t)uRAX
;
1008 *(uint32_t RT_FAR
*)pvEBX
= (uint32_t)uRBX
;
1009 *(uint32_t RT_FAR
*)pvECX
= (uint32_t)uRCX
;
1010 *(uint32_t RT_FAR
*)pvEDX
= (uint32_t)uRDX
;
1012 __asm__
__volatile__ ("xchgl %%ebx, %1\n\t"
1014 "xchgl %%ebx, %1\n\t"
1015 : "=a" (*(uint32_t *)pvEAX
),
1016 "=r" (*(uint32_t *)pvEBX
),
1017 "=c" (*(uint32_t *)pvECX
),
1018 "=d" (*(uint32_t *)pvEDX
)
1019 : "0" (uOperator
), "2" (0));
1022 # elif RT_INLINE_ASM_USES_INTRIN
1024 __cpuid(aInfo
, uOperator
);
1025 *(uint32_t RT_FAR
*)pvEAX
= aInfo
[0];
1026 *(uint32_t RT_FAR
*)pvEBX
= aInfo
[1];
1027 *(uint32_t RT_FAR
*)pvECX
= aInfo
[2];
1028 *(uint32_t RT_FAR
*)pvEDX
= aInfo
[3];
1038 mov eax
, [uOperator
]
1046 *(uint32_t RT_FAR
*)pvEAX
= uEAX
;
1047 *(uint32_t RT_FAR
*)pvEBX
= uEBX
;
1048 *(uint32_t RT_FAR
*)pvECX
= uECX
;
1049 *(uint32_t RT_FAR
*)pvEDX
= uEDX
;
1056 * Performs the CPUID instruction with EAX and ECX input returning ALL output
1059 * @param uOperator CPUID operation (eax).
1060 * @param uIdxECX ecx index
1061 * @param pvEAX Where to store eax.
1062 * @param pvEBX Where to store ebx.
1063 * @param pvECX Where to store ecx.
1064 * @param pvEDX Where to store edx.
1065 * @remark We're using void pointers to ease the use of special bitfield structures and such.
1067 #if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
1068 DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator
, uint32_t uIdxECX
, void RT_FAR
*pvEAX
, void RT_FAR
*pvEBX
, void RT_FAR
*pvECX
, void RT_FAR
*pvEDX
);
1070 DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator
, uint32_t uIdxECX
, void RT_FAR
*pvEAX
, void RT_FAR
*pvEBX
, void RT_FAR
*pvECX
, void RT_FAR
*pvEDX
)
1072 # if RT_INLINE_ASM_GNU_STYLE
1073 # ifdef RT_ARCH_AMD64
1074 RTCCUINTREG uRAX
, uRBX
, uRCX
, uRDX
;
1075 __asm__ ("cpuid\n\t"
1082 *(uint32_t RT_FAR
*)pvEAX
= (uint32_t)uRAX
;
1083 *(uint32_t RT_FAR
*)pvEBX
= (uint32_t)uRBX
;
1084 *(uint32_t RT_FAR
*)pvECX
= (uint32_t)uRCX
;
1085 *(uint32_t RT_FAR
*)pvEDX
= (uint32_t)uRDX
;
1087 __asm__ ("xchgl %%ebx, %1\n\t"
1089 "xchgl %%ebx, %1\n\t"
1090 : "=a" (*(uint32_t *)pvEAX
),
1091 "=r" (*(uint32_t *)pvEBX
),
1092 "=c" (*(uint32_t *)pvECX
),
1093 "=d" (*(uint32_t *)pvEDX
)
1098 # elif RT_INLINE_ASM_USES_INTRIN
1100 __cpuidex(aInfo
, uOperator
, uIdxECX
);
1101 *(uint32_t RT_FAR
*)pvEAX
= aInfo
[0];
1102 *(uint32_t RT_FAR
*)pvEBX
= aInfo
[1];
1103 *(uint32_t RT_FAR
*)pvECX
= aInfo
[2];
1104 *(uint32_t RT_FAR
*)pvEDX
= aInfo
[3];
1114 mov eax
, [uOperator
]
1123 *(uint32_t RT_FAR
*)pvEAX
= uEAX
;
1124 *(uint32_t RT_FAR
*)pvEBX
= uEBX
;
1125 *(uint32_t RT_FAR
*)pvECX
= uECX
;
1126 *(uint32_t RT_FAR
*)pvEDX
= uEDX
;
1133 * CPUID variant that initializes all 4 registers before the CPUID instruction.
1135 * @returns The EAX result value.
1136 * @param uOperator CPUID operation (eax).
1137 * @param uInitEBX The value to assign EBX prior to the CPUID instruction.
1138 * @param uInitECX The value to assign ECX prior to the CPUID instruction.
1139 * @param uInitEDX The value to assign EDX prior to the CPUID instruction.
1140 * @param pvEAX Where to store eax. Optional.
1141 * @param pvEBX Where to store ebx. Optional.
1142 * @param pvECX Where to store ecx. Optional.
1143 * @param pvEDX Where to store edx. Optional.
1145 DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator
, uint32_t uInitEBX
, uint32_t uInitECX
, uint32_t uInitEDX
,
1146 void RT_FAR
*pvEAX
, void RT_FAR
*pvEBX
, void RT_FAR
*pvECX
, void RT_FAR
*pvEDX
);
1150 * Performs the cpuid instruction returning ecx and edx.
1152 * @param uOperator CPUID operation (eax).
1153 * @param pvECX Where to store ecx.
1154 * @param pvEDX Where to store edx.
1155 * @remark We're using void pointers to ease the use of special bitfield structures and such.
1157 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1158 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMCpuId_ECX_EDX(uint32_t uOperator
, void RT_FAR
*pvECX
, void RT_FAR
*pvEDX
);
1160 DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator
, void RT_FAR
*pvECX
, void RT_FAR
*pvEDX
)
1163 ASMCpuId(uOperator
, &uOperator
, &uEBX
, pvECX
, pvEDX
);
1169 * Performs the cpuid instruction returning eax.
1171 * @param uOperator CPUID operation (eax).
1172 * @returns EAX after cpuid operation.
1174 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1175 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EAX(uint32_t uOperator
);
1177 DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator
)
1180 # if RT_INLINE_ASM_GNU_STYLE
1181 # ifdef RT_ARCH_AMD64
1185 : "rbx", "rcx", "rdx");
1186 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1187 __asm__ ("push %%ebx\n\t"
1197 : "edx", "ecx", "ebx");
1200 # elif RT_INLINE_ASM_USES_INTRIN
1202 __cpuid(aInfo
, uOperator
);
1209 mov eax
, [uOperator
]
1215 return (uint32_t)xAX
;
1221 * Performs the cpuid instruction returning ebx.
1223 * @param uOperator CPUID operation (eax).
1224 * @returns EBX after cpuid operation.
1226 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1227 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EBX(uint32_t uOperator
);
1229 DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator
)
1232 # if RT_INLINE_ASM_GNU_STYLE
1233 # ifdef RT_ARCH_AMD64
1240 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1241 __asm__ ("push %%ebx\n\t"
1243 "mov %%ebx, %%edx\n\t"
1257 # elif RT_INLINE_ASM_USES_INTRIN
1259 __cpuid(aInfo
, uOperator
);
1266 mov eax
, [uOperator
]
1272 return (uint32_t)xBX
;
1278 * Performs the cpuid instruction returning ecx.
1280 * @param uOperator CPUID operation (eax).
1281 * @returns ECX after cpuid operation.
1283 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1284 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_ECX(uint32_t uOperator
);
1286 DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator
)
1289 # if RT_INLINE_ASM_GNU_STYLE
1290 # ifdef RT_ARCH_AMD64
1297 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1298 __asm__ ("push %%ebx\n\t"
1314 # elif RT_INLINE_ASM_USES_INTRIN
1316 __cpuid(aInfo
, uOperator
);
1323 mov eax
, [uOperator
]
1329 return (uint32_t)xCX
;
1335 * Performs the cpuid instruction returning edx.
1337 * @param uOperator CPUID operation (eax).
1338 * @returns EDX after cpuid operation.
1340 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1341 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EDX(uint32_t uOperator
);
1343 DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator
)
1346 # if RT_INLINE_ASM_GNU_STYLE
1347 # ifdef RT_ARCH_AMD64
1354 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1355 __asm__ ("push %%ebx\n\t"
1370 # elif RT_INLINE_ASM_USES_INTRIN
1372 __cpuid(aInfo
, uOperator
);
1379 mov eax
, [uOperator
]
1385 return (uint32_t)xDX
;
1391 * Checks if the current CPU supports CPUID.
1393 * @returns true if CPUID is supported.
1396 DECLASM(bool) ASMHasCpuId(void);
1398 DECLINLINE(bool) ASMHasCpuId(void)
1400 # ifdef RT_ARCH_AMD64
1401 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
1402 # else /* !RT_ARCH_AMD64 */
1404 # if RT_INLINE_ASM_GNU_STYLE
1407 __asm__ ("pushf\n\t"
1410 "xorl $0x200000, %1\n\t"
1419 : "=m" (fRet
), "=r" (u1
), "=r" (u2
));
1438 # endif /* !RT_ARCH_AMD64 */
1444 * Gets the APIC ID of the current CPU.
1446 * @returns the APIC ID.
1448 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1449 RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMGetApicId(void);
1451 DECLINLINE(uint8_t) ASMGetApicId(void)
1454 # if RT_INLINE_ASM_GNU_STYLE
1455 # ifdef RT_ARCH_AMD64
1457 __asm__
__volatile__ ("cpuid"
1462 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1464 __asm__
__volatile__ ("mov %%ebx,%1\n\t"
1466 "xchgl %%ebx,%1\n\t"
1473 __asm__
__volatile__ ("cpuid"
1480 # elif RT_INLINE_ASM_USES_INTRIN
1495 return (uint8_t)(xBX
>> 24);
1501 * Gets the APIC ID of the current CPU using leaf 0xb.
1503 * @returns the APIC ID.
1505 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2010 /*?*/
1506 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetApicIdExt0B(void);
1508 DECLINLINE(uint32_t) ASMGetApicIdExt0B(void)
1510 # if RT_INLINE_ASM_GNU_STYLE
1512 # ifdef RT_ARCH_AMD64
1513 RTCCUINTREG uSpillEax
, uSpillEcx
;
1514 __asm__
__volatile__ ("cpuid"
1521 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1522 RTCCUINTREG uSpillEax
, uSpillEcx
, uSpillEbx
;
1523 __asm__
__volatile__ ("mov %%ebx,%2\n\t"
1525 "xchgl %%ebx,%2\n\t"
1533 RTCCUINTREG uSpillEax
, uSpillEcx
;
1534 __asm__
__volatile__ ("cpuid"
1542 return (uint32_t)xDX
;
1544 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/
1547 __cpuidex(aInfo
, 0xb, 0);
1561 return (uint32_t)xDX
;
1568 * Gets the APIC ID of the current CPU using leaf 8000001E.
1570 * @returns the APIC ID.
1572 DECLINLINE(uint32_t) ASMGetApicIdExt8000001E(void)
1574 return ASMCpuId_EAX(0x8000001e);
1579 * Tests if this is a genuine Intel CPU.
1581 * @returns true/false.
1582 * @remarks ASSUMES that cpuid is supported by the CPU.
1584 DECLINLINE(bool) ASMIsIntelCpu(void)
1586 uint32_t uEAX
, uEBX
, uECX
, uEDX
;
1587 ASMCpuId(0, &uEAX
, &uEBX
, &uECX
, &uEDX
);
1588 return RTX86IsIntelCpu(uEBX
, uECX
, uEDX
);
1593 * Tests if this is an authentic AMD CPU.
1595 * @returns true/false.
1596 * @remarks ASSUMES that cpuid is supported by the CPU.
1598 DECLINLINE(bool) ASMIsAmdCpu(void)
1600 uint32_t uEAX
, uEBX
, uECX
, uEDX
;
1601 ASMCpuId(0, &uEAX
, &uEBX
, &uECX
, &uEDX
);
1602 return RTX86IsAmdCpu(uEBX
, uECX
, uEDX
);
1607 * Tests if this is a centaur hauling VIA CPU.
1609 * @returns true/false.
1610 * @remarks ASSUMES that cpuid is supported by the CPU.
1612 DECLINLINE(bool) ASMIsViaCentaurCpu(void)
1614 uint32_t uEAX
, uEBX
, uECX
, uEDX
;
1615 ASMCpuId(0, &uEAX
, &uEBX
, &uECX
, &uEDX
);
1616 return RTX86IsViaCentaurCpu(uEBX
, uECX
, uEDX
);
1621 * Tests if this is a Shanghai CPU.
1623 * @returns true/false.
1624 * @remarks ASSUMES that cpuid is supported by the CPU.
1626 DECLINLINE(bool) ASMIsShanghaiCpu(void)
1628 uint32_t uEAX
, uEBX
, uECX
, uEDX
;
1629 ASMCpuId(0, &uEAX
, &uEBX
, &uECX
, &uEDX
);
1630 return RTX86IsShanghaiCpu(uEBX
, uECX
, uEDX
);
1635 * Tests if this is a genuine Hygon CPU.
1637 * @returns true/false.
1638 * @remarks ASSUMES that cpuid is supported by the CPU.
1640 DECLINLINE(bool) ASMIsHygonCpu(void)
1642 uint32_t uEAX
, uEBX
, uECX
, uEDX
;
1643 ASMCpuId(0, &uEAX
, &uEBX
, &uECX
, &uEDX
);
1644 return RTX86IsHygonCpu(uEBX
, uECX
, uEDX
);
1652 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1653 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetCR0(void);
1655 DECLINLINE(RTCCUINTXREG
) ASMGetCR0(void)
1658 # if RT_INLINE_ASM_USES_INTRIN
1661 # elif RT_INLINE_ASM_GNU_STYLE
1662 # ifdef RT_ARCH_AMD64
1663 __asm__
__volatile__("movq %%cr0, %0\t\n" : "=r" (uCR0
));
1665 __asm__
__volatile__("movl %%cr0, %0\t\n" : "=r" (uCR0
));
1670 # ifdef RT_ARCH_AMD64
1685 * Sets the CR0 register.
1686 * @param uCR0 The new CR0 value.
1688 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1689 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR0(RTCCUINTXREG uCR0
);
1691 DECLINLINE(void) ASMSetCR0(RTCCUINTXREG uCR0
)
1693 # if RT_INLINE_ASM_USES_INTRIN
1696 # elif RT_INLINE_ASM_GNU_STYLE
1697 # ifdef RT_ARCH_AMD64
1698 __asm__
__volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0
));
1700 __asm__
__volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0
));
1705 # ifdef RT_ARCH_AMD64
1722 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1723 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetCR2(void);
1725 DECLINLINE(RTCCUINTXREG
) ASMGetCR2(void)
1728 # if RT_INLINE_ASM_USES_INTRIN
1731 # elif RT_INLINE_ASM_GNU_STYLE
1732 # ifdef RT_ARCH_AMD64
1733 __asm__
__volatile__("movq %%cr2, %0\t\n" : "=r" (uCR2
));
1735 __asm__
__volatile__("movl %%cr2, %0\t\n" : "=r" (uCR2
));
1740 # ifdef RT_ARCH_AMD64
1755 * Sets the CR2 register.
1756 * @param uCR2 The new CR0 value.
1758 #if RT_INLINE_ASM_EXTERNAL
1759 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR2(RTCCUINTXREG uCR2
);
1761 DECLINLINE(void) ASMSetCR2(RTCCUINTXREG uCR2
)
1763 # if RT_INLINE_ASM_GNU_STYLE
1764 # ifdef RT_ARCH_AMD64
1765 __asm__
__volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2
));
1767 __asm__
__volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2
));
1772 # ifdef RT_ARCH_AMD64
1789 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1790 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetCR3(void);
1792 DECLINLINE(RTCCUINTXREG
) ASMGetCR3(void)
1795 # if RT_INLINE_ASM_USES_INTRIN
1798 # elif RT_INLINE_ASM_GNU_STYLE
1799 # ifdef RT_ARCH_AMD64
1800 __asm__
__volatile__("movq %%cr3, %0\t\n" : "=r" (uCR3
));
1802 __asm__
__volatile__("movl %%cr3, %0\t\n" : "=r" (uCR3
));
1807 # ifdef RT_ARCH_AMD64
1822 * Sets the CR3 register.
1824 * @param uCR3 New CR3 value.
1826 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1827 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR3(RTCCUINTXREG uCR3
);
1829 DECLINLINE(void) ASMSetCR3(RTCCUINTXREG uCR3
)
1831 # if RT_INLINE_ASM_USES_INTRIN
1834 # elif RT_INLINE_ASM_GNU_STYLE
1835 # ifdef RT_ARCH_AMD64
1836 __asm__
__volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3
));
1838 __asm__
__volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3
));
1843 # ifdef RT_ARCH_AMD64
1857 * Reloads the CR3 register.
1859 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1860 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMReloadCR3(void);
1862 DECLINLINE(void) ASMReloadCR3(void)
1864 # if RT_INLINE_ASM_USES_INTRIN
1865 __writecr3(__readcr3());
1867 # elif RT_INLINE_ASM_GNU_STYLE
1869 # ifdef RT_ARCH_AMD64
1870 __asm__
__volatile__("movq %%cr3, %0\n\t"
1871 "movq %0, %%cr3\n\t"
1874 __asm__
__volatile__("movl %%cr3, %0\n\t"
1875 "movl %0, %%cr3\n\t"
1881 # ifdef RT_ARCH_AMD64
1898 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1899 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetCR4(void);
1901 DECLINLINE(RTCCUINTXREG
) ASMGetCR4(void)
1904 # if RT_INLINE_ASM_USES_INTRIN
1907 # elif RT_INLINE_ASM_GNU_STYLE
1908 # ifdef RT_ARCH_AMD64
1909 __asm__
__volatile__("movq %%cr4, %0\t\n" : "=r" (uCR4
));
1911 __asm__
__volatile__("movl %%cr4, %0\t\n" : "=r" (uCR4
));
1916 # ifdef RT_ARCH_AMD64
1920 push eax
/* just in case */
1936 * Sets the CR4 register.
1938 * @param uCR4 New CR4 value.
1940 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1941 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR4(RTCCUINTXREG uCR4
);
1943 DECLINLINE(void) ASMSetCR4(RTCCUINTXREG uCR4
)
1945 # if RT_INLINE_ASM_USES_INTRIN
1948 # elif RT_INLINE_ASM_GNU_STYLE
1949 # ifdef RT_ARCH_AMD64
1950 __asm__
__volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4
));
1952 __asm__
__volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4
));
1957 # ifdef RT_ARCH_AMD64
1964 _emit
0xE0 /* mov cr4, eax */
1975 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1977 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1978 DECLASM(RTCCUINTXREG
) ASMGetCR8(void);
1980 DECLINLINE(RTCCUINTXREG
) ASMGetCR8(void)
1982 # ifdef RT_ARCH_AMD64
1984 # if RT_INLINE_ASM_USES_INTRIN
1987 # elif RT_INLINE_ASM_GNU_STYLE
1988 __asm__
__volatile__("movq %%cr8, %0\t\n" : "=r" (uCR8
));
1997 # else /* !RT_ARCH_AMD64 */
1999 # endif /* !RT_ARCH_AMD64 */
2005 * Get XCR0 (eXtended feature Control Register 0).
2008 DECLASM(uint64_t) ASMGetXcr0(void);
2011 * Sets the XCR0 register.
2012 * @param uXcr0 The new XCR0 value.
2014 DECLASM(void) ASMSetXcr0(uint64_t uXcr0
);
2016 struct X86XSAVEAREA
;
2018 * Save extended CPU state.
2019 * @param pXStateArea Where to save the state.
2020 * @param fComponents Which state components to save.
2022 DECLASM(void) ASMXSave(struct X86XSAVEAREA RT_FAR
*pXStateArea
, uint64_t fComponents
);
2025 * Loads extended CPU state.
2026 * @param pXStateArea Where to load the state from.
2027 * @param fComponents Which state components to load.
2029 DECLASM(void) ASMXRstor(struct X86XSAVEAREA
const RT_FAR
*pXStateArea
, uint64_t fComponents
);
2034 * Save FPU and SSE CPU state.
2035 * @param pXStateArea Where to save the state.
2037 DECLASM(void) ASMFxSave(struct X86FXSTATE RT_FAR
*pXStateArea
);
2040 * Load FPU and SSE CPU state.
2041 * @param pXStateArea Where to load the state from.
2043 DECLASM(void) ASMFxRstor(struct X86FXSTATE
const RT_FAR
*pXStateArea
);
2047 * Enables interrupts (EFLAGS.IF).
2049 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2050 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntEnable(void);
2052 DECLINLINE(void) ASMIntEnable(void)
2054 # if RT_INLINE_ASM_GNU_STYLE
2056 # elif RT_INLINE_ASM_USES_INTRIN
2066 * Disables interrupts (!EFLAGS.IF).
2068 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2069 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntDisable(void);
2071 DECLINLINE(void) ASMIntDisable(void)
2073 # if RT_INLINE_ASM_GNU_STYLE
2075 # elif RT_INLINE_ASM_USES_INTRIN
2085 * Disables interrupts and returns previous xFLAGS.
2087 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2088 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG
) ASMIntDisableFlags(void);
2090 DECLINLINE(RTCCUINTREG
) ASMIntDisableFlags(void)
2093 # if RT_INLINE_ASM_GNU_STYLE
2094 # ifdef RT_ARCH_AMD64
2095 __asm__
__volatile__("pushfq\n\t"
2100 __asm__
__volatile__("pushfl\n\t"
2105 # elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
2106 xFlags
= ASMGetFlags();
2121 * Are interrupts enabled?
2123 * @returns true / false.
2125 DECLINLINE(bool) ASMIntAreEnabled(void)
2127 RTCCUINTREG uFlags
= ASMGetFlags();
2128 return uFlags
& 0x200 /* X86_EFL_IF */ ? true : false;
2133 * Halts the CPU until interrupted.
2135 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2005
2136 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMHalt(void);
2138 DECLINLINE(void) ASMHalt(void)
2140 # if RT_INLINE_ASM_GNU_STYLE
2141 __asm__
__volatile__("hlt\n\t");
2142 # elif RT_INLINE_ASM_USES_INTRIN
2154 * Reads a machine specific register.
2156 * @returns Register content.
2157 * @param uRegister Register to read.
2159 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2160 RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMRdMsr(uint32_t uRegister
);
2162 DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister
)
2165 # if RT_INLINE_ASM_GNU_STYLE
2166 __asm__
__volatile__("rdmsr\n\t"
2171 # elif RT_INLINE_ASM_USES_INTRIN
2172 u
.u
= __readmsr(uRegister
);
2177 mov ecx
, [uRegister
]
2190 * Writes a machine specific register.
2192 * @param uRegister Register to write to.
2193 * @param u64Val Value to write.
2195 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2196 RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsr(uint32_t uRegister
, uint64_t u64Val
);
2198 DECLINLINE(void) ASMWrMsr(uint32_t uRegister
, uint64_t u64Val
)
2203 # if RT_INLINE_ASM_GNU_STYLE
2204 __asm__
__volatile__("wrmsr\n\t"
2209 # elif RT_INLINE_ASM_USES_INTRIN
2210 __writemsr(uRegister
, u
.u
);
2215 mov ecx
, [uRegister
]
2226 * Reads a machine specific register, extended version (for AMD).
2228 * @returns Register content.
2229 * @param uRegister Register to read.
2230 * @param uXDI RDI/EDI value.
2232 #if RT_INLINE_ASM_EXTERNAL
2233 RT_ASM_DECL_PRAGMA_WATCOM_386(uint64_t) ASMRdMsrEx(uint32_t uRegister
, RTCCUINTXREG uXDI
);
2235 DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister
, RTCCUINTXREG uXDI
)
2238 # if RT_INLINE_ASM_GNU_STYLE
2239 __asm__
__volatile__("rdmsr\n\t"
2248 mov ecx
, [uRegister
]
2263 * Writes a machine specific register, extended version (for AMD).
2265 * @param uRegister Register to write to.
2266 * @param uXDI RDI/EDI value.
2267 * @param u64Val Value to write.
2269 #if RT_INLINE_ASM_EXTERNAL
2270 RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsrEx(uint32_t uRegister
, RTCCUINTXREG uXDI
, uint64_t u64Val
);
2272 DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister
, RTCCUINTXREG uXDI
, uint64_t u64Val
)
2277 # if RT_INLINE_ASM_GNU_STYLE
2278 __asm__
__volatile__("wrmsr\n\t"
2287 mov ecx
, [uRegister
]
2301 * Reads low part of a machine specific register.
2303 * @returns Register content.
2304 * @param uRegister Register to read.
2306 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2307 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_Low(uint32_t uRegister
);
2309 DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister
)
2312 # if RT_INLINE_ASM_GNU_STYLE
2313 __asm__
__volatile__("rdmsr\n\t"
2318 # elif RT_INLINE_ASM_USES_INTRIN
2319 u32
= (uint32_t)__readmsr(uRegister
);
2324 mov ecx
, [uRegister
]
2336 * Reads high part of a machine specific register.
2338 * @returns Register content.
2339 * @param uRegister Register to read.
2341 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2342 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_High(uint32_t uRegister
);
2344 DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister
)
2347 # if RT_INLINE_ASM_GNU_STYLE
2348 __asm__
__volatile__("rdmsr\n\t"
2353 # elif RT_INLINE_ASM_USES_INTRIN
2354 u32
= (uint32_t)(__readmsr(uRegister
) >> 32);
2359 mov ecx
, [uRegister
]
2375 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2376 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetDR0(void);
2378 DECLINLINE(RTCCUINTXREG
) ASMGetDR0(void)
2381 # if RT_INLINE_ASM_USES_INTRIN
2383 # elif RT_INLINE_ASM_GNU_STYLE
2384 # ifdef RT_ARCH_AMD64
2385 __asm__
__volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0
));
2387 __asm__
__volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0
));
2392 # ifdef RT_ARCH_AMD64
2411 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2412 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetDR1(void);
2414 DECLINLINE(RTCCUINTXREG
) ASMGetDR1(void)
2417 # if RT_INLINE_ASM_USES_INTRIN
2419 # elif RT_INLINE_ASM_GNU_STYLE
2420 # ifdef RT_ARCH_AMD64
2421 __asm__
__volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1
));
2423 __asm__
__volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1
));
2428 # ifdef RT_ARCH_AMD64
2447 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2448 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetDR2(void);
2450 DECLINLINE(RTCCUINTXREG
) ASMGetDR2(void)
2453 # if RT_INLINE_ASM_USES_INTRIN
2455 # elif RT_INLINE_ASM_GNU_STYLE
2456 # ifdef RT_ARCH_AMD64
2457 __asm__
__volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2
));
2459 __asm__
__volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2
));
2464 # ifdef RT_ARCH_AMD64
2483 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2484 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetDR3(void);
2486 DECLINLINE(RTCCUINTXREG
) ASMGetDR3(void)
2489 # if RT_INLINE_ASM_USES_INTRIN
2491 # elif RT_INLINE_ASM_GNU_STYLE
2492 # ifdef RT_ARCH_AMD64
2493 __asm__
__volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3
));
2495 __asm__
__volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3
));
2500 # ifdef RT_ARCH_AMD64
2519 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2520 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetDR6(void);
2522 DECLINLINE(RTCCUINTXREG
) ASMGetDR6(void)
2525 # if RT_INLINE_ASM_USES_INTRIN
2527 # elif RT_INLINE_ASM_GNU_STYLE
2528 # ifdef RT_ARCH_AMD64
2529 __asm__
__volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6
));
2531 __asm__
__volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6
));
2536 # ifdef RT_ARCH_AMD64
2551 * Reads and clears DR6.
2555 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2556 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetAndClearDR6(void);
2558 DECLINLINE(RTCCUINTXREG
) ASMGetAndClearDR6(void)
2561 # if RT_INLINE_ASM_USES_INTRIN
2563 __writedr(6, 0xffff0ff0U
); /* 31-16 and 4-11 are 1's, 12 and 63-31 are zero. */
2564 # elif RT_INLINE_ASM_GNU_STYLE
2565 RTCCUINTXREG uNewValue
= 0xffff0ff0U
;/* 31-16 and 4-11 are 1's, 12 and 63-31 are zero. */
2566 # ifdef RT_ARCH_AMD64
2567 __asm__
__volatile__("movq %%dr6, %0\n\t"
2568 "movq %1, %%dr6\n\t"
2572 __asm__
__volatile__("movl %%dr6, %0\n\t"
2573 "movl %1, %%dr6\n\t"
2580 # ifdef RT_ARCH_AMD64
2584 mov ecx
, 0ffff0ff0h
; /* 31-16 and 4-11 are 1's, 12 and 63-31 are zero. */
2589 mov ecx
, 0ffff0ff0h
; /* 31-16 and 4-11 are 1's, 12 is zero. */
2604 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2605 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG
) ASMGetDR7(void);
2607 DECLINLINE(RTCCUINTXREG
) ASMGetDR7(void)
2610 # if RT_INLINE_ASM_USES_INTRIN
2612 # elif RT_INLINE_ASM_GNU_STYLE
2613 # ifdef RT_ARCH_AMD64
2614 __asm__
__volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7
));
2616 __asm__
__volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7
));
2621 # ifdef RT_ARCH_AMD64
2638 * @param uDRVal Debug register value to write
2640 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2641 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR0(RTCCUINTXREG uDRVal
);
2643 DECLINLINE(void) ASMSetDR0(RTCCUINTXREG uDRVal
)
2645 # if RT_INLINE_ASM_USES_INTRIN
2646 __writedr(0, uDRVal
);
2647 # elif RT_INLINE_ASM_GNU_STYLE
2648 # ifdef RT_ARCH_AMD64
2649 __asm__
__volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal
));
2651 __asm__
__volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal
));
2656 # ifdef RT_ARCH_AMD64
2672 * @param uDRVal Debug register value to write
2674 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2675 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR1(RTCCUINTXREG uDRVal
);
2677 DECLINLINE(void) ASMSetDR1(RTCCUINTXREG uDRVal
)
2679 # if RT_INLINE_ASM_USES_INTRIN
2680 __writedr(1, uDRVal
);
2681 # elif RT_INLINE_ASM_GNU_STYLE
2682 # ifdef RT_ARCH_AMD64
2683 __asm__
__volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal
));
2685 __asm__
__volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal
));
2690 # ifdef RT_ARCH_AMD64
2706 * @param uDRVal Debug register value to write
2708 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2709 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR2(RTCCUINTXREG uDRVal
);
2711 DECLINLINE(void) ASMSetDR2(RTCCUINTXREG uDRVal
)
2713 # if RT_INLINE_ASM_USES_INTRIN
2714 __writedr(2, uDRVal
);
2715 # elif RT_INLINE_ASM_GNU_STYLE
2716 # ifdef RT_ARCH_AMD64
2717 __asm__
__volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal
));
2719 __asm__
__volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal
));
2724 # ifdef RT_ARCH_AMD64
2740 * @param uDRVal Debug register value to write
2742 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2743 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR3(RTCCUINTXREG uDRVal
);
2745 DECLINLINE(void) ASMSetDR3(RTCCUINTXREG uDRVal
)
2747 # if RT_INLINE_ASM_USES_INTRIN
2748 __writedr(3, uDRVal
);
2749 # elif RT_INLINE_ASM_GNU_STYLE
2750 # ifdef RT_ARCH_AMD64
2751 __asm__
__volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal
));
2753 __asm__
__volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal
));
2758 # ifdef RT_ARCH_AMD64
2774 * @param uDRVal Debug register value to write
2776 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2777 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR6(RTCCUINTXREG uDRVal
);
2779 DECLINLINE(void) ASMSetDR6(RTCCUINTXREG uDRVal
)
2781 # if RT_INLINE_ASM_USES_INTRIN
2782 __writedr(6, uDRVal
);
2783 # elif RT_INLINE_ASM_GNU_STYLE
2784 # ifdef RT_ARCH_AMD64
2785 __asm__
__volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal
));
2787 __asm__
__volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal
));
2792 # ifdef RT_ARCH_AMD64
2808 * @param uDRVal Debug register value to write
2810 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2811 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR7(RTCCUINTXREG uDRVal
);
2813 DECLINLINE(void) ASMSetDR7(RTCCUINTXREG uDRVal
)
2815 # if RT_INLINE_ASM_USES_INTRIN
2816 __writedr(7, uDRVal
);
2817 # elif RT_INLINE_ASM_GNU_STYLE
2818 # ifdef RT_ARCH_AMD64
2819 __asm__
__volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal
));
2821 __asm__
__volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal
));
2826 # ifdef RT_ARCH_AMD64
2840 * Writes a 8-bit unsigned integer to an I/O port, ordered.
2842 * @param Port I/O port to write to.
2843 * @param u8 8-bit integer to write.
2845 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2846 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU8(RTIOPORT Port
, uint8_t u8
);
2848 DECLINLINE(void) ASMOutU8(RTIOPORT Port
, uint8_t u8
)
2850 # if RT_INLINE_ASM_GNU_STYLE
2851 __asm__
__volatile__("outb %b1, %w0\n\t"
2855 # elif RT_INLINE_ASM_USES_INTRIN
2856 __outbyte(Port
, u8
);
2871 * Reads a 8-bit unsigned integer from an I/O port, ordered.
2873 * @returns 8-bit integer.
2874 * @param Port I/O port to read from.
2876 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2877 RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMInU8(RTIOPORT Port
);
2879 DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port
)
2882 # if RT_INLINE_ASM_GNU_STYLE
2883 __asm__
__volatile__("inb %w1, %b0\n\t"
2887 # elif RT_INLINE_ASM_USES_INTRIN
2888 u8
= __inbyte(Port
);
2904 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2906 * @param Port I/O port to write to.
2907 * @param u16 16-bit integer to write.
2909 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2910 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU16(RTIOPORT Port
, uint16_t u16
);
2912 DECLINLINE(void) ASMOutU16(RTIOPORT Port
, uint16_t u16
)
2914 # if RT_INLINE_ASM_GNU_STYLE
2915 __asm__
__volatile__("outw %w1, %w0\n\t"
2919 # elif RT_INLINE_ASM_USES_INTRIN
2920 __outword(Port
, u16
);
2935 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2937 * @returns 16-bit integer.
2938 * @param Port I/O port to read from.
2940 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2941 RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMInU16(RTIOPORT Port
);
2943 DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port
)
2946 # if RT_INLINE_ASM_GNU_STYLE
2947 __asm__
__volatile__("inw %w1, %w0\n\t"
2951 # elif RT_INLINE_ASM_USES_INTRIN
2952 u16
= __inword(Port
);
2968 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2970 * @param Port I/O port to write to.
2971 * @param u32 32-bit integer to write.
2973 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2974 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU32(RTIOPORT Port
, uint32_t u32
);
2976 DECLINLINE(void) ASMOutU32(RTIOPORT Port
, uint32_t u32
)
2978 # if RT_INLINE_ASM_GNU_STYLE
2979 __asm__
__volatile__("outl %1, %w0\n\t"
2983 # elif RT_INLINE_ASM_USES_INTRIN
2984 __outdword(Port
, u32
);
2999 * Reads a 32-bit unsigned integer from an I/O port, ordered.
3001 * @returns 32-bit integer.
3002 * @param Port I/O port to read from.
3004 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3005 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMInU32(RTIOPORT Port
);
3007 DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port
)
3010 # if RT_INLINE_ASM_GNU_STYLE
3011 __asm__
__volatile__("inl %w1, %0\n\t"
3015 # elif RT_INLINE_ASM_USES_INTRIN
3016 u32
= __indword(Port
);
3032 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
3034 * @param Port I/O port to write to.
3035 * @param pau8 Pointer to the string buffer.
3036 * @param c The number of items to write.
3038 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3039 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU8(RTIOPORT Port
, uint8_t const RT_FAR
*pau8
, size_t c
);
3041 DECLINLINE(void) ASMOutStrU8(RTIOPORT Port
, uint8_t const RT_FAR
*pau8
, size_t c
)
3043 # if RT_INLINE_ASM_GNU_STYLE
3044 __asm__
__volatile__("rep; outsb\n\t"
3049 # elif RT_INLINE_ASM_USES_INTRIN
3050 __outbytestring(Port
, (unsigned char RT_FAR
*)pau8
, (unsigned long)c
);
3068 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
3070 * @param Port I/O port to read from.
3071 * @param pau8 Pointer to the string buffer (output).
3072 * @param c The number of items to read.
3074 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3075 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU8(RTIOPORT Port
, uint8_t RT_FAR
*pau8
, size_t c
);
3077 DECLINLINE(void) ASMInStrU8(RTIOPORT Port
, uint8_t RT_FAR
*pau8
, size_t c
)
3079 # if RT_INLINE_ASM_GNU_STYLE
3080 __asm__
__volatile__("rep; insb\n\t"
3085 # elif RT_INLINE_ASM_USES_INTRIN
3086 __inbytestring(Port
, pau8
, (unsigned long)c
);
3104 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
3106 * @param Port I/O port to write to.
3107 * @param pau16 Pointer to the string buffer.
3108 * @param c The number of items to write.
3110 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3111 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU16(RTIOPORT Port
, uint16_t const RT_FAR
*pau16
, size_t c
);
3113 DECLINLINE(void) ASMOutStrU16(RTIOPORT Port
, uint16_t const RT_FAR
*pau16
, size_t c
)
3115 # if RT_INLINE_ASM_GNU_STYLE
3116 __asm__
__volatile__("rep; outsw\n\t"
3121 # elif RT_INLINE_ASM_USES_INTRIN
3122 __outwordstring(Port
, (unsigned short RT_FAR
*)pau16
, (unsigned long)c
);
3140 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
3142 * @param Port I/O port to read from.
3143 * @param pau16 Pointer to the string buffer (output).
3144 * @param c The number of items to read.
3146 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3147 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU16(RTIOPORT Port
, uint16_t RT_FAR
*pau16
, size_t c
);
3149 DECLINLINE(void) ASMInStrU16(RTIOPORT Port
, uint16_t RT_FAR
*pau16
, size_t c
)
3151 # if RT_INLINE_ASM_GNU_STYLE
3152 __asm__
__volatile__("rep; insw\n\t"
3157 # elif RT_INLINE_ASM_USES_INTRIN
3158 __inwordstring(Port
, pau16
, (unsigned long)c
);
3176 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
3178 * @param Port I/O port to write to.
3179 * @param pau32 Pointer to the string buffer.
3180 * @param c The number of items to write.
3182 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3183 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU32(RTIOPORT Port
, uint32_t const RT_FAR
*pau32
, size_t c
);
3185 DECLINLINE(void) ASMOutStrU32(RTIOPORT Port
, uint32_t const RT_FAR
*pau32
, size_t c
)
3187 # if RT_INLINE_ASM_GNU_STYLE
3188 __asm__
__volatile__("rep; outsl\n\t"
3193 # elif RT_INLINE_ASM_USES_INTRIN
3194 __outdwordstring(Port
, (unsigned long RT_FAR
*)pau32
, (unsigned long)c
);
3212 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
3214 * @param Port I/O port to read from.
3215 * @param pau32 Pointer to the string buffer (output).
3216 * @param c The number of items to read.
3218 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3219 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU32(RTIOPORT Port
, uint32_t RT_FAR
*pau32
, size_t c
);
3221 DECLINLINE(void) ASMInStrU32(RTIOPORT Port
, uint32_t RT_FAR
*pau32
, size_t c
)
3223 # if RT_INLINE_ASM_GNU_STYLE
3224 __asm__
__volatile__("rep; insl\n\t"
3229 # elif RT_INLINE_ASM_USES_INTRIN
3230 __indwordstring(Port
, (unsigned long RT_FAR
*)pau32
, (unsigned long)c
);
3250 * @param uPtr Address of the page to invalidate.
3252 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3253 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidatePage(RTCCUINTXREG uPtr
);
3255 DECLINLINE(void) ASMInvalidatePage(RTCCUINTXREG uPtr
)
3257 # if RT_INLINE_ASM_USES_INTRIN
3258 __invlpg((void RT_FAR
*)uPtr
);
3260 # elif RT_INLINE_ASM_GNU_STYLE
3261 __asm__
__volatile__("invlpg %0\n\t"
3262 : : "m" (*(uint8_t RT_FAR
*)(uintptr_t)uPtr
));
3266 # ifdef RT_ARCH_AMD64
3280 * Write back the internal caches and invalidate them.
3282 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3283 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMWriteBackAndInvalidateCaches(void);
3285 DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
3287 # if RT_INLINE_ASM_USES_INTRIN
3290 # elif RT_INLINE_ASM_GNU_STYLE
3291 __asm__
__volatile__("wbinvd");
3303 * Invalidate internal and (perhaps) external caches without first
3304 * flushing dirty cache lines. Use with extreme care.
3306 #if RT_INLINE_ASM_EXTERNAL
3307 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidateInternalCaches(void);
3309 DECLINLINE(void) ASMInvalidateInternalCaches(void)
3311 # if RT_INLINE_ASM_GNU_STYLE
3312 __asm__
__volatile__("invd");
3324 * Memory load/store fence, waits for any pending writes and reads to complete.
3325 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
3327 DECLINLINE(void) ASMMemoryFenceSSE2(void)
3329 #if RT_INLINE_ASM_GNU_STYLE
3330 __asm__
__volatile__ (".byte 0x0f,0xae,0xf0\n\t");
3331 #elif RT_INLINE_ASM_USES_INTRIN
3345 * Memory store fence, waits for any writes to complete.
3346 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
3348 DECLINLINE(void) ASMWriteFenceSSE(void)
3350 #if RT_INLINE_ASM_GNU_STYLE
3351 __asm__
__volatile__ (".byte 0x0f,0xae,0xf8\n\t");
3352 #elif RT_INLINE_ASM_USES_INTRIN
3366 * Memory load fence, waits for any pending reads to complete.
3367 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
3369 DECLINLINE(void) ASMReadFenceSSE2(void)
3371 #if RT_INLINE_ASM_GNU_STYLE
3372 __asm__
__volatile__ (".byte 0x0f,0xae,0xe8\n\t");
3373 #elif RT_INLINE_ASM_USES_INTRIN
3385 #if !defined(_MSC_VER) || !defined(RT_ARCH_AMD64)
3388 * Clear the AC bit in the EFLAGS register.
3389 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
3390 * Requires to be executed in R0.
3392 DECLINLINE(void) ASMClearAC(void)
3394 #if RT_INLINE_ASM_GNU_STYLE
3395 __asm__
__volatile__ (".byte 0x0f,0x01,0xca\n\t");
3408 * Set the AC bit in the EFLAGS register.
3409 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
3410 * Requires to be executed in R0.
3412 DECLINLINE(void) ASMSetAC(void)
3414 #if RT_INLINE_ASM_GNU_STYLE
3415 __asm__
__volatile__ (".byte 0x0f,0x01,0xcb\n\t");
3426 #endif /* !_MSC_VER || !RT_ARCH_AMD64 */
3430 * Include #pragma aux definitions for Watcom C/C++.
3432 #if defined(__WATCOMC__) && ARCH_BITS == 16
3433 # define IPRT_ASM_AMD64_X86_WATCOM_16_INSTANTIATE
3434 # undef IPRT_INCLUDED_asm_amd64_x86_watcom_16_h
3435 # include "asm-amd64-x86-watcom-16.h"
3436 #elif defined(__WATCOMC__) && ARCH_BITS == 32
3437 # define IPRT_ASM_AMD64_X86_WATCOM_32_INSTANTIATE
3438 # undef IPRT_INCLUDED_asm_amd64_x86_watcom_32_h
3439 # include "asm-amd64-x86-watcom-32.h"
3444 #endif /* !IPRT_INCLUDED_asm_amd64_x86_h */