/** @file
 * IPRT - AMD64 and x86 Specific Assembly Functions.
 */

/*
 * Copyright (C) 2006-2024 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
 * in the VirtualBox distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 *
 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
 */

#ifndef IPRT_INCLUDED_asm_amd64_x86_h
#define IPRT_INCLUDED_asm_amd64_x86_h
#ifndef RT_WITHOUT_PRAGMA_ONCE
# pragma once
#endif
#include <iprt/types.h>
#include <iprt/assert.h>
#include <iprt/x86-helpers.h>
#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
# error "Not on AMD64 or x86"
#endif

#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
/* Emit the intrinsics at all optimization levels. */
# include <iprt/sanitized/intrin.h>
# pragma intrinsic(_ReadWriteBarrier)
# pragma intrinsic(__cpuid)
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/
#  pragma intrinsic(__cpuidex)
# endif
# pragma intrinsic(_enable)
# pragma intrinsic(_disable)
# pragma intrinsic(__rdtsc)
# pragma intrinsic(__readmsr)
# pragma intrinsic(__writemsr)
# pragma intrinsic(__outbyte)
# pragma intrinsic(__outbytestring)
# pragma intrinsic(__outword)
# pragma intrinsic(__outwordstring)
# pragma intrinsic(__outdword)
# pragma intrinsic(__outdwordstring)
# pragma intrinsic(__inbyte)
# pragma intrinsic(__inbytestring)
# pragma intrinsic(__inword)
# pragma intrinsic(__inwordstring)
# pragma intrinsic(__indword)
# pragma intrinsic(__indwordstring)
# pragma intrinsic(__invlpg)
# pragma intrinsic(__wbinvd)
# pragma intrinsic(__readcr0)
# pragma intrinsic(__readcr2)
# pragma intrinsic(__readcr3)
# pragma intrinsic(__readcr4)
# pragma intrinsic(__writecr0)
# pragma intrinsic(__writecr3)
# pragma intrinsic(__writecr4)
# pragma intrinsic(__readdr)
# pragma intrinsic(__writedr)
# ifdef RT_ARCH_AMD64
#  pragma intrinsic(__readcr8)
#  pragma intrinsic(__writecr8)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2005
#  pragma intrinsic(__halt)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
/*#  pragma intrinsic(__readeflags) - buggy intrinsics in VC++ 2010, reordering/optimizers issues
#  pragma intrinsic(__writeeflags) */
#  pragma intrinsic(__rdtscp)
# endif
# if defined(RT_ARCH_AMD64) && RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015 /*?*/
#  pragma intrinsic(_readfsbase_u64)
#  pragma intrinsic(_readgsbase_u64)
#  pragma intrinsic(_writefsbase_u64)
#  pragma intrinsic(_writegsbase_u64)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
#  pragma intrinsic(__lidt)
#  pragma intrinsic(__sidt)
#  pragma intrinsic(_lgdt)
#  pragma intrinsic(_sgdt)
# endif
#endif
/*
 * Undefine all symbols we have Watcom C/C++ #pragma aux'es for.
 */
#if defined(__WATCOMC__) && ARCH_BITS == 16
# include "asm-amd64-x86-watcom-16.h"
#elif defined(__WATCOMC__) && ARCH_BITS == 32
# include "asm-amd64-x86-watcom-32.h"
#endif


/** @defgroup grp_rt_asm_amd64_x86  AMD64 and x86 Specific ASM Routines
 * @ingroup grp_rt_asm
 * @{
 */

/** @todo find a more proper place for these structures? */
#pragma pack(1)
/** IDTR */
typedef struct RTIDTR
{
    /** Size of the IDT. */
    uint16_t    cbIdt;
    /** Address of the IDT. */
#if ARCH_BITS != 64
    uint32_t    pIdt;
#else
    uint64_t    pIdt;
#endif
} RTIDTR, RT_FAR *PRTIDTR;
#pragma pack()

#pragma pack(1)
/** @internal */
typedef struct RTIDTRALIGNEDINT
{
    /** Alignment padding. */
    uint16_t    au16Padding[ARCH_BITS == 64 ? 3 : 1];
    /** The IDTR structure. */
    RTIDTR      Idtr;
} RTIDTRALIGNEDINT;
#pragma pack()

/** Wrapped RTIDTR for preventing misalignment exceptions. */
typedef union RTIDTRALIGNED
{
    /** Try make sure this structure has optimal alignment. */
    uint64_t            auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
    /** Aligned structure. */
    RTIDTRALIGNEDINT    s;
} RTIDTRALIGNED;
AssertCompileSize(RTIDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
/** Pointer to an RTIDTR alignment wrapper. */
typedef RTIDTRALIGNED RT_FAR *PRIDTRALIGNED;
#pragma pack(1)
/** GDTR */
typedef struct RTGDTR
{
    /** Size of the GDT. */
    uint16_t    cbGdt;
    /** Address of the GDT. */
#if ARCH_BITS != 64
    uint32_t    pGdt;
#else
    uint64_t    pGdt;
#endif
} RTGDTR, RT_FAR *PRTGDTR;
#pragma pack()

#pragma pack(1)
/** @internal */
typedef struct RTGDTRALIGNEDINT
{
    /** Alignment padding. */
    uint16_t    au16Padding[ARCH_BITS == 64 ? 3 : 1];
    /** The GDTR structure. */
    RTGDTR      Gdtr;
} RTGDTRALIGNEDINT;
#pragma pack()

/** Wrapped RTGDTR for preventing misalignment exceptions. */
typedef union RTGDTRALIGNED
{
    /** Try make sure this structure has optimal alignment. */
    uint64_t            auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
    /** Aligned structure. */
    RTGDTRALIGNEDINT    s;
} RTGDTRALIGNED;
AssertCompileSize(RTGDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
/** Pointer to an RTGDTR alignment wrapper. */
typedef RTGDTRALIGNED RT_FAR *PRGDTRALIGNED;
/**
 * Gets the content of the IDTR CPU register.
 * @param   pIdtr   Where to store the IDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetIDTR(PRTIDTR pIdtr);
#else
DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    __sidt(pIdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        sidt    [rax]
#  else
        mov     eax, [pIdtr]
        sidt    [eax]
#  endif
    }
# endif
}
#endif
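

/* Editor's usage sketch (not part of the original header): reading the IDTR
 * on the current CPU. SIDT itself is unprivileged on most configurations, so
 * this works from ring-3 too, but the reported base address is only useful in
 * ring-0 contexts. Hypothetical example code, compiled out. */
#if 0
static void ExampleDumpIdtr(void)
{
    RTIDTR Idtr;
    ASMGetIDTR(&Idtr);
    /* Idtr.cbIdt holds the limit, Idtr.pIdt the linear base address. */
}
#endif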
/**
 * Gets the content of the IDTR.LIMIT CPU register.
 * @returns IDTR limit.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMGetIdtrLimit(void);
#else
DECLINLINE(uint16_t) ASMGetIdtrLimit(void)
{
    RTIDTRALIGNED TmpIdtr;
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    __sidt(&TmpIdtr.s.Idtr); /* store into the aligned member, not the front of the padded union */
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sidt %0" : "=m" (TmpIdtr.s.Idtr));
# else
    __asm
    {
        sidt    [TmpIdtr.s.Idtr]
    }
# endif
    return TmpIdtr.s.Idtr.cbIdt;
}
#endif


/**
 * Sets the content of the IDTR CPU register.
 * @param   pIdtr   Where to load the IDTR contents from.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetIDTR(const RTIDTR RT_FAR *pIdtr);
#else
DECLINLINE(void) ASMSetIDTR(const RTIDTR RT_FAR *pIdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    __lidt((void *)pIdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        lidt    [rax]
#  else
        mov     eax, [pIdtr]
        lidt    [eax]
#  endif
    }
# endif
}
#endif
/**
 * Gets the content of the GDTR CPU register.
 * @param   pGdtr   Where to store the GDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetGDTR(PRTGDTR pGdtr);
#else
DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    _sgdt(pGdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pGdtr]
        sgdt    [rax]
#  else
        mov     eax, [pGdtr]
        sgdt    [eax]
#  endif
    }
# endif
}
#endif


/**
 * Sets the content of the GDTR CPU register.
 * @param   pGdtr   Where to load the GDTR contents from.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetGDTR(const RTGDTR RT_FAR *pGdtr);
#else
DECLINLINE(void) ASMSetGDTR(const RTGDTR RT_FAR *pGdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    _lgdt((void *)pGdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lgdt %0" : : "m" (*pGdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pGdtr]
        lgdt    [rax]
#  else
        mov     eax, [pGdtr]
        lgdt    [eax]
#  endif
    }
# endif
}
#endif
/**
 * Get the cs register.
 * @returns cs.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetCS(void);
#else
DECLINLINE(RTSEL) ASMGetCS(void)
{
    RTSEL SelCS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
# else
    __asm
    {
        mov     ax, cs
        mov     [SelCS], ax
    }
# endif
    return SelCS;
}
#endif


/**
 * Get the DS register.
 * @returns DS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetDS(void);
#else
DECLINLINE(RTSEL) ASMGetDS(void)
{
    RTSEL SelDS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
# else
    __asm
    {
        mov     ax, ds
        mov     [SelDS], ax
    }
# endif
    return SelDS;
}
#endif


/**
 * Get the ES register.
 * @returns ES.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetES(void);
#else
DECLINLINE(RTSEL) ASMGetES(void)
{
    RTSEL SelES;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
# else
    __asm
    {
        mov     ax, es
        mov     [SelES], ax
    }
# endif
    return SelES;
}
#endif


/**
 * Get the FS register.
 * @returns FS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetFS(void);
#else
DECLINLINE(RTSEL) ASMGetFS(void)
{
    RTSEL SelFS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
# else
    __asm
    {
        mov     ax, fs
        mov     [SelFS], ax
    }
# endif
    return SelFS;
}
#endif
#ifdef RT_ARCH_AMD64

/**
 * Get the FS base register.
 * @returns FS base address.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(uint64_t) ASMGetFSBase(void);
#else
DECLINLINE(uint64_t) ASMGetFSBase(void)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    return (uint64_t)_readfsbase_u64();
# elif RT_INLINE_ASM_GNU_STYLE
    uint64_t uFSBase;
    __asm__ __volatile__("rdfsbase %0\n\t" : "=r" (uFSBase));
    return uFSBase;
# endif
}
#endif


/**
 * Set the FS base register.
 * @param   uNewBase    The new base value.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(void) ASMSetFSBase(uint64_t uNewBase);
#else
DECLINLINE(void) ASMSetFSBase(uint64_t uNewBase)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    _writefsbase_u64(uNewBase);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrfsbase %0\n\t" : : "r" (uNewBase));
# endif
}
#endif

#endif /* RT_ARCH_AMD64 */
/**
 * Get the GS register.
 * @returns GS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetGS(void);
#else
DECLINLINE(RTSEL) ASMGetGS(void)
{
    RTSEL SelGS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
# else
    __asm
    {
        mov     ax, gs
        mov     [SelGS], ax
    }
# endif
    return SelGS;
}
#endif
#ifdef RT_ARCH_AMD64

/**
 * Get the GS base register.
 * @returns GS base address.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(uint64_t) ASMGetGSBase(void);
#else
DECLINLINE(uint64_t) ASMGetGSBase(void)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    return (uint64_t)_readgsbase_u64();
# elif RT_INLINE_ASM_GNU_STYLE
    uint64_t uGSBase;
    __asm__ __volatile__("rdgsbase %0\n\t" : "=r" (uGSBase));
    return uGSBase;
# endif
}
#endif


/**
 * Set the GS base register.
 * @param   uNewBase    The new base value.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(void) ASMSetGSBase(uint64_t uNewBase);
#else
DECLINLINE(void) ASMSetGSBase(uint64_t uNewBase)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    _writegsbase_u64(uNewBase);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrgsbase %0\n\t" : : "r" (uNewBase));
# endif
}
#endif

#endif /* RT_ARCH_AMD64 */
/**
 * Get the SS register.
 * @returns SS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetSS(void);
#else
DECLINLINE(RTSEL) ASMGetSS(void)
{
    RTSEL SelSS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
# else
    __asm
    {
        mov     ax, ss
        mov     [SelSS], ax
    }
# endif
    return SelSS;
}
#endif


/**
 * Get the TR register.
 * @returns TR.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetTR(void);
#else
DECLINLINE(RTSEL) ASMGetTR(void)
{
    RTSEL SelTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
# else
    __asm
    {
        str     ax
        mov     [SelTR], ax
    }
# endif
    return SelTR;
}
#endif
/**
 * Get the LDTR register.
 * @returns LDTR.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetLDTR(void);
#else
DECLINLINE(RTSEL) ASMGetLDTR(void)
{
    RTSEL SelLDTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
# else
    __asm
    {
        sldt    ax
        mov     [SelLDTR], ax
    }
# endif
    return SelLDTR;
}
#endif
/**
 * Get the access rights for the segment selector.
 *
 * @returns The access rights on success or UINT32_MAX on failure.
 * @param   uSel        The selector value.
 *
 * @remarks Using UINT32_MAX for failure is chosen because valid access rights
 *          always have bits 0:7 as 0 (on both Intel & AMD).
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetSegAttr(uint32_t uSel);
#else
DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)
{
    uint32_t uAttr;
    /* LAR only accesses 16-bit of the source operand, but eax for the
       destination operand is required for getting the full 32-bit access rights. */
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lar  %1, %%eax\n\t"
                         "jz   done%=\n\t"
                         "movl $0xffffffff, %%eax\n\t"
                         "done%=:\n\t"
                         "movl %%eax, %0\n\t"
                         : "=r" (uAttr)
                         : "r" (uSel)
                         : "cc", "%eax");
# else
    __asm
    {
        lar     eax, [uSel]
        jz      done
        mov     eax, 0ffffffffh
done:
        mov     [uAttr], eax
    }
# endif
    return uAttr;
}
#endif
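

/* Editor's usage sketch (not part of the original header): probing whether a
 * selector refers to a present descriptor. The result uses the LAR layout,
 * where the P bit of the access byte lands in bit 15; that bit position is an
 * assumption stated here, not something this header defines. Hypothetical
 * code, compiled out. */
#if 0
static bool ExampleIsSelectorPresent(uint32_t uSel)
{
    uint32_t const uAttr = ASMGetSegAttr(uSel);
    return uAttr != UINT32_MAX
        && (uAttr & RT_BIT_32(15)); /* descriptor P bit as reported by LAR */
}
#endif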
/**
 * Get the [RE]FLAGS register.
 * @returns [RE]FLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMGetFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
{
    RTCCUINTREG uFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "popq %0\n\t"
                         : "=r" (uFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "popl %0\n\t"
                         : "=r" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    uFlags = __readeflags();
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        pushfq
        pop     [uFlags]
#  else
        pushfd
        pop     [uFlags]
#  endif
    }
# endif
    return uFlags;
}
#endif
/**
 * Set the [RE]FLAGS register.
 * @param   uFlags  The new [RE]FLAGS value.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - see __readeflags() above. */
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetFlags(RTCCUINTREG uFlags);
#else
DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushq %0\n\t"
                         "popfq\n\t"
                         : : "g" (uFlags));
#  else
    __asm__ __volatile__("pushl %0\n\t"
                         "popfl\n\t"
                         : : "g" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    __writeeflags(uFlags);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        push    [uFlags]
        popfq
#  else
        push    [uFlags]
        popfd
#  endif
    }
# endif
}
#endif
/**
 * Modifies the [RE]FLAGS register.
 * @returns Original value.
 * @param   fAndEfl Flags to keep (applied first).
 * @param   fOrEfl  Flags to be set.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl);
#else
DECLINLINE(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl)
{
    RTCCUINTREG fOldEfl;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "movq (%%rsp), %0\n\t"
                         "andq %0, %1\n\t"
                         "orq  %3, %1\n\t"
                         "mov  %1, (%%rsp)\n\t"
                         "popfq\n\t"
                         : "=&r" (fOldEfl),
                           "=r" (fAndEfl)
                         : "1" (fAndEfl),
                           "rn" (fOrEfl) );
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "movl (%%esp), %0\n\t"
                         "andl %1, (%%esp)\n\t"
                         "orl  %2, (%%esp)\n\t"
                         "popfl\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fAndEfl),
                           "rn" (fOrEfl) );
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    fOldEfl = __readeflags();
    __writeeflags((fOldEfl & fAndEfl) | fOrEfl);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rdx, [fAndEfl]
        mov     rcx, [fOrEfl]
        pushfq
        mov     rax, [rsp]
        and     rdx, rax
        or      rdx, rcx
        mov     [rsp], rdx
        popfq
        mov     [fOldEfl], rax
#  else
        mov     edx, [fAndEfl]
        mov     ecx, [fOrEfl]
        pushfd
        mov     eax, [esp]
        and     edx, eax
        or      edx, ecx
        mov     [esp], edx
        popfd
        mov     [fOldEfl], eax
#  endif
    }
# endif
    return fOldEfl;
}
#endif
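

/* Editor's usage sketch (not part of the original header): the
 * save/modify/restore pattern ASMChangeFlags enables. Bit 18 (EFLAGS.AC) is
 * used purely as an illustration; whether toggling it is meaningful depends
 * on CPL and CR0.AM. Hypothetical code, compiled out. */
#if 0
static void ExampleWithAcCleared(void (*pfnWork)(void))
{
    RTCCUINTREG const fSaved = ASMChangeFlags(~(RTCCUINTREG)RT_BIT_32(18) /* AC */, 0);
    pfnWork();              /* runs with EFLAGS.AC clear */
    ASMSetFlags(fSaved);    /* restore the original flags */
}
#endif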
/**
 * Modifies the [RE]FLAGS register by ORing in one or more flags.
 * @returns Original value.
 * @param   fOrEfl  The flags to be set (ORed in).
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl);
#else
DECLINLINE(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl)
{
    RTCCUINTREG fOldEfl;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "movq (%%rsp), %0\n\t"
                         "orq  %1, (%%rsp)\n\t"
                         "popfq\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fOrEfl) );
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "movl (%%esp), %0\n\t"
                         "orl  %1, (%%esp)\n\t"
                         "popfl\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fOrEfl) );
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    fOldEfl = __readeflags();
    __writeeflags(fOldEfl | fOrEfl);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rcx, [fOrEfl]
        pushfq
        mov     rax, [rsp]      /* old flags must end up in rax for the store below */
        or      [rsp], rcx
        popfq
        mov     [fOldEfl], rax
#  else
        mov     ecx, [fOrEfl]
        pushfd
        mov     eax, [esp]      /* old flags must end up in eax for the store below */
        or      [esp], ecx
        popfd
        mov     [fOldEfl], eax
#  endif
    }
# endif
    return fOldEfl;
}
#endif
/**
 * Modifies the [RE]FLAGS register by AND'ing out one or more flags.
 * @returns Original value.
 * @param   fAndEfl The flags to keep.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl);
#else
DECLINLINE(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl)
{
    RTCCUINTREG fOldEfl;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "movq (%%rsp), %0\n\t"
                         "andq %1, (%%rsp)\n\t"
                         "popfq\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fAndEfl) );
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "movl (%%esp), %0\n\t"
                         "andl %1, (%%esp)\n\t"
                         "popfl\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fAndEfl) );
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    fOldEfl = __readeflags();
    __writeeflags(fOldEfl & fAndEfl);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rdx, [fAndEfl]
        pushfq
        mov     rax, [rsp]      /* read the old flags into rax, then mask the stack copy */
        and     [rsp], rdx
        popfq
        mov     [fOldEfl], rax
#  else
        mov     edx, [fAndEfl]
        pushfd
        mov     eax, [esp]      /* read the old flags into eax, then mask the stack copy */
        and     [esp], edx
        popfd
        mov     [fOldEfl], eax
#  endif
    }
# endif
    return fOldEfl;
}
#endif
/**
 * Gets the content of the CPU timestamp counter register.
 *
 * @returns TSC.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTSC(void);
#else
DECLINLINE(uint64_t) ASMReadTSC(void)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
# else
#  if RT_INLINE_ASM_USES_INTRIN
    u.u = __rdtsc();
#  else
    __asm
    {
        rdtsc
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
#  endif
# endif
    return u.u;
}
#endif
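

/* Editor's usage sketch (not part of the original header): measuring an
 * elapsed cycle count. RDTSC is not serializing and TSCs may differ between
 * CPUs, so real measurements usually pin the thread and add fencing; this
 * shows only the bare pattern. Hypothetical code, compiled out. */
#if 0
static uint64_t ExampleCycles(void (*pfnWork)(void))
{
    uint64_t const uStart = ASMReadTSC();
    pfnWork();
    return ASMReadTSC() - uStart;
}
#endif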
/**
 * Gets the content of the CPU timestamp counter register and the
 * associated AUX value.
 *
 * @returns TSC.
 * @param   puAux   Where to store the AUX value.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2008
RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR *puAux);
#else
DECLINLINE(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR *puAux)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    /* rdtscp is not supported by ancient linux build VM of course :-( */
    /*__asm__ __volatile__("rdtscp\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux)); */
    __asm__ __volatile__(".byte 0x0f,0x01,0xf9\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux));
# else
#  if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    u.u = __rdtscp(puAux);
#  else
    __asm
    {
        rdtscp
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
        mov     eax, [puAux]
        mov     [eax], ecx
    }
#  endif
# endif
    return u.u;
}
#endif
/**
 * Performs the cpuid instruction returning all registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId(uint32_t uOperator, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);
#else
DECLINLINE(void) ASMCpuId(uint32_t uOperator, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ __volatile__ ("cpuid\n\t"
                          : "=a" (uRAX),
                            "=b" (uRBX),
                            "=c" (uRCX),
                            "=d" (uRDX)
                          : "0" (uOperator), "2" (0));
    *(uint32_t RT_FAR *)pvEAX = (uint32_t)uRAX;
    *(uint32_t RT_FAR *)pvEBX = (uint32_t)uRBX;
    *(uint32_t RT_FAR *)pvECX = (uint32_t)uRCX;
    *(uint32_t RT_FAR *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx, %1\n\t"
                          : "=a" (*(uint32_t *)pvEAX),
                            "=r" (*(uint32_t *)pvEBX),
                            "=c" (*(uint32_t *)pvECX),
                            "=d" (*(uint32_t *)pvEDX)
                          : "0" (uOperator), "2" (0));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    *(uint32_t RT_FAR *)pvEAX = aInfo[0];
    *(uint32_t RT_FAR *)pvEBX = aInfo[1];
    *(uint32_t RT_FAR *)pvECX = aInfo[2];
    *(uint32_t RT_FAR *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t RT_FAR *)pvEAX = uEAX;
    *(uint32_t RT_FAR *)pvEBX = uEBX;
    *(uint32_t RT_FAR *)pvECX = uECX;
    *(uint32_t RT_FAR *)pvEDX = uEDX;
# endif
}
#endif
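

/* Editor's usage sketch (not part of the original header): fetching the CPUID
 * vendor string with ASMCpuId. The EBX/EDX/ECX byte ordering is architectural.
 * Assumes <string.h> for memcpy. Hypothetical code, compiled out. */
#if 0
static void ExampleGetVendorString(char szVendor[13])
{
    uint32_t uEax, uEbx, uEcx, uEdx;
    ASMCpuId(0, &uEax, &uEbx, &uEcx, &uEdx);
    memcpy(&szVendor[0], &uEbx, 4);     /* e.g. "Genu" */
    memcpy(&szVendor[4], &uEdx, 4);     /* e.g. "ineI" */
    memcpy(&szVendor[8], &uEcx, 4);     /* e.g. "ntel" */
    szVendor[12] = '\0';
}
#endif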
/**
 * Performs the CPUID instruction with EAX and ECX input returning ALL output
 * registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   uIdxECX     ecx index
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);
#else
DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ ("cpuid\n\t"
             : "=a" (uRAX),
               "=b" (uRBX),
               "=c" (uRCX),
               "=d" (uRDX)
             : "0" (uOperator),
               "2" (uIdxECX));
    *(uint32_t RT_FAR *)pvEAX = (uint32_t)uRAX;
    *(uint32_t RT_FAR *)pvEBX = (uint32_t)uRBX;
    *(uint32_t RT_FAR *)pvECX = (uint32_t)uRCX;
    *(uint32_t RT_FAR *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ ("xchgl %%ebx, %1\n\t"
             "cpuid\n\t"
             "xchgl %%ebx, %1\n\t"
             : "=a" (*(uint32_t *)pvEAX),
               "=r" (*(uint32_t *)pvEBX),
               "=c" (*(uint32_t *)pvECX),
               "=d" (*(uint32_t *)pvEDX)
             : "0" (uOperator),
               "2" (uIdxECX));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuidex(aInfo, uOperator, uIdxECX);
    *(uint32_t RT_FAR *)pvEAX = aInfo[0];
    *(uint32_t RT_FAR *)pvEBX = aInfo[1];
    *(uint32_t RT_FAR *)pvECX = aInfo[2];
    *(uint32_t RT_FAR *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        mov     ecx, [uIdxECX]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t RT_FAR *)pvEAX = uEAX;
    *(uint32_t RT_FAR *)pvEBX = uEBX;
    *(uint32_t RT_FAR *)pvECX = uECX;
    *(uint32_t RT_FAR *)pvEDX = uEDX;
# endif
}
#endif
/**
 * CPUID variant that initializes all 4 registers before the CPUID instruction.
 *
 * @returns The EAX result value.
 * @param   uOperator   CPUID operation (eax).
 * @param   uInitEBX    The value to assign EBX prior to the CPUID instruction.
 * @param   uInitECX    The value to assign ECX prior to the CPUID instruction.
 * @param   uInitEDX    The value to assign EDX prior to the CPUID instruction.
 * @param   pvEAX       Where to store eax. Optional.
 * @param   pvEBX       Where to store ebx. Optional.
 * @param   pvECX       Where to store ecx. Optional.
 * @param   pvEDX       Where to store edx. Optional.
 */
DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator, uint32_t uInitEBX, uint32_t uInitECX, uint32_t uInitEDX,
                                 void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);
/**
 * Performs the cpuid instruction returning ecx and edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void RT_FAR *pvECX, void RT_FAR *pvEDX);
#else
DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void RT_FAR *pvECX, void RT_FAR *pvEDX)
{
    uint32_t uEBX;
    ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
}
#endif
/**
 * Performs the cpuid instruction returning eax.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EAX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
{
    RTCCUINTREG xAX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "rbx", "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop  %%ebx\n\t"
             : "=a" (xAX)
             : "0" (uOperator)
             : "ecx", "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "edx", "ecx", "ebx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xAX = aInfo[0];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xAX], eax
        pop     ebx
    }
# endif
    return (uint32_t)xAX;
}
#endif
/**
 * Performs the cpuid instruction returning ebx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EBX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=b" (xBX)
             : "0" (uOperator)
             : "rdx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "mov  %%ebx, %%edx\n\t"
             "pop  %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xBX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=b" (xBX)
             : "0" (uOperator)
             : "edx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint32_t)xBX;
}
#endif
/**
 * Performs the cpuid instruction returning ecx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns ECX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
{
    RTCCUINTREG xCX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=c" (xCX)
             : "0" (uOperator)
             : "rbx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop  %%ebx\n\t"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "ebx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xCX = aInfo[2];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xCX], ecx
        pop     ebx
    }
# endif
    return (uint32_t)xCX;
}
#endif
/**
 * Performs the cpuid instruction returning edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EDX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
{
    RTCCUINTREG xDX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=d" (xDX)
             : "0" (uOperator)
             : "rbx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop  %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ebx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xDX = aInfo[3];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xDX], edx
        pop     ebx
    }
# endif
    return (uint32_t)xDX;
}
#endif
/**
 * Checks if the current CPU supports CPUID.
 *
 * @returns true if CPUID is supported.
 */
#ifdef __WATCOMC__
DECLASM(bool) ASMHasCpuId(void);
#else
DECLINLINE(bool) ASMHasCpuId(void)
{
# ifdef RT_ARCH_AMD64
    return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
# else /* !RT_ARCH_AMD64 */
    bool fRet = false;
#  if RT_INLINE_ASM_GNU_STYLE
    uint32_t u1;
    uint32_t u2;
    __asm__ ("pushf\n\t"
             "pop  %1\n\t"
             "mov  %1, %2\n\t"
             "xorl $0x200000, %1\n\t"
             "push %1\n\t"
             "popf\n\t"
             "pushf\n\t"
             "pop  %1\n\t"
             "cmpl %1, %2\n\t"
             "setne %0\n\t"
             "push %2\n\t"
             "popf\n\t"
             : "=m" (fRet), "=r" (u1), "=r" (u2));
#  else
    __asm
    {
        pushfd
        pop     eax
        mov     ebx, eax
        xor     eax, 0200000h
        push    eax
        popfd
        pushfd
        pop     eax
        cmp     eax, ebx
        setne   fRet
        push    ebx
        popfd
    }
#  endif
    return fRet;
# endif /* !RT_ARCH_AMD64 */
}
#endif
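

/* Editor's usage sketch (not part of the original header): guarding CPUID use
 * on ancient 32-bit CPUs. ASMHasCpuId toggles EFLAGS.ID (bit 21); on AMD64 it
 * is constant-true and folds away. Hypothetical code, compiled out. */
#if 0
static uint32_t ExampleMaxStdLeaf(void)
{
    if (!ASMHasCpuId())
        return 0;
    return ASMCpuId_EAX(0); /* leaf 0 returns the highest standard leaf in EAX */
}
#endif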
/**
 * Gets the APIC ID of the current CPU.
 *
 * @returns the APIC ID.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMGetApicId(void);
#else
DECLINLINE(uint8_t) ASMGetApicId(void)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("mov   %%ebx,%1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx,%1\n\t"
                          : "=a" (uSpill),
                            "=rm" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  else
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, 1);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, 1
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint8_t)(xBX >> 24);
}
#endif
/**
 * Gets the APIC ID of the current CPU using leaf 0xb.
 *
 * @returns the APIC ID.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2010 /*?*/
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetApicIdExt0B(void);
#else
DECLINLINE(uint32_t) ASMGetApicIdExt0B(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    RTCCUINTREG xDX;
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpillEax, uSpillEcx;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpillEax),
                            "=c" (uSpillEcx),
                            "=d" (xDX)
                          : "0" (0xb),
                            "1" (0)
                          : "rbx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    RTCCUINTREG uSpillEax, uSpillEcx, uSpillEbx;
    __asm__ __volatile__ ("mov   %%ebx,%2\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx,%2\n\t"
                          : "=a" (uSpillEax),
                            "=c" (uSpillEcx),
                            "=rm" (uSpillEbx),
                            "=d" (xDX)
                          : "0" (0xb),
                            "1" (0));
#  else
    RTCCUINTREG uSpillEax, uSpillEcx;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpillEax),
                            "=c" (uSpillEcx),
                            "=d" (xDX)
                          : "0" (0xb),
                            "1" (0)
                          : "ebx");
#  endif
    return (uint32_t)xDX;

# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/

    int aInfo[4];
    __cpuidex(aInfo, 0xb, 0);
    return aInfo[3];

# else
    RTCCUINTREG xDX;
    __asm
    {
        push    ebx
        mov     eax, 0xb
        xor     ecx, ecx
        cpuid
        mov     [xDX], edx
        pop     ebx
    }
    return (uint32_t)xDX;
# endif
}
#endif
/**
 * Gets the APIC ID of the current CPU using leaf 8000001E.
 *
 * @returns the APIC ID.
 */
DECLINLINE(uint32_t) ASMGetApicIdExt8000001E(void)
{
    return ASMCpuId_EAX(0x8000001e);
}


/**
 * Tests if this is a genuine Intel CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsIntelCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsIntelCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is an authentic AMD CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsAmdCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsAmdCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is a centaur hauling VIA CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsViaCentaurCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsViaCentaurCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is a Shanghai CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsShanghaiCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsShanghaiCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is a genuine Hygon CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsHygonCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsHygonCpu(uEBX, uECX, uEDX);
}
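

/* Editor's usage sketch (not part of the original header): the vendor helpers
 * all run the same leaf-0 query, so picking a vendor-specific code path is a
 * simple cascade. Hypothetical code, compiled out. */
#if 0
static const char *ExampleVendorName(void)
{
    if (ASMIsIntelCpu())        return "Intel";
    if (ASMIsAmdCpu())          return "AMD";
    if (ASMIsHygonCpu())        return "Hygon";
    if (ASMIsViaCentaurCpu())   return "VIA/Centaur";
    if (ASMIsShanghaiCpu())     return "Shanghai";
    return "unknown";
}
#endif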
/**
 * Get cr0.
 * @returns cr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR0(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR0(void)
{
    RTCCUINTXREG uCR0;
# if RT_INLINE_ASM_USES_INTRIN
    uCR0 = __readcr0();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr0, %0\t\n" : "=r" (uCR0));
#  else
    __asm__ __volatile__("movl %%cr0, %0\t\n" : "=r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr0
        mov     [uCR0], rax
#  else
        mov     eax, cr0
        mov     [uCR0], eax
#  endif
    }
# endif
    return uCR0;
}
#endif
/**
 * Sets the CR0 register.
 * @param   uCR0    The new CR0 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR0(RTCCUINTXREG uCR0);
#else
DECLINLINE(void) ASMSetCR0(RTCCUINTXREG uCR0)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr0(uCR0);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
#  else
    __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR0]
        mov     cr0, rax
#  else
        mov     eax, [uCR0]
        mov     cr0, eax
#  endif
    }
# endif
}
#endif
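

/* Editor's usage sketch (not part of the original header): the classic CR0
 * read-modify-write, here clearing write protection (CR0.WP, bit 16) around a
 * patch of read-only memory. Strictly ring-0, and interrupts are normally
 * disabled across the window. Hypothetical code, compiled out. */
#if 0
static void ExampleWithWpDisabled(void (*pfnPatch)(void))
{
    RTCCUINTXREG const uCr0 = ASMGetCR0();
    ASMSetCR0(uCr0 & ~(RTCCUINTXREG)RT_BIT_32(16) /* WP */);
    pfnPatch();
    ASMSetCR0(uCr0);    /* restore the original CR0 */
}
#endif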
/**
 * Get cr2.
 * @returns cr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR2(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR2(void)
{
    RTCCUINTXREG uCR2;
# if RT_INLINE_ASM_USES_INTRIN
    uCR2 = __readcr2();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr2, %0\t\n" : "=r" (uCR2));
#  else
    __asm__ __volatile__("movl %%cr2, %0\t\n" : "=r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr2
        mov     [uCR2], rax
#  else
        mov     eax, cr2
        mov     [uCR2], eax
#  endif
    }
# endif
    return uCR2;
}
#endif
/**
 * Sets the CR2 register.
 * @param   uCR2    The new CR2 value.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR2(RTCCUINTXREG uCR2);
#else
DECLINLINE(void) ASMSetCR2(RTCCUINTXREG uCR2)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
#  else
    __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR2]
        mov     cr2, rax
#  else
        mov     eax, [uCR2]
        mov     cr2, eax
#  endif
    }
# endif
}
#endif
/**
 * Get cr3.
 * @returns cr3.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR3(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR3(void)
{
    RTCCUINTXREG uCR3;
# if RT_INLINE_ASM_USES_INTRIN
    uCR3 = __readcr3();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\t\n" : "=r" (uCR3));
#  else
    __asm__ __volatile__("movl %%cr3, %0\t\n" : "=r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     [uCR3], rax
#  else
        mov     eax, cr3
        mov     [uCR3], eax
#  endif
    }
# endif
    return uCR3;
}
#endif
/**
 * Sets the CR3 register.
 *
 * @param   uCR3    New CR3 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR3(RTCCUINTXREG uCR3);
#else
DECLINLINE(void) ASMSetCR3(RTCCUINTXREG uCR3)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(uCR3);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
#  else
    __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR3]
        mov     cr3, rax
#  else
        mov     eax, [uCR3]
        mov     cr3, eax
#  endif
    }
# endif
}
#endif
/**
 * Reloads the CR3 register.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMReloadCR3(void);
#else
DECLINLINE(void) ASMReloadCR3(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(__readcr3());

# elif RT_INLINE_ASM_GNU_STYLE
    RTCCUINTXREG u;
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\n\t"
                         "movq %0, %%cr3\n\t"
                         : "=r" (u));
#  else
    __asm__ __volatile__("movl %%cr3, %0\n\t"
                         "movl %0, %%cr3\n\t"
                         : "=r" (u));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     cr3, rax
#  else
        mov     eax, cr3
        mov     cr3, eax
#  endif
    }
# endif
}
#endif
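

/* Editor's note (not part of the original header): rewriting CR3 with its own
 * value flushes non-global TLB entries, which is what ASMReloadCR3 is for. A
 * hedged sketch of the typical call site after a page-table edit follows;
 * global mappings need INVLPG or CR4.PGE toggling instead. Hypothetical code,
 * compiled out. */
#if 0
static void ExampleAfterPteEdit(void)
{
    /* ... modify a PTE that may still be cached in the TLB ... */
    ASMReloadCR3(); /* flush non-global translations */
}
#endif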
/**
 * Get cr4.
 * @returns cr4.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR4(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR4(void)
{
    RTCCUINTXREG uCR4;
# if RT_INLINE_ASM_USES_INTRIN
    uCR4 = __readcr4();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr4, %0\t\n" : "=r" (uCR4));
#  else
    __asm__ __volatile__("movl %%cr4, %0\t\n" : "=r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr4
        mov     [uCR4], rax
#  else
        push    eax /* just in case */
        /*mov     eax, cr4*/
        _emit   0x0f
        _emit   0x20
        _emit   0xe0
        mov     [uCR4], eax
        pop     eax
#  endif
    }
# endif
    return uCR4;
}
#endif
/**
 * Sets the CR4 register.
 *
 * @param   uCR4    New CR4 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR4(RTCCUINTXREG uCR4);
#else
DECLINLINE(void) ASMSetCR4(RTCCUINTXREG uCR4)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr4(uCR4);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
#  else
    __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR4]
        mov     cr4, rax
#  else
        mov     eax, [uCR4]
        _emit   0x0F
        _emit   0x22
        _emit   0xE0 /* mov cr4, eax */
#  endif
    }
# endif
}
#endif
/**
 * Get cr8.
 * @returns cr8.
 * @remark  The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTXREG) ASMGetCR8(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR8(void)
{
# ifdef RT_ARCH_AMD64
    RTCCUINTXREG uCR8;
#  if RT_INLINE_ASM_USES_INTRIN
    uCR8 = __readcr8();

#  elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movq %%cr8, %0\t\n" : "=r" (uCR8));
#  else
    __asm
    {
        mov     rax, cr8
        mov     [uCR8], rax
    }
#  endif
    return uCR8;
# else /* !RT_ARCH_AMD64 */
    return 0;
# endif /* !RT_ARCH_AMD64 */
}
#endif
/**
 * Get XCR0 (eXtended feature Control Register 0).
 * @returns xcr0.
 */
DECLASM(uint64_t) ASMGetXcr0(void);

/**
 * Sets the XCR0 register.
 * @param   uXcr0   The new XCR0 value.
 */
DECLASM(void) ASMSetXcr0(uint64_t uXcr0);

struct X86XSAVEAREA;
/**
 * Save extended CPU state.
 * @param   pXStateArea     Where to save the state.
 * @param   fComponents     Which state components to save.
 */
DECLASM(void) ASMXSave(struct X86XSAVEAREA RT_FAR *pXStateArea, uint64_t fComponents);

/**
 * Loads extended CPU state.
 * @param   pXStateArea     Where to load the state from.
 * @param   fComponents     Which state components to load.
 */
DECLASM(void) ASMXRstor(struct X86XSAVEAREA const RT_FAR *pXStateArea, uint64_t fComponents);


struct X86FXSTATE;
/**
 * Save FPU and SSE CPU state.
 * @param   pXStateArea     Where to save the state.
 */
DECLASM(void) ASMFxSave(struct X86FXSTATE RT_FAR *pXStateArea);

/**
 * Load FPU and SSE CPU state.
 * @param   pXStateArea     Where to load the state from.
 */
DECLASM(void) ASMFxRstor(struct X86FXSTATE const RT_FAR *pXStateArea);
/**
 * Enables interrupts (EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntEnable(void);
#else
DECLINLINE(void) ASMIntEnable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("sti\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _enable();
# else
    __asm sti
# endif
}
#endif


/**
 * Disables interrupts (!EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntDisable(void);
#else
DECLINLINE(void) ASMIntDisable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("cli\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _disable();
# else
    __asm cli
# endif
}
#endif
/**
 * Disables interrupts and returns previous xFLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMIntDisableFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
{
    RTCCUINTREG xFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "cli\n\t"
                         "popq %0\n\t"
                         : "=r" (xFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "cli\n\t"
                         "popl %0\n\t"
                         : "=r" (xFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
    xFlags = ASMGetFlags();
    _disable();
# else
    __asm {
        pushfd
        cli
        pop     [xFlags]
    }
# endif
    return xFlags;
}
#endif
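

/* Editor's usage sketch (not part of the original header): the canonical
 * save/disable/restore pattern for a short interrupt-free critical section in
 * ring-0. Hypothetical code, compiled out. */
#if 0
static void ExampleNoIrqSection(void (*pfnCritical)(void))
{
    RTCCUINTREG const fSaved = ASMIntDisableFlags();
    pfnCritical();          /* runs with EFLAGS.IF clear */
    ASMSetFlags(fSaved);    /* restores the previous IF state */
}
#endif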
/**
 * Are interrupts enabled?
 *
 * @returns true / false.
 */
DECLINLINE(bool) ASMIntAreEnabled(void)
{
    RTCCUINTREG uFlags = ASMGetFlags();
    return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
}


/**
 * Halts the CPU until interrupted.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2005
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMHalt(void);
#else
DECLINLINE(void) ASMHalt(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("hlt\n\t");
# elif RT_INLINE_ASM_USES_INTRIN
    __halt();
# else
    __asm {
        hlt
    }
# endif
}
#endif
/**
 * Reads a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMRdMsr(uint32_t uRegister);
#else
DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    u.u = __readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
# endif

    return u.u;
}
#endif
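

/* Editor's usage sketch (not part of the original header): reading a
 * well-known MSR. RDMSR #GPs on unimplemented registers, so the caller is
 * expected to know the MSR exists (or trap the fault); ring-0 only.
 * IA32_APIC_BASE (0x1b) is used purely as an illustration. Hypothetical code,
 * compiled out. */
#if 0
static uint64_t ExampleReadApicBase(void)
{
    return ASMRdMsr(0x1b /* IA32_APIC_BASE */);
}
#endif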
/**
 * Writes a machine specific register.
 *
 * @param   uRegister   Register to write to.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    __writemsr(uRegister, u.u);

# else
    __asm
    {
        mov     ecx, [uRegister]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
    }
# endif
}
#endif
/**
 * Reads a machine specific register, extended version (for AMD).
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 * @param   uXDI        RDI/EDI value.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM_386(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI);
#else
DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
        xchg    edi, [uXDI]
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register, extended version (for AMD).
 *
 * @param   uRegister   Register to write to.
 * @param   uXDI        RDI/EDI value.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
        xchg    edi, [uXDI]
    }
# endif
}
#endif
/**
 * Reads low part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u32)
                         : "c" (uRegister)
                         : "edx");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)__readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], eax
    }
# endif

    return u32;
}
#endif


/**
 * Reads high part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=d" (u32)
                         : "c" (uRegister)
                         : "eax");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)(__readmsr(uRegister) >> 32);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], edx
    }
# endif

    return u32;
}
#endif
/**
 * Gets dr0.
 *
 * @returns dr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR0(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR0(void)
{
    RTCCUINTXREG uDR0;
# if RT_INLINE_ASM_USES_INTRIN
    uDR0 = __readdr(0);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
#  else
    __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr0
        mov     [uDR0], rax
#  else
        mov     eax, dr0
        mov     [uDR0], eax
#  endif
    }
# endif
    return uDR0;
}
#endif


/**
 * Gets dr1.
 *
 * @returns dr1.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR1(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR1(void)
{
    RTCCUINTXREG uDR1;
# if RT_INLINE_ASM_USES_INTRIN
    uDR1 = __readdr(1);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
#  else
    __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr1
        mov     [uDR1], rax
#  else
        mov     eax, dr1
        mov     [uDR1], eax
#  endif
    }
# endif
    return uDR1;
}
#endif


/**
 * Gets dr2.
 *
 * @returns dr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR2(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR2(void)
{
    RTCCUINTXREG uDR2;
# if RT_INLINE_ASM_USES_INTRIN
    uDR2 = __readdr(2);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
#  else
    __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr2
        mov     [uDR2], rax
#  else
        mov     eax, dr2
        mov     [uDR2], eax
#  endif
    }
# endif
    return uDR2;
}
#endif


/**
 * Gets dr3.
 *
 * @returns dr3.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR3(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR3(void)
{
    RTCCUINTXREG uDR3;
# if RT_INLINE_ASM_USES_INTRIN
    uDR3 = __readdr(3);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
#  else
    __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr3
        mov     [uDR3], rax
#  else
        mov     eax, dr3
        mov     [uDR3], eax
#  endif
    }
# endif
    return uDR3;
}
#endif
/**
 * Gets dr6.
 *
 * @returns dr6.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR6(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR6(void)
{
    RTCCUINTXREG uDR6;
# if RT_INLINE_ASM_USES_INTRIN
    uDR6 = __readdr(6);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
#  else
    __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr6
        mov     [uDR6], rax
#  else
        mov     eax, dr6
        mov     [uDR6], eax
#  endif
    }
# endif
    return uDR6;
}
#endif
/**
 * Reads and clears DR6.
 *
 * @returns DR6.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetAndClearDR6(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetAndClearDR6(void)
{
    RTCCUINTXREG uDR6;
# if RT_INLINE_ASM_USES_INTRIN
    uDR6 = __readdr(6);
    __writedr(6, 0xffff0ff0U);           /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
# elif RT_INLINE_ASM_GNU_STYLE
    RTCCUINTXREG uNewValue = 0xffff0ff0U;/* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr6, %0\n\t"
                         "movq %1, %%dr6\n\t"
                         : "=r" (uDR6)
                         : "r" (uNewValue));
#  else
    __asm__ __volatile__("movl %%dr6, %0\n\t"
                         "movl %1, %%dr6\n\t"
                         : "=r" (uDR6)
                         : "r" (uNewValue));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr6
        mov     [uDR6], rax
        mov     rcx, rax
        mov     ecx, 0ffff0ff0h;        /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
        mov     dr6, rcx
#  else
        mov     eax, dr6
        mov     [uDR6], eax
        mov     ecx, 0ffff0ff0h;        /* 31-16 and 4-11 are 1's, 12 is zero. */
        mov     dr6, ecx
#  endif
    }
# endif
    return uDR6;
}
#endif

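/*
 * Usage sketch (illustrative, not part of the header proper): debug
 * exceptions OR their status bits into DR6 and never clear them, so a \#DB
 * handler typically latches and resets the register in one go. The helper
 * name below is hypothetical.
 *
 *     RTCCUINTXREG const uDR6 = ASMGetAndClearDR6();
 *     if (uDR6 & 0xf)                     // B0..B3: a hardware breakpoint hit
 *         myHandleHwBreakpoint(uDR6 & 0xf);   // hypothetical helper
 */
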
/**
 * Gets dr7.
 *
 * @returns dr7.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR7(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR7(void)
{
    RTCCUINTXREG uDR7;
# if RT_INLINE_ASM_USES_INTRIN
    uDR7 = __readdr(7);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
#  else
    __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr7
        mov     [uDR7], rax
#  else
        mov     eax, dr7
        mov     [uDR7], eax
#  endif
    }
# endif
    return uDR7;
}
#endif

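/*
 * Usage sketch (illustrative, not part of the header proper): snapshotting
 * the debug register state, e.g. ahead of a context switch. The aDRs[]
 * layout is ad hoc for the example, not an IPRT type.
 *
 *     RTCCUINTXREG aDRs[6];
 *     aDRs[0] = ASMGetDR0();  aDRs[1] = ASMGetDR1();
 *     aDRs[2] = ASMGetDR2();  aDRs[3] = ASMGetDR3();
 *     aDRs[4] = ASMGetDR6();  aDRs[5] = ASMGetDR7();
 */
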
/**
 * Sets dr0.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR0(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR0(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(0, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr0, rax
#  else
        mov     eax, [uDRVal]
        mov     dr0, eax
#  endif
    }
# endif
}
#endif

/**
 * Sets dr1.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR1(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR1(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(1, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr1, rax
#  else
        mov     eax, [uDRVal]
        mov     dr1, eax
#  endif
    }
# endif
}
#endif

/**
 * Sets dr2.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR2(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR2(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(2, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr2, rax
#  else
        mov     eax, [uDRVal]
        mov     dr2, eax
#  endif
    }
# endif
}
#endif

/**
 * Sets dr3.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR3(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR3(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(3, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr3, rax
#  else
        mov     eax, [uDRVal]
        mov     dr3, eax
#  endif
    }
# endif
}
#endif

/**
 * Sets dr6.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR6(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR6(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(6, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr6, rax
#  else
        mov     eax, [uDRVal]
        mov     dr6, eax
#  endif
    }
# endif
}
#endif

/**
 * Sets dr7.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR7(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR7(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(7, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr7, rax
#  else
        mov     eax, [uDRVal]
        mov     dr7, eax
#  endif
    }
# endif
}
#endif

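/*
 * Usage sketch (illustrative, not part of the header proper): arming DR0 as
 * a local, 1-byte execution breakpoint. 0x401 is L0 (bit 0) plus the
 * reads-as-one bit 10; RW0/LEN0 stay zero for an instruction breakpoint.
 * Real code would likely use the X86_DR7_* constants from iprt/x86.h rather
 * than a raw value; pvCodeAddr is caller supplied.
 *
 *     ASMSetDR0((RTCCUINTXREG)(uintptr_t)pvCodeAddr);
 *     ASMSetDR7(UINT32_C(0x401));
 */
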
/**
 * Writes an 8-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u8      8-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
#else
DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outb %b1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u8));

# elif RT_INLINE_ASM_USES_INTRIN
    __outbyte(Port, u8);

# else
    __asm
    {
        mov     dx, [Port]
        mov     al, [u8]
        out     dx, al
    }
# endif
}
#endif

/**
 * Reads an 8-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 8-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMInU8(RTIOPORT Port);
#else
DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
{
    uint8_t u8;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inb %w1, %b0\n\t"
                         : "=a" (u8)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u8 = __inbyte(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      al, dx
        mov     [u8], al
    }
# endif
    return u8;
}
#endif

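/*
 * Usage sketch (illustrative, not part of the header proper): the classic
 * CMOS/RTC access pattern - write the register index to port 0x70, then
 * read the data from port 0x71. Register 0x0a is RTC status register A;
 * NMI-disable handling on bit 7 of the index is omitted here.
 *
 *     ASMOutU8(0x70, 0x0a);
 *     uint8_t const bStatusA = ASMInU8(0x71);
 */
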
/**
 * Writes a 16-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u16     16-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
#else
DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outw %w1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u16));

# elif RT_INLINE_ASM_USES_INTRIN
    __outword(Port, u16);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ax, [u16]
        out     dx, ax
    }
# endif
}
#endif

/**
 * Reads a 16-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 16-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMInU16(RTIOPORT Port);
#else
DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
{
    uint16_t u16;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inw %w1, %w0\n\t"
                         : "=a" (u16)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u16 = __inword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      ax, dx
        mov     [u16], ax
    }
# endif
    return u16;
}
#endif

/**
 * Writes a 32-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u32     32-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
#else
DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outl %1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u32));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdword(Port, u32);

# else
    __asm
    {
        mov     dx, [Port]
        mov     eax, [u32]
        out     dx, eax
    }
# endif
}
#endif

/**
 * Reads a 32-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 32-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMInU32(RTIOPORT Port);
#else
DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inl %w1, %0\n\t"
                         : "=a" (u32)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = __indword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      eax, dx
        mov     [u32], eax
    }
# endif
    return u32;
}
#endif

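/*
 * Usage sketch (illustrative, not part of the header proper): reading a PCI
 * configuration dword via configuration mechanism #1 - address to 0xcf8,
 * data from 0xcfc. uBus, uDev, uFn and offReg are caller supplied, and
 * offReg must be dword aligned.
 *
 *     uint32_t const uAddr = UINT32_C(0x80000000)          // enable bit
 *                          | ((uint32_t)uBus << 16)
 *                          | ((uint32_t)(uDev & 0x1f) << 11)
 *                          | ((uint32_t)(uFn  &    7) <<  8)
 *                          | (offReg & 0xfc);
 *     ASMOutU32(0xcf8, uAddr);
 *     uint32_t const uValue = ASMInU32(0xcfc);
 */
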
/**
 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau8    Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const RT_FAR *pau8, size_t c);
#else
DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const RT_FAR *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsb\n\t"
                         : "+S" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outbytestring(Port, (unsigned char RT_FAR *)pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    esi, eax
        rep outsb
        xchg    esi, eax
    }
# endif
}
#endif

/**
 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau8    Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU8(RTIOPORT Port, uint8_t RT_FAR *pau8, size_t c);
#else
DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t RT_FAR *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insb\n\t"
                         : "+D" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inbytestring(Port, pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    edi, eax
        rep insb
        xchg    edi, eax
    }
# endif
}
#endif

/**
 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau16   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const RT_FAR *pau16, size_t c);
#else
DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const RT_FAR *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsw\n\t"
                         : "+S" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outwordstring(Port, (unsigned short RT_FAR *)pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    esi, eax
        rep outsw
        xchg    esi, eax
    }
# endif
}
#endif

/**
 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau16   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU16(RTIOPORT Port, uint16_t RT_FAR *pau16, size_t c);
#else
DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t RT_FAR *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insw\n\t"
                         : "+D" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inwordstring(Port, pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    edi, eax
        rep insw
        xchg    edi, eax
    }
# endif
}
#endif

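/*
 * Usage sketch (illustrative, not part of the header proper): reading one
 * 512-byte sector from the primary ATA data port using PIO, i.e. 256
 * 16-bit items. Assumes the device has already been given a read command
 * and is asserting DRQ.
 *
 *     uint16_t au16Sector[256];
 *     ASMInStrU16(0x1f0, au16Sector, RT_ELEMENTS(au16Sector));
 */
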
/**
 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau32   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const RT_FAR *pau32, size_t c);
#else
DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const RT_FAR *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsl\n\t"
                         : "+S" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdwordstring(Port, (unsigned long RT_FAR *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    esi, eax
        rep outsd
        xchg    esi, eax
    }
# endif
}
#endif

/**
 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau32   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU32(RTIOPORT Port, uint32_t RT_FAR *pau32, size_t c);
#else
DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t RT_FAR *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insl\n\t"
                         : "+D" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __indwordstring(Port, (unsigned long RT_FAR *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    edi, eax
        rep insd
        xchg    edi, eax
    }
# endif
}
#endif

/**
 * Invalidate page.
 *
 * @param   uPtr    Address of the page to invalidate.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidatePage(RTCCUINTXREG uPtr);
#else
DECLINLINE(void) ASMInvalidatePage(RTCCUINTXREG uPtr)
{
# if RT_INLINE_ASM_USES_INTRIN
    __invlpg((void RT_FAR *)uPtr);

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invlpg %0\n\t"
                         : : "m" (*(uint8_t RT_FAR *)(uintptr_t)uPtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uPtr]
        invlpg  [rax]
#  else
        mov     eax, [uPtr]
        invlpg  [eax]
#  endif
    }
# endif
}
#endif

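/*
 * Usage sketch (illustrative, not part of the header proper): after
 * modifying a page table entry, the stale TLB entry for that linear address
 * must be flushed. pPte, uPteNew and uPtrPage are hypothetical; only the
 * invalidation call comes from this header.
 *
 *     *pPte = uPteNew;                            // update the PTE
 *     ASMInvalidatePage((RTCCUINTXREG)uPtrPage);  // flush its TLB entry
 */
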
/**
 * Write back the internal caches and invalidate them.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMWriteBackAndInvalidateCaches(void);
#else
DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __wbinvd();

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wbinvd");
# else
    __asm
    {
        wbinvd
    }
# endif
}
#endif

/**
 * Invalidate internal and (perhaps) external caches without first
 * flushing dirty cache lines. Use with extreme care.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidateInternalCaches(void);
#else
DECLINLINE(void) ASMInvalidateInternalCaches(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invd");
# else
    __asm
    {
        invd
    }
# endif
}
#endif

/**
 * Memory load/store fence, waits for any pending writes and reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
 */
DECLINLINE(void) ASMMemoryFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");  /* mfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_mfence();
#else
    __asm
    {
        _emit   0x0f    /* mfence */
        _emit   0xae
        _emit   0xf0
    }
#endif
}

/**
 * Memory store fence, waits for any writes to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
 */
DECLINLINE(void) ASMWriteFenceSSE(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");  /* sfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_sfence();
#else
    __asm
    {
        _emit   0x0f    /* sfence */
        _emit   0xae
        _emit   0xf8
    }
#endif
}

/**
 * Memory load fence, waits for any pending reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
 */
DECLINLINE(void) ASMReadFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");  /* lfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_lfence();
#else
    __asm
    {
        _emit   0x0f    /* lfence */
        _emit   0xae
        _emit   0xe8
    }
#endif
}

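/*
 * Usage sketch (illustrative, not part of the header proper): plain x86
 * loads and stores are already strongly ordered, so these fences matter
 * mainly around non-temporal operations and write-combining memory. E.g.
 * after filling a buffer with movnti-style stores
 * (fillWithNonTemporalStores() and pfFrameReady are hypothetical):
 *
 *     fillWithNonTemporalStores(pvFrame, cb);
 *     ASMWriteFenceSSE();     // drain the NT stores ...
 *     *pfFrameReady = true;   // ... before publishing the ready flag
 */
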
#if !defined(_MSC_VER) || !defined(RT_ARCH_AMD64)

/**
 * Clear the AC bit in the EFLAGS register.
 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
 * Must be executed in ring-0 (R0).
 */
DECLINLINE(void) ASMClearAC(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0x01,0xca\n\t");  /* clac */
#else
    __asm
    {
        _emit   0x0f    /* clac */
        _emit   0x01
        _emit   0xca
    }
#endif
}


/**
 * Set the AC bit in the EFLAGS register.
 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
 * Must be executed in ring-0 (R0).
 */
DECLINLINE(void) ASMSetAC(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0x01,0xcb\n\t");  /* stac */
#else
    __asm
    {
        _emit   0x0f    /* stac */
        _emit   0x01
        _emit   0xcb
    }
#endif
}

#endif /* !_MSC_VER || !RT_ARCH_AMD64 */

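/*
 * Usage sketch (illustrative, not part of the header proper): with SMAP
 * enabled (CR4.SMAP), ring-0 code must bracket deliberate touches of
 * user-mode pages with stac/clac. The copy helper is hypothetical, and real
 * code also has to consider interrupts and faults inside the bracket.
 *
 *     ASMSetAC();                          // stac: permit user-page access
 *     myCopyFromUser(pvDst, pvUserSrc, cb);   // hypothetical helper
 *     ASMClearAC();                        // clac: re-arm SMAP
 */
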
/*
 * Include #pragma aux definitions for Watcom C/C++.
 */
#if defined(__WATCOMC__) && ARCH_BITS == 16
# define IPRT_ASM_AMD64_X86_WATCOM_16_INSTANTIATE
# undef IPRT_INCLUDED_asm_amd64_x86_watcom_16_h
# include "asm-amd64-x86-watcom-16.h"
#elif defined(__WATCOMC__) && ARCH_BITS == 32
# define IPRT_ASM_AMD64_X86_WATCOM_32_INSTANTIATE
# undef IPRT_INCLUDED_asm_amd64_x86_watcom_32_h
# include "asm-amd64-x86-watcom-32.h"
#endif

/** @} */
#endif /* !IPRT_INCLUDED_asm_amd64_x86_h */