/*
    Copyright © 1995-2012, The AROS Development Team. All rights reserved.

    Desc: assembler-level specific definitions for x86 CPU
*/

/* This file is very very incomplete :) */
/* Halt the CPU until the next interrupt arrives (privileged instruction). */
#define HALT __asm__ __volatile__("hlt")
/* Selector used for lgdt and lidt commands */
struct segment_selector
{
    /* NOTE(review): the field lines were lost in this copy; reconstructed from
       the standard IA-32 lgdt/lidt memory-operand layout (16-bit limit followed
       by the linear base) — confirm against the original header. */
    unsigned short size;    /* table limit: size in bytes minus one */
    unsigned long  base;    /* linear base address of the table     */
} __attribute__((packed));
/* 32-bit interrupt/trap gate descriptor (IDT entry) */
struct int_gate_32bit
{
    /* NOTE(review): the offset/selector field lines were lost in this copy;
       reconstructed from the standard IA-32 gate descriptor layout — only the
       bit-field line below was intact. Confirm against the original header. */
    unsigned short offset_low;      /* handler address, bits 0..15  */
    unsigned short selector;        /* code segment selector        */
    unsigned ist:3,                 /* unused on i386 (IST is x86-64) */
             __pad0:5,
             type:5,                /* gate type                    */
             dpl:2,                 /* descriptor privilege level   */
             p:1;                   /* present flag                 */
    unsigned short offset_high;     /* handler address, bits 16..31 */
} __attribute__((packed));
/* Segment descriptor in the GDT */
struct segment_desc
{
    /* NOTE(review): the struct tag and the base/limit field lines were lost in
       this copy; reconstructed from the standard IA-32 descriptor layout (only
       the two bit-field lines were intact). Confirm the tag name against the
       users of this header. */
    unsigned short limit_low;   /* segment limit, bits 0..15       */
    unsigned short base_low;    /* base address, bits 0..15        */
    unsigned char  base_mid;    /* base address, bits 16..23       */
    unsigned type:5,            /* descriptor type (incl. S bit)   */
             dpl:2,             /* descriptor privilege level      */
             p:1;               /* present flag                    */
    unsigned limit_high:4,      /* segment limit, bits 16..19      */
             avl:1,             /* available for OS use            */
             l:1,               /* 64-bit code segment flag        */
             d:1,               /* default operation size          */
             g:1;               /* granularity (limit in 4K units) */
    unsigned char  base_high;   /* base address, bits 24..31       */
} __attribute__((packed));
/*
 * TaskStateStructure, defined only in matter of making life (setup) easier.
 * The field order matches the hardware i386 TSS (CR3 at offset 0x1C, EIP at
 * 0x20, trap/iomap at 0x64).
 * NOTE(review): the struct tag, the r4..r7 line and the closing brace were
 * lost in this copy and were reconstructed — confirm against the original
 * header.
 */
struct tss
{
    unsigned int   link,                    /* link to previous task        */
                   ssp,                     /* Supervisor Stack Pointer     */
                   ssp_seg,                 /* SSP descriptor               */
                   t0, t1,                  /* Stack for CPL1 code          */
                   t2, t3,                  /* Stack for CPL2 code          */
                   cr3,                     /* used in paging               */
                   eip,                     /* Instruction pointer          */
                   eflags,                  /* Flags for given task         */
                   r0, r1, r2, r3,
                   r4, r5, r6, r7,          /* 8 general purpouse registers */
                   es, cs, ss, ds, fs, gs,  /* segment descriptors          */
                   ldt;                     /* LocalDescriptorTable         */
    unsigned short trap,                    /* trap flag                    */
                   iomap;                   /* iomap pointer                */
};
/*
 * Read the named control register, e.g. rdcr(cr0). Evaluates to the value
 * read (GCC statement-expression). Privileged when used on cr* registers.
 * NOTE(review): the "#define rdcr(reg)" line was lost in this copy; the name
 * was reconstructed to mirror wrcr() below — confirm against callers.
 */
#define rdcr(reg) \
    ({ long val; __asm__ volatile("mov %%" #reg ",%0":"=r"(val)); val; })
/* Write val into the named control register, e.g. wrcr(cr3, pd). Privileged. */
#define wrcr(reg, val) \
    do { __asm__ volatile("mov %0,%%" #reg::"r"(val)); } while(0)
/*
 * Read the 64-bit model-specific register msr_no: the low 32 bits (EAX) are
 * stored at *ret_lo, the high 32 bits (EDX) at *ret_hi.
 * Privileged (ring 0) operation — faults in user mode.
 */
static inline void __attribute__((always_inline)) rdmsr(uint32_t msr_no, uint32_t *ret_lo, uint32_t *ret_hi)
{
    uint32_t ret1, ret2;

    __asm__ volatile("rdmsr":"=a"(ret1),"=d"(ret2):"c"(msr_no));

    *ret_lo = ret1;
    *ret_hi = ret2;
}
/*
 * Read MSR msr_no and return only its low 32 bits (EAX).
 * Privileged (ring 0) operation — faults in user mode.
 *
 * Fix: RDMSR also writes EDX; the original asm declared only the EAX output,
 * so the compiler could assume EDX was preserved. Declare EDX as a (discarded)
 * output as well.
 */
static inline uint32_t __attribute__((always_inline)) rdmsri(uint32_t msr_no)
{
    uint32_t ret, hi;

    __asm__ volatile("rdmsr":"=a"(ret),"=d"(hi):"c"(msr_no));
    (void)hi;   /* high half intentionally discarded */

    return ret;
}
/*
 * Compare value stored at "addr" with "expected". If they are equal, function returns 1 and stores "xchg" value
 * at "addr". If *addr != expected, function returns 0. Either "expected" or current value at *addr are stored back
 * at *found (when found is non-NULL). The operation is atomic (lock cmpxchg).
 */
static inline int compare_and_exchange_long(uint32_t *addr, uint32_t expected, uint32_t xchg, uint32_t *found)
{
    char flag;          /* result of setz: 1 if the exchange happened */
    uint32_t ret;       /* value CMPXCHG leaves in EAX (old *addr or expected) */

    __asm__ volatile("lock cmpxchg %4, %0; setz %1":"+m"(*addr),"=q"(flag),"=a"(ret):"2"(expected),"r"(xchg):"memory","cc");

    if (found)
        *found = ret;

    return flag;
}
/*
 * 16-bit variant of compare_and_exchange_long(): atomically compare *lock with
 * "expected"; on match store "xchg" and return 1, otherwise return 0. The value
 * read from *lock is stored at *found (when found is non-NULL).
 */
static inline int compare_and_exchange_short(uint16_t *lock, uint16_t expected, uint16_t xchg, uint16_t *found)
{
    char flag;          /* result of setz: 1 if the exchange happened */
    uint16_t ret;       /* value CMPXCHG leaves in AX */

    __asm__ volatile("lock cmpxchg %4, %0; setz %1":"+m"(*lock),"=q"(flag),"=a"(ret):"2"(expected),"r"(xchg):"memory","cc");

    if (found)
        *found = ret;

    return flag;
}
/*
 * 8-bit variant of compare_and_exchange_long(): atomically compare *lock with
 * "expected"; on match store "xchg" and return 1, otherwise return 0. The value
 * read from *lock is stored at *found (when found is non-NULL).
 * "q" constraint: xchg must live in a byte-addressable register.
 */
static inline int compare_and_exchange_byte(uint8_t *lock, uint8_t expected, uint8_t xchg, uint8_t *found)
{
    char flag;          /* result of setz: 1 if the exchange happened */
    uint8_t ret;        /* value CMPXCHG leaves in AL */

    __asm__ volatile("lock cmpxchg %4, %0; setz %1":"+m"(*lock),"=q"(flag),"=a"(ret):"2"(expected),"q"(xchg):"memory","cc");

    if (found)
        *found = ret;

    return flag;
}
/*
 * Atomically set bit "bit" in the 32-bit word at *addr.
 * Returns the previous value of that bit (0 or 1).
 */
static inline int bit_test_and_set_long(uint32_t *addr, int32_t bit)
{
    char retval;        /* setc writes a single byte */

    __asm__ volatile("lock btsl %2, %0; setc %1":"+m"(*addr),"=q"(retval):"Ir"(bit):"memory");

    return retval;
}
/*
 * Atomically set bit "bit" in the 16-bit word at *addr.
 * Returns the previous value of that bit (0 or 1).
 *
 * Fix: btsw needs a 16-bit register operand; pass the bit number as uint16_t
 * so the register alternative of "Ir" emits a 16-bit register name.
 */
static inline int bit_test_and_set_short(uint16_t *addr, int32_t bit)
{
    char retval;        /* setc writes a single byte */

    __asm__ volatile("lock btsw %2, %0; setc %1":"+m"(*addr),"=q"(retval):"Ir"((uint16_t)bit):"memory");

    return retval;
}
/*
 * Atomically clear bit "bit" in the 32-bit word at *addr.
 * Returns the previous value of that bit (0 or 1).
 */
static inline int bit_test_and_clear_long(uint32_t *addr, int32_t bit)
{
    char retval;        /* setc writes a single byte */

    __asm__ volatile("lock btrl %2, %0; setc %1":"+m"(*addr),"=q"(retval):"Ir"(bit):"memory");

    return retval;
}
/*
 * Atomically clear bit "bit" in the 16-bit word at *addr.
 * Returns the previous value of that bit (0 or 1).
 *
 * Fix: btrw needs a 16-bit register operand; pass the bit number as uint16_t
 * so the register alternative of "Ir" emits a 16-bit register name.
 */
static inline int bit_test_and_clear_short(uint16_t *addr, int32_t bit)
{
    char retval;        /* setc writes a single byte */

    __asm__ volatile("lock btrw %2, %0; setc %1":"+m"(*addr),"=q"(retval):"Ir"((uint16_t)bit):"memory");

    return retval;
}
/*
 * Atomically flip bit "bit" in the 32-bit word at *addr.
 * Returns the previous value of that bit (0 or 1).
 */
static inline int bit_test_and_complement_long(uint32_t *addr, int32_t bit)
{
    char retval;        /* setc writes a single byte */

    __asm__ volatile("lock btcl %2, %0; setc %1":"+m"(*addr),"=q"(retval):"Ir"(bit):"memory");

    return retval;
}
/*
 * Atomically flip bit "bit" in the 16-bit word at *addr.
 * Returns the previous value of that bit (0 or 1).
 *
 * Fix: btcw needs a 16-bit register operand; pass the bit number as uint16_t
 * so the register alternative of "Ir" emits a 16-bit register name.
 */
static inline int bit_test_and_complement_short(uint16_t *addr, int32_t bit)
{
    char retval;        /* setc writes a single byte */

    __asm__ volatile("lock btcw %2, %0; setc %1":"+m"(*addr),"=q"(retval):"Ir"((uint16_t)bit):"memory");

    return retval;
}