1 /**************************************************************************
3 * Copyright (C) 1999-2005 Brian Paul All Rights Reserved.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * BRIAN PAUL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
19 * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22 **************************************************************************/
24 #include "pipe/p_config.h"
25 #include "util/u_cpu_detect.h"
27 #if defined(PIPE_ARCH_X86) || defined(PIPE_ARCH_X86_64)
29 #include "pipe/p_compiler.h"
30 #include "util/u_debug.h"
31 #include "util/u_pointer.h"
33 #include "rtasm_execmem.h"
34 #include "rtasm_x86sse.h"
43 void x86_print_reg( struct x86_reg reg
)
45 if (reg
.mod
!= mod_REG
)
51 case reg_AX
: debug_printf( "EAX" ); break;
52 case reg_CX
: debug_printf( "ECX" ); break;
53 case reg_DX
: debug_printf( "EDX" ); break;
54 case reg_BX
: debug_printf( "EBX" ); break;
55 case reg_SP
: debug_printf( "ESP" ); break;
56 case reg_BP
: debug_printf( "EBP" ); break;
57 case reg_SI
: debug_printf( "ESI" ); break;
58 case reg_DI
: debug_printf( "EDI" ); break;
62 debug_printf( "MMX%u", reg
.idx
);
65 debug_printf( "XMM%u", reg
.idx
);
68 debug_printf( "fp%u", reg
.idx
);
72 if (reg
.mod
== mod_DISP8
||
73 reg
.mod
== mod_DISP32
)
74 debug_printf("+%d", reg
.disp
);
76 if (reg
.mod
!= mod_REG
)
82 #define DUMP_START() debug_printf( "\n" )
83 #define DUMP_END() debug_printf( "\n" )
86 const char *foo = __FUNCTION__; \
87 while (*foo && *foo != '_') \
91 debug_printf( "\n%4x %14s ", p->csr - p->store, foo ); \
94 #define DUMP_I( I ) do { \
96 debug_printf( "%u", I ); \
99 #define DUMP_R( R0 ) do { \
101 x86_print_reg( R0 ); \
104 #define DUMP_RR( R0, R1 ) do { \
106 x86_print_reg( R0 ); \
107 debug_printf( ", " ); \
108 x86_print_reg( R1 ); \
111 #define DUMP_RI( R0, I ) do { \
113 x86_print_reg( R0 ); \
114 debug_printf( ", %u", I ); \
117 #define DUMP_RRI( R0, R1, I ) do { \
119 x86_print_reg( R0 ); \
120 debug_printf( ", " ); \
121 x86_print_reg( R1 ); \
122 debug_printf( ", %u", I ); \
132 #define DUMP_RR( R0, R1 )
133 #define DUMP_RI( R0, I )
134 #define DUMP_RRI( R0, R1, I )
139 static void do_realloc( struct x86_function
*p
)
141 if (p
->store
== p
->error_overflow
) {
144 else if (p
->size
== 0) {
146 p
->store
= rtasm_exec_malloc(p
->size
);
150 uintptr_t used
= pointer_to_uintptr( p
->csr
) - pointer_to_uintptr( p
->store
);
151 unsigned char *tmp
= p
->store
;
153 p
->store
= rtasm_exec_malloc(p
->size
);
156 memcpy(p
->store
, tmp
, used
);
157 p
->csr
= p
->store
+ used
;
163 rtasm_exec_free(tmp
);
166 if (p
->store
== NULL
) {
167 p
->store
= p
->csr
= p
->error_overflow
;
168 p
->size
= sizeof(p
->error_overflow
);
172 /* Emit bytes to the instruction stream:
174 static unsigned char *reserve( struct x86_function
*p
, int bytes
)
176 if (p
->csr
+ bytes
- p
->store
> (int) p
->size
)
180 unsigned char *csr
= p
->csr
;
188 static void emit_1b( struct x86_function
*p
, char b0
)
190 char *csr
= (char *)reserve(p
, 1);
194 static void emit_1i( struct x86_function
*p
, int i0
)
196 int *icsr
= (int *)reserve(p
, sizeof(i0
));
200 static void emit_1ub( struct x86_function
*p
, unsigned char b0
)
202 unsigned char *csr
= reserve(p
, 1);
206 static void emit_2ub( struct x86_function
*p
, unsigned char b0
, unsigned char b1
)
208 unsigned char *csr
= reserve(p
, 2);
213 static void emit_3ub( struct x86_function
*p
, unsigned char b0
, unsigned char b1
, unsigned char b2
)
215 unsigned char *csr
= reserve(p
, 3);
222 /* Build a modRM byte + possible displacement. No treatment of SIB
223 * indexing. BZZT - no way to encode an absolute address.
225 * This is the "/r" field in the x86 manuals...
227 static void emit_modrm( struct x86_function
*p
,
229 struct x86_reg regmem
)
231 unsigned char val
= 0;
233 assert(reg
.mod
== mod_REG
);
235 /* TODO: support extended x86-64 registers */
237 assert(regmem
.idx
< 8);
239 val
|= regmem
.mod
<< 6; /* mod field */
240 val
|= reg
.idx
<< 3; /* reg field */
241 val
|= regmem
.idx
; /* r/m field */
245 /* Oh-oh we've stumbled into the SIB thing.
247 if (regmem
.file
== file_REG32
&&
248 regmem
.idx
== reg_SP
&&
249 regmem
.mod
!= mod_REG
) {
250 emit_1ub(p
, 0x24); /* simplistic! */
253 switch (regmem
.mod
) {
258 emit_1b(p
, (char) regmem
.disp
);
261 emit_1i(p
, regmem
.disp
);
269 /* Emits the "/0".."/7" specialized versions of the modrm ("/r") bytes.
271 static void emit_modrm_noreg( struct x86_function
*p
,
273 struct x86_reg regmem
)
275 struct x86_reg dummy
= x86_make_reg(file_REG32
, op
);
276 emit_modrm(p
, dummy
, regmem
);
279 /* Many x86 instructions have two opcodes to cope with the situations
280 * where the destination is a register or memory reference
281 * respectively. This function selects the correct opcode based on
282 * the arguments presented.
284 static void emit_op_modrm( struct x86_function
*p
,
285 unsigned char op_dst_is_reg
,
286 unsigned char op_dst_is_mem
,
292 emit_1ub(p
, op_dst_is_reg
);
293 emit_modrm(p
, dst
, src
);
298 assert(src
.mod
== mod_REG
);
299 emit_1ub(p
, op_dst_is_mem
);
300 emit_modrm(p
, src
, dst
);
314 /* Create and manipulate registers and regmem values:
316 struct x86_reg
x86_make_reg( enum x86_reg_file file
,
317 enum x86_reg_name idx
)
329 struct x86_reg
x86_make_disp( struct x86_reg reg
,
332 assert(reg
.file
== file_REG32
);
334 if (reg
.mod
== mod_REG
)
339 if (reg
.disp
== 0 && reg
.idx
!= reg_BP
)
340 reg
.mod
= mod_INDIRECT
;
341 else if (reg
.disp
<= 127 && reg
.disp
>= -128)
344 reg
.mod
= mod_DISP32
;
349 struct x86_reg
x86_deref( struct x86_reg reg
)
351 return x86_make_disp(reg
, 0);
354 struct x86_reg
x86_get_base_reg( struct x86_reg reg
)
356 return x86_make_reg( reg
.file
, reg
.idx
);
359 int x86_get_label( struct x86_function
*p
)
361 return p
->csr
- p
->store
;
366 /***********************************************************************
371 void x64_rexw(struct x86_function
*p
)
373 if(x86_target(p
) != X86_32
)
377 void x86_jcc( struct x86_function
*p
,
381 int offset
= label
- (x86_get_label(p
) + 2);
385 /*assert(p->csr - p->store > -offset);*/
386 if (p
->csr
- p
->store
<= -offset
) {
387 /* probably out of memory (using the error_overflow buffer) */
392 if (offset
<= 127 && offset
>= -128) {
393 emit_1ub(p
, 0x70 + cc
);
394 emit_1b(p
, (char) offset
);
397 offset
= label
- (x86_get_label(p
) + 6);
398 emit_2ub(p
, 0x0f, 0x80 + cc
);
403 /* Always use a 32bit offset for forward jumps:
405 int x86_jcc_forward( struct x86_function
*p
,
409 emit_2ub(p
, 0x0f, 0x80 + cc
);
411 return x86_get_label(p
);
414 int x86_jmp_forward( struct x86_function
*p
)
419 return x86_get_label(p
);
422 int x86_call_forward( struct x86_function
*p
)
428 return x86_get_label(p
);
431 /* Fixup offset from forward jump:
433 void x86_fixup_fwd_jump( struct x86_function
*p
,
436 *(int *)(p
->store
+ fixup
- 4) = x86_get_label(p
) - fixup
;
439 void x86_jmp( struct x86_function
*p
, int label
)
443 emit_1i(p
, label
- x86_get_label(p
) - 4);
446 void x86_call( struct x86_function
*p
, struct x86_reg reg
)
450 emit_modrm_noreg(p
, 2, reg
);
454 void x86_mov_reg_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
457 assert(dst
.file
== file_REG32
);
458 assert(dst
.mod
== mod_REG
);
459 emit_1ub(p
, 0xb8 + dst
.idx
);
463 void x86_mov_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
466 if(dst
.mod
== mod_REG
)
467 x86_mov_reg_imm(p
, dst
, imm
);
471 emit_modrm_noreg(p
, 0, dst
);
476 void x86_mov16_imm( struct x86_function
*p
, struct x86_reg dst
, uint16_t imm
)
480 if(dst
.mod
== mod_REG
)
482 emit_1ub(p
, 0xb8 + dst
.idx
);
483 emit_2ub(p
, imm
& 0xff, imm
>> 8);
488 emit_modrm_noreg(p
, 0, dst
);
489 emit_2ub(p
, imm
& 0xff, imm
>> 8);
493 void x86_mov8_imm( struct x86_function
*p
, struct x86_reg dst
, uint8_t imm
)
496 if(dst
.mod
== mod_REG
)
498 emit_1ub(p
, 0xb0 + dst
.idx
);
504 emit_modrm_noreg(p
, 0, dst
);
510 * Immediate group 1 instructions.
513 x86_group1_imm( struct x86_function
*p
,
514 unsigned op
, struct x86_reg dst
, int imm
)
516 assert(dst
.file
== file_REG32
);
517 assert(dst
.mod
== mod_REG
);
518 if(-0x80 <= imm
&& imm
< 0x80) {
520 emit_modrm_noreg(p
, op
, dst
);
521 emit_1b(p
, (char)imm
);
525 emit_modrm_noreg(p
, op
, dst
);
530 void x86_add_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
533 x86_group1_imm(p
, 0, dst
, imm
);
536 void x86_or_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
539 x86_group1_imm(p
, 1, dst
, imm
);
542 void x86_and_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
545 x86_group1_imm(p
, 4, dst
, imm
);
548 void x86_sub_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
551 x86_group1_imm(p
, 5, dst
, imm
);
554 void x86_xor_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
557 x86_group1_imm(p
, 6, dst
, imm
);
560 void x86_cmp_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
563 x86_group1_imm(p
, 7, dst
, imm
);
567 void x86_push( struct x86_function
*p
,
571 if (reg
.mod
== mod_REG
)
572 emit_1ub(p
, 0x50 + reg
.idx
);
576 emit_modrm_noreg(p
, 6, reg
);
580 p
->stack_offset
+= sizeof(void*);
583 void x86_push_imm32( struct x86_function
*p
,
590 p
->stack_offset
+= sizeof(void*);
594 void x86_pop( struct x86_function
*p
,
598 assert(reg
.mod
== mod_REG
);
599 emit_1ub(p
, 0x58 + reg
.idx
);
600 p
->stack_offset
-= sizeof(void*);
603 void x86_inc( struct x86_function
*p
,
607 if(x86_target(p
) == X86_32
&& reg
.mod
== mod_REG
)
609 emit_1ub(p
, 0x40 + reg
.idx
);
613 emit_modrm_noreg(p
, 0, reg
);
616 void x86_dec( struct x86_function
*p
,
620 if(x86_target(p
) == X86_32
&& reg
.mod
== mod_REG
)
622 emit_1ub(p
, 0x48 + reg
.idx
);
626 emit_modrm_noreg(p
, 1, reg
);
629 void x86_ret( struct x86_function
*p
)
632 assert(p
->stack_offset
== 0);
636 void x86_retw( struct x86_function
*p
, unsigned short imm
)
639 emit_3ub(p
, 0xc2, imm
& 0xff, (imm
>> 8) & 0xff);
642 void x86_sahf( struct x86_function
*p
)
648 void x86_mov( struct x86_function
*p
,
653 /* special hack for reading arguments until we support x86-64 registers everywhere */
654 if(src
.mod
== mod_REG
&& dst
.mod
== mod_REG
&& (src
.idx
>= 8 || dst
.idx
>= 8))
669 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
672 void x86_mov16( struct x86_function
*p
,
678 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
681 void x86_mov8( struct x86_function
*p
,
686 emit_op_modrm( p
, 0x8a, 0x88, dst
, src
);
689 void x64_mov64( struct x86_function
*p
,
695 assert(x86_target(p
) != X86_32
);
697 /* special hack for reading arguments until we support x86-64 registers everywhere */
698 if(src
.mod
== mod_REG
&& dst
.mod
== mod_REG
&& (src
.idx
>= 8 || dst
.idx
>= 8))
712 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
715 void x86_movzx8(struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
718 emit_2ub(p
, 0x0f, 0xb6);
719 emit_modrm(p
, dst
, src
);
722 void x86_movzx16(struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
725 emit_2ub(p
, 0x0f, 0xb7);
726 emit_modrm(p
, dst
, src
);
729 void x86_cmovcc( struct x86_function
*p
,
734 DUMP_RRI( dst
, src
, cc
);
735 emit_2ub( p
, 0x0f, 0x40 + cc
);
736 emit_modrm( p
, dst
, src
);
739 void x86_xor( struct x86_function
*p
,
744 emit_op_modrm( p
, 0x33, 0x31, dst
, src
);
747 void x86_cmp( struct x86_function
*p
,
752 emit_op_modrm( p
, 0x3b, 0x39, dst
, src
);
755 void x86_lea( struct x86_function
*p
,
761 emit_modrm( p
, dst
, src
);
764 void x86_test( struct x86_function
*p
,
770 emit_modrm( p
, dst
, src
);
773 void x86_add( struct x86_function
*p
,
778 emit_op_modrm(p
, 0x03, 0x01, dst
, src
);
781 /* Calculate EAX * src, results in EDX:EAX.
783 void x86_mul( struct x86_function
*p
,
788 emit_modrm_noreg(p
, 4, src
);
792 void x86_imul( struct x86_function
*p
,
797 emit_2ub(p
, X86_TWOB
, 0xAF);
798 emit_modrm(p
, dst
, src
);
802 void x86_sub( struct x86_function
*p
,
807 emit_op_modrm(p
, 0x2b, 0x29, dst
, src
);
810 void x86_or( struct x86_function
*p
,
815 emit_op_modrm( p
, 0x0b, 0x09, dst
, src
);
818 void x86_and( struct x86_function
*p
,
823 emit_op_modrm( p
, 0x23, 0x21, dst
, src
);
826 void x86_div( struct x86_function
*p
,
829 assert(src
.file
== file_REG32
&& src
.mod
== mod_REG
);
830 emit_op_modrm(p
, 0xf7, 0, x86_make_reg(file_REG32
, 6), src
);
833 void x86_bswap( struct x86_function
*p
, struct x86_reg reg
)
836 assert(reg
.file
== file_REG32
);
837 assert(reg
.mod
== mod_REG
);
838 emit_2ub(p
, 0x0f, 0xc8 + reg
.idx
);
841 void x86_shr_imm( struct x86_function
*p
, struct x86_reg reg
, unsigned imm
)
847 emit_modrm_noreg(p
, 5, reg
);
852 emit_modrm_noreg(p
, 5, reg
);
857 void x86_sar_imm( struct x86_function
*p
, struct x86_reg reg
, unsigned imm
)
863 emit_modrm_noreg(p
, 7, reg
);
868 emit_modrm_noreg(p
, 7, reg
);
873 void x86_shl_imm( struct x86_function
*p
, struct x86_reg reg
, unsigned imm
)
879 emit_modrm_noreg(p
, 4, reg
);
884 emit_modrm_noreg(p
, 4, reg
);
890 /***********************************************************************
894 void sse_prefetchnta( struct x86_function
*p
, struct x86_reg ptr
)
897 assert(ptr
.mod
!= mod_REG
);
898 emit_2ub(p
, 0x0f, 0x18);
899 emit_modrm_noreg(p
, 0, ptr
);
902 void sse_prefetch0( struct x86_function
*p
, struct x86_reg ptr
)
905 assert(ptr
.mod
!= mod_REG
);
906 emit_2ub(p
, 0x0f, 0x18);
907 emit_modrm_noreg(p
, 1, ptr
);
910 void sse_prefetch1( struct x86_function
*p
, struct x86_reg ptr
)
913 assert(ptr
.mod
!= mod_REG
);
914 emit_2ub(p
, 0x0f, 0x18);
915 emit_modrm_noreg(p
, 2, ptr
);
918 void sse_movntps( struct x86_function
*p
,
924 assert(dst
.mod
!= mod_REG
);
925 assert(src
.mod
== mod_REG
);
926 emit_2ub(p
, 0x0f, 0x2b);
927 emit_modrm(p
, src
, dst
);
933 void sse_movss( struct x86_function
*p
,
938 emit_2ub(p
, 0xF3, X86_TWOB
);
939 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
942 void sse_movaps( struct x86_function
*p
,
947 emit_1ub(p
, X86_TWOB
);
948 emit_op_modrm( p
, 0x28, 0x29, dst
, src
);
951 void sse_movups( struct x86_function
*p
,
956 emit_1ub(p
, X86_TWOB
);
957 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
960 void sse_movhps( struct x86_function
*p
,
965 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
966 emit_1ub(p
, X86_TWOB
);
967 emit_op_modrm( p
, 0x16, 0x17, dst
, src
); /* cf movlhps */
970 void sse_movlps( struct x86_function
*p
,
975 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
976 emit_1ub(p
, X86_TWOB
);
977 emit_op_modrm( p
, 0x12, 0x13, dst
, src
); /* cf movhlps */
980 void sse_maxps( struct x86_function
*p
,
985 emit_2ub(p
, X86_TWOB
, 0x5F);
986 emit_modrm( p
, dst
, src
);
989 void sse_maxss( struct x86_function
*p
,
994 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5F);
995 emit_modrm( p
, dst
, src
);
998 void sse_divss( struct x86_function
*p
,
1000 struct x86_reg src
)
1002 DUMP_RR( dst
, src
);
1003 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5E);
1004 emit_modrm( p
, dst
, src
);
1007 void sse_minps( struct x86_function
*p
,
1009 struct x86_reg src
)
1011 DUMP_RR( dst
, src
);
1012 emit_2ub(p
, X86_TWOB
, 0x5D);
1013 emit_modrm( p
, dst
, src
);
1016 void sse_subps( struct x86_function
*p
,
1018 struct x86_reg src
)
1020 DUMP_RR( dst
, src
);
1021 emit_2ub(p
, X86_TWOB
, 0x5C);
1022 emit_modrm( p
, dst
, src
);
1025 void sse_mulps( struct x86_function
*p
,
1027 struct x86_reg src
)
1029 DUMP_RR( dst
, src
);
1030 emit_2ub(p
, X86_TWOB
, 0x59);
1031 emit_modrm( p
, dst
, src
);
1034 void sse_mulss( struct x86_function
*p
,
1036 struct x86_reg src
)
1038 DUMP_RR( dst
, src
);
1039 emit_3ub(p
, 0xF3, X86_TWOB
, 0x59);
1040 emit_modrm( p
, dst
, src
);
1043 void sse_addps( struct x86_function
*p
,
1045 struct x86_reg src
)
1047 DUMP_RR( dst
, src
);
1048 emit_2ub(p
, X86_TWOB
, 0x58);
1049 emit_modrm( p
, dst
, src
);
1052 void sse_addss( struct x86_function
*p
,
1054 struct x86_reg src
)
1056 DUMP_RR( dst
, src
);
1057 emit_3ub(p
, 0xF3, X86_TWOB
, 0x58);
1058 emit_modrm( p
, dst
, src
);
1061 void sse_andnps( struct x86_function
*p
,
1063 struct x86_reg src
)
1065 DUMP_RR( dst
, src
);
1066 emit_2ub(p
, X86_TWOB
, 0x55);
1067 emit_modrm( p
, dst
, src
);
1070 void sse_andps( struct x86_function
*p
,
1072 struct x86_reg src
)
1074 DUMP_RR( dst
, src
);
1075 emit_2ub(p
, X86_TWOB
, 0x54);
1076 emit_modrm( p
, dst
, src
);
1079 void sse_rsqrtps( struct x86_function
*p
,
1081 struct x86_reg src
)
1083 DUMP_RR( dst
, src
);
1084 emit_2ub(p
, X86_TWOB
, 0x52);
1085 emit_modrm( p
, dst
, src
);
1088 void sse_rsqrtss( struct x86_function
*p
,
1090 struct x86_reg src
)
1092 DUMP_RR( dst
, src
);
1093 emit_3ub(p
, 0xF3, X86_TWOB
, 0x52);
1094 emit_modrm( p
, dst
, src
);
1098 void sse_movhlps( struct x86_function
*p
,
1100 struct x86_reg src
)
1102 DUMP_RR( dst
, src
);
1103 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
1104 emit_2ub(p
, X86_TWOB
, 0x12);
1105 emit_modrm( p
, dst
, src
);
1108 void sse_movlhps( struct x86_function
*p
,
1110 struct x86_reg src
)
1112 DUMP_RR( dst
, src
);
1113 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
1114 emit_2ub(p
, X86_TWOB
, 0x16);
1115 emit_modrm( p
, dst
, src
);
1118 void sse_orps( struct x86_function
*p
,
1120 struct x86_reg src
)
1122 DUMP_RR( dst
, src
);
1123 emit_2ub(p
, X86_TWOB
, 0x56);
1124 emit_modrm( p
, dst
, src
);
1127 void sse_xorps( struct x86_function
*p
,
1129 struct x86_reg src
)
1131 DUMP_RR( dst
, src
);
1132 emit_2ub(p
, X86_TWOB
, 0x57);
1133 emit_modrm( p
, dst
, src
);
1136 void sse_cvtps2pi( struct x86_function
*p
,
1138 struct x86_reg src
)
1140 DUMP_RR( dst
, src
);
1141 assert(dst
.file
== file_MMX
&&
1142 (src
.file
== file_XMM
|| src
.mod
!= mod_REG
));
1146 emit_2ub(p
, X86_TWOB
, 0x2d);
1147 emit_modrm( p
, dst
, src
);
1150 void sse2_cvtdq2ps( struct x86_function
*p
,
1152 struct x86_reg src
)
1154 DUMP_RR( dst
, src
);
1155 emit_2ub(p
, X86_TWOB
, 0x5b);
1156 emit_modrm( p
, dst
, src
);
1160 /* Shufps can also be used to implement a reduced swizzle when dest ==
1163 void sse_shufps( struct x86_function
*p
,
1168 DUMP_RRI( dst
, src
, shuf
);
1169 emit_2ub(p
, X86_TWOB
, 0xC6);
1170 emit_modrm(p
, dst
, src
);
1174 void sse_unpckhps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1176 DUMP_RR( dst
, src
);
1177 emit_2ub( p
, X86_TWOB
, 0x15 );
1178 emit_modrm( p
, dst
, src
);
1181 void sse_unpcklps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1183 DUMP_RR( dst
, src
);
1184 emit_2ub( p
, X86_TWOB
, 0x14 );
1185 emit_modrm( p
, dst
, src
);
1188 void sse_cmpps( struct x86_function
*p
,
1193 DUMP_RRI( dst
, src
, cc
);
1194 emit_2ub(p
, X86_TWOB
, 0xC2);
1195 emit_modrm(p
, dst
, src
);
1199 void sse_pmovmskb( struct x86_function
*p
,
1203 DUMP_RR( dst
, src
);
1204 emit_3ub(p
, 0x66, X86_TWOB
, 0xD7);
1205 emit_modrm(p
, dst
, src
);
1208 void sse_movmskps( struct x86_function
*p
,
1212 DUMP_RR( dst
, src
);
1213 emit_2ub(p
, X86_TWOB
, 0x50);
1214 emit_modrm(p
, dst
, src
);
1217 /***********************************************************************
1221 void sse2_movd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1224 emit_2ub(p
, 0x66, 0x0f);
1225 if(dst
.mod
== mod_REG
&& dst
.file
== file_REG32
)
1228 emit_modrm(p
, src
, dst
);
1232 emit_op_modrm(p
, 0x6e, 0x7e, dst
, src
);
1236 void sse2_movq( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1241 emit_3ub(p
, 0xf3, 0x0f, 0x7e);
1242 emit_modrm(p
, dst
, src
);
1247 assert(src
.mod
== mod_REG
);
1248 emit_3ub(p
, 0x66, 0x0f, 0xd6);
1249 emit_modrm(p
, src
, dst
);
1257 void sse2_movdqu( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1260 emit_2ub(p
, 0xf3, 0x0f);
1261 emit_op_modrm(p
, 0x6f, 0x7f, dst
, src
);
1264 void sse2_movdqa( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1267 emit_2ub(p
, 0x66, 0x0f);
1268 emit_op_modrm(p
, 0x6f, 0x7f, dst
, src
);
1271 void sse2_movsd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1274 emit_2ub(p
, 0xf2, 0x0f);
1275 emit_op_modrm(p
, 0x10, 0x11, dst
, src
);
1278 void sse2_movupd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1281 emit_2ub(p
, 0x66, 0x0f);
1282 emit_op_modrm(p
, 0x10, 0x11, dst
, src
);
1285 void sse2_movapd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1288 emit_2ub(p
, 0x66, 0x0f);
1289 emit_op_modrm(p
, 0x28, 0x29, dst
, src
);
1293 * Perform a reduced swizzle:
1295 void sse2_pshufd( struct x86_function
*p
,
1300 DUMP_RRI( dst
, src
, shuf
);
1301 emit_3ub(p
, 0x66, X86_TWOB
, 0x70);
1302 emit_modrm(p
, dst
, src
);
1306 void sse2_pshuflw( struct x86_function
*p
,
1311 DUMP_RRI( dst
, src
, shuf
);
1312 emit_3ub(p
, 0xf2, X86_TWOB
, 0x70);
1313 emit_modrm(p
, dst
, src
);
1317 void sse2_pshufhw( struct x86_function
*p
,
1322 DUMP_RRI( dst
, src
, shuf
);
1323 emit_3ub(p
, 0xf3, X86_TWOB
, 0x70);
1324 emit_modrm(p
, dst
, src
);
1328 void sse2_cvttps2dq( struct x86_function
*p
,
1330 struct x86_reg src
)
1332 DUMP_RR( dst
, src
);
1333 emit_3ub( p
, 0xF3, X86_TWOB
, 0x5B );
1334 emit_modrm( p
, dst
, src
);
1337 void sse2_cvtps2dq( struct x86_function
*p
,
1339 struct x86_reg src
)
1341 DUMP_RR( dst
, src
);
1342 emit_3ub(p
, 0x66, X86_TWOB
, 0x5B);
1343 emit_modrm( p
, dst
, src
);
1346 void sse2_cvtsd2ss( struct x86_function
*p
,
1348 struct x86_reg src
)
1350 DUMP_RR( dst
, src
);
1351 emit_3ub(p
, 0xf2, 0x0f, 0x5a);
1352 emit_modrm( p
, dst
, src
);
1355 void sse2_cvtpd2ps( struct x86_function
*p
,
1357 struct x86_reg src
)
1359 DUMP_RR( dst
, src
);
1360 emit_3ub(p
, 0x66, 0x0f, 0x5a);
1361 emit_modrm( p
, dst
, src
);
1364 void sse2_packssdw( struct x86_function
*p
,
1366 struct x86_reg src
)
1368 DUMP_RR( dst
, src
);
1369 emit_3ub(p
, 0x66, X86_TWOB
, 0x6B);
1370 emit_modrm( p
, dst
, src
);
1373 void sse2_packsswb( struct x86_function
*p
,
1375 struct x86_reg src
)
1377 DUMP_RR( dst
, src
);
1378 emit_3ub(p
, 0x66, X86_TWOB
, 0x63);
1379 emit_modrm( p
, dst
, src
);
1382 void sse2_packuswb( struct x86_function
*p
,
1384 struct x86_reg src
)
1386 DUMP_RR( dst
, src
);
1387 emit_3ub(p
, 0x66, X86_TWOB
, 0x67);
1388 emit_modrm( p
, dst
, src
);
1391 void sse2_punpcklbw( struct x86_function
*p
,
1393 struct x86_reg src
)
1395 DUMP_RR( dst
, src
);
1396 emit_3ub(p
, 0x66, X86_TWOB
, 0x60);
1397 emit_modrm( p
, dst
, src
);
1400 void sse2_punpcklwd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1402 DUMP_RR( dst
, src
);
1403 emit_3ub(p
, 0x66, 0x0f, 0x61);
1404 emit_modrm( p
, dst
, src
);
1407 void sse2_punpckldq( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1409 DUMP_RR( dst
, src
);
1410 emit_3ub(p
, 0x66, 0x0f, 0x62);
1411 emit_modrm( p
, dst
, src
);
1414 void sse2_punpcklqdq( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1416 DUMP_RR( dst
, src
);
1417 emit_3ub(p
, 0x66, 0x0f, 0x6c);
1418 emit_modrm( p
, dst
, src
);
1421 void sse2_psllw_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1424 emit_3ub(p
, 0x66, 0x0f, 0x71);
1425 emit_modrm_noreg(p
, 6, dst
);
1429 void sse2_pslld_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1432 emit_3ub(p
, 0x66, 0x0f, 0x72);
1433 emit_modrm_noreg(p
, 6, dst
);
1437 void sse2_psllq_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1440 emit_3ub(p
, 0x66, 0x0f, 0x73);
1441 emit_modrm_noreg(p
, 6, dst
);
1445 void sse2_psrlw_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1448 emit_3ub(p
, 0x66, 0x0f, 0x71);
1449 emit_modrm_noreg(p
, 2, dst
);
1453 void sse2_psrld_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1456 emit_3ub(p
, 0x66, 0x0f, 0x72);
1457 emit_modrm_noreg(p
, 2, dst
);
1461 void sse2_psrlq_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1464 emit_3ub(p
, 0x66, 0x0f, 0x73);
1465 emit_modrm_noreg(p
, 2, dst
);
1469 void sse2_psraw_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1472 emit_3ub(p
, 0x66, 0x0f, 0x71);
1473 emit_modrm_noreg(p
, 4, dst
);
1477 void sse2_psrad_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1480 emit_3ub(p
, 0x66, 0x0f, 0x72);
1481 emit_modrm_noreg(p
, 4, dst
);
1485 void sse2_por( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1488 emit_3ub(p
, 0x66, 0x0f, 0xeb);
1489 emit_modrm(p
, dst
, src
);
1492 void sse2_rcpps( struct x86_function
*p
,
1494 struct x86_reg src
)
1496 DUMP_RR( dst
, src
);
1497 emit_2ub(p
, X86_TWOB
, 0x53);
1498 emit_modrm( p
, dst
, src
);
1501 void sse2_rcpss( struct x86_function
*p
,
1503 struct x86_reg src
)
1505 DUMP_RR( dst
, src
);
1506 emit_3ub(p
, 0xF3, X86_TWOB
, 0x53);
1507 emit_modrm( p
, dst
, src
);
1510 /***********************************************************************
1513 static void note_x87_pop( struct x86_function
*p
)
1516 assert(p
->x87_stack
>= 0);
1519 static void note_x87_push( struct x86_function
*p
)
1522 assert(p
->x87_stack
<= 7);
1525 void x87_assert_stack_empty( struct x86_function
*p
)
1527 assert (p
->x87_stack
== 0);
1531 void x87_fist( struct x86_function
*p
, struct x86_reg dst
)
1535 emit_modrm_noreg(p
, 2, dst
);
1538 void x87_fistp( struct x86_function
*p
, struct x86_reg dst
)
1542 emit_modrm_noreg(p
, 3, dst
);
1546 void x87_fild( struct x86_function
*p
, struct x86_reg arg
)
1550 emit_modrm_noreg(p
, 0, arg
);
1554 void x87_fldz( struct x86_function
*p
)
1557 emit_2ub(p
, 0xd9, 0xee);
1562 void x87_fldcw( struct x86_function
*p
, struct x86_reg arg
)
1565 assert(arg
.file
== file_REG32
);
1566 assert(arg
.mod
!= mod_REG
);
1568 emit_modrm_noreg(p
, 5, arg
);
1571 void x87_fld1( struct x86_function
*p
)
1574 emit_2ub(p
, 0xd9, 0xe8);
1578 void x87_fldl2e( struct x86_function
*p
)
1581 emit_2ub(p
, 0xd9, 0xea);
1585 void x87_fldln2( struct x86_function
*p
)
1588 emit_2ub(p
, 0xd9, 0xed);
1592 void x87_fwait( struct x86_function
*p
)
1598 void x87_fnclex( struct x86_function
*p
)
1601 emit_2ub(p
, 0xdb, 0xe2);
1604 void x87_fclex( struct x86_function
*p
)
1610 void x87_fcmovb( struct x86_function
*p
, struct x86_reg arg
)
1613 assert(arg
.file
== file_x87
);
1614 emit_2ub(p
, 0xda, 0xc0+arg
.idx
);
1617 void x87_fcmove( struct x86_function
*p
, struct x86_reg arg
)
1620 assert(arg
.file
== file_x87
);
1621 emit_2ub(p
, 0xda, 0xc8+arg
.idx
);
1624 void x87_fcmovbe( struct x86_function
*p
, struct x86_reg arg
)
1627 assert(arg
.file
== file_x87
);
1628 emit_2ub(p
, 0xda, 0xd0+arg
.idx
);
1631 void x87_fcmovnb( struct x86_function
*p
, struct x86_reg arg
)
1634 assert(arg
.file
== file_x87
);
1635 emit_2ub(p
, 0xdb, 0xc0+arg
.idx
);
1638 void x87_fcmovne( struct x86_function
*p
, struct x86_reg arg
)
1641 assert(arg
.file
== file_x87
);
1642 emit_2ub(p
, 0xdb, 0xc8+arg
.idx
);
1645 void x87_fcmovnbe( struct x86_function
*p
, struct x86_reg arg
)
1648 assert(arg
.file
== file_x87
);
1649 emit_2ub(p
, 0xdb, 0xd0+arg
.idx
);
1654 static void x87_arith_op( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg arg
,
1655 unsigned char dst0ub0
,
1656 unsigned char dst0ub1
,
1657 unsigned char arg0ub0
,
1658 unsigned char arg0ub1
,
1659 unsigned char argmem_noreg
)
1661 assert(dst
.file
== file_x87
);
1663 if (arg
.file
== file_x87
) {
1665 emit_2ub(p
, dst0ub0
, dst0ub1
+arg
.idx
);
1666 else if (arg
.idx
== 0)
1667 emit_2ub(p
, arg0ub0
, arg0ub1
+arg
.idx
);
1671 else if (dst
.idx
== 0) {
1672 assert(arg
.file
== file_REG32
);
1674 emit_modrm_noreg(p
, argmem_noreg
, arg
);
1680 void x87_fmul( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1682 DUMP_RR( dst
, src
);
1683 x87_arith_op(p
, dst
, src
,
1689 void x87_fsub( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1691 DUMP_RR( dst
, src
);
1692 x87_arith_op(p
, dst
, src
,
1698 void x87_fsubr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1700 DUMP_RR( dst
, src
);
1701 x87_arith_op(p
, dst
, src
,
1707 void x87_fadd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1709 DUMP_RR( dst
, src
);
1710 x87_arith_op(p
, dst
, src
,
1716 void x87_fdiv( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1718 DUMP_RR( dst
, src
);
1719 x87_arith_op(p
, dst
, src
,
1725 void x87_fdivr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1727 DUMP_RR( dst
, src
);
1728 x87_arith_op(p
, dst
, src
,
1734 void x87_fmulp( struct x86_function
*p
, struct x86_reg dst
)
1737 assert(dst
.file
== file_x87
);
1738 assert(dst
.idx
>= 1);
1739 emit_2ub(p
, 0xde, 0xc8+dst
.idx
);
1743 void x87_fsubp( struct x86_function
*p
, struct x86_reg dst
)
1746 assert(dst
.file
== file_x87
);
1747 assert(dst
.idx
>= 1);
1748 emit_2ub(p
, 0xde, 0xe8+dst
.idx
);
1752 void x87_fsubrp( struct x86_function
*p
, struct x86_reg dst
)
1755 assert(dst
.file
== file_x87
);
1756 assert(dst
.idx
>= 1);
1757 emit_2ub(p
, 0xde, 0xe0+dst
.idx
);
1761 void x87_faddp( struct x86_function
*p
, struct x86_reg dst
)
1764 assert(dst
.file
== file_x87
);
1765 assert(dst
.idx
>= 1);
1766 emit_2ub(p
, 0xde, 0xc0+dst
.idx
);
1770 void x87_fdivp( struct x86_function
*p
, struct x86_reg dst
)
1773 assert(dst
.file
== file_x87
);
1774 assert(dst
.idx
>= 1);
1775 emit_2ub(p
, 0xde, 0xf8+dst
.idx
);
1779 void x87_fdivrp( struct x86_function
*p
, struct x86_reg dst
)
1782 assert(dst
.file
== file_x87
);
1783 assert(dst
.idx
>= 1);
1784 emit_2ub(p
, 0xde, 0xf0+dst
.idx
);
1788 void x87_ftst( struct x86_function
*p
)
1791 emit_2ub(p
, 0xd9, 0xe4);
1794 void x87_fucom( struct x86_function
*p
, struct x86_reg arg
)
1797 assert(arg
.file
== file_x87
);
1798 emit_2ub(p
, 0xdd, 0xe0+arg
.idx
);
1801 void x87_fucomp( struct x86_function
*p
, struct x86_reg arg
)
1804 assert(arg
.file
== file_x87
);
1805 emit_2ub(p
, 0xdd, 0xe8+arg
.idx
);
/* FUCOMPP: unordered-compare st(0) with st(1), then pop both (DA E9).
 * The virtual x87 stack depth is decremented twice to match.
 */
void x87_fucompp( struct x86_function *p )
{
   DUMP();
   emit_2ub( p, 0xda, 0xe9 );
   note_x87_pop( p );           /* pop twice */
   note_x87_pop( p );
}
1817 void x87_fxch( struct x86_function
*p
, struct x86_reg arg
)
1820 assert(arg
.file
== file_x87
);
1821 emit_2ub(p
, 0xd9, 0xc8+arg
.idx
);
1824 void x87_fabs( struct x86_function
*p
)
1827 emit_2ub(p
, 0xd9, 0xe1);
1830 void x87_fchs( struct x86_function
*p
)
1833 emit_2ub(p
, 0xd9, 0xe0);
1836 void x87_fcos( struct x86_function
*p
)
1839 emit_2ub(p
, 0xd9, 0xff);
1843 void x87_fprndint( struct x86_function
*p
)
1846 emit_2ub(p
, 0xd9, 0xfc);
1849 void x87_fscale( struct x86_function
*p
)
1852 emit_2ub(p
, 0xd9, 0xfd);
1855 void x87_fsin( struct x86_function
*p
)
1858 emit_2ub(p
, 0xd9, 0xfe);
1861 void x87_fsincos( struct x86_function
*p
)
1864 emit_2ub(p
, 0xd9, 0xfb);
1867 void x87_fsqrt( struct x86_function
*p
)
1870 emit_2ub(p
, 0xd9, 0xfa);
1873 void x87_fxtract( struct x86_function
*p
)
1876 emit_2ub(p
, 0xd9, 0xf4);
1881 * Restrictions: -1.0 <= st0 <= 1.0
1883 void x87_f2xm1( struct x86_function
*p
)
1886 emit_2ub(p
, 0xd9, 0xf0);
1889 /* st1 = st1 * log2(st0);
1892 void x87_fyl2x( struct x86_function
*p
)
1895 emit_2ub(p
, 0xd9, 0xf1);
1899 /* st1 = st1 * log2(st0 + 1.0);
1902 * A fast operation, with restrictions: -.29 < st0 < .29
1904 void x87_fyl2xp1( struct x86_function
*p
)
1907 emit_2ub(p
, 0xd9, 0xf9);
1912 void x87_fld( struct x86_function
*p
, struct x86_reg arg
)
1915 if (arg
.file
== file_x87
)
1916 emit_2ub(p
, 0xd9, 0xc0 + arg
.idx
);
1919 emit_modrm_noreg(p
, 0, arg
);
1924 void x87_fst( struct x86_function
*p
, struct x86_reg dst
)
1927 if (dst
.file
== file_x87
)
1928 emit_2ub(p
, 0xdd, 0xd0 + dst
.idx
);
1931 emit_modrm_noreg(p
, 2, dst
);
1935 void x87_fstp( struct x86_function
*p
, struct x86_reg dst
)
1938 if (dst
.file
== file_x87
)
1939 emit_2ub(p
, 0xdd, 0xd8 + dst
.idx
);
1942 emit_modrm_noreg(p
, 3, dst
);
1947 void x87_fpop( struct x86_function
*p
)
1949 x87_fstp( p
, x86_make_reg( file_x87
, 0 ));
1953 void x87_fcom( struct x86_function
*p
, struct x86_reg dst
)
1956 if (dst
.file
== file_x87
)
1957 emit_2ub(p
, 0xd8, 0xd0 + dst
.idx
);
1960 emit_modrm_noreg(p
, 2, dst
);
1965 void x87_fcomp( struct x86_function
*p
, struct x86_reg dst
)
1968 if (dst
.file
== file_x87
)
1969 emit_2ub(p
, 0xd8, 0xd8 + dst
.idx
);
1972 emit_modrm_noreg(p
, 3, dst
);
1977 void x87_fcomi( struct x86_function
*p
, struct x86_reg arg
)
1980 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1983 void x87_fcomip( struct x86_function
*p
, struct x86_reg arg
)
1986 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1991 void x87_fnstsw( struct x86_function
*p
, struct x86_reg dst
)
1994 assert(dst
.file
== file_REG32
);
1996 if (dst
.idx
== reg_AX
&&
1998 emit_2ub(p
, 0xdf, 0xe0);
2001 emit_modrm_noreg(p
, 7, dst
);
2006 void x87_fnstcw( struct x86_function
*p
, struct x86_reg dst
)
2009 assert(dst
.file
== file_REG32
);
2011 emit_1ub(p
, 0x9b); /* WAIT -- needed? */
2013 emit_modrm_noreg(p
, 7, dst
);
2019 /***********************************************************************
2023 void mmx_emms( struct x86_function
*p
)
2026 assert(p
->need_emms
);
2027 emit_2ub(p
, 0x0f, 0x77);
2031 void mmx_packssdw( struct x86_function
*p
,
2033 struct x86_reg src
)
2035 DUMP_RR( dst
, src
);
2036 assert(dst
.file
== file_MMX
&&
2037 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
2041 emit_2ub(p
, X86_TWOB
, 0x6b);
2042 emit_modrm( p
, dst
, src
);
2045 void mmx_packuswb( struct x86_function
*p
,
2047 struct x86_reg src
)
2049 DUMP_RR( dst
, src
);
2050 assert(dst
.file
== file_MMX
&&
2051 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
2055 emit_2ub(p
, X86_TWOB
, 0x67);
2056 emit_modrm( p
, dst
, src
);
2059 void mmx_movd( struct x86_function
*p
,
2061 struct x86_reg src
)
2063 DUMP_RR( dst
, src
);
2065 emit_1ub(p
, X86_TWOB
);
2066 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
2069 void mmx_movq( struct x86_function
*p
,
2071 struct x86_reg src
)
2073 DUMP_RR( dst
, src
);
2075 emit_1ub(p
, X86_TWOB
);
2076 emit_op_modrm( p
, 0x6f, 0x7f, dst
, src
);
2080 /***********************************************************************
2085 void x86_cdecl_caller_push_regs( struct x86_function
*p
)
2087 x86_push(p
, x86_make_reg(file_REG32
, reg_AX
));
2088 x86_push(p
, x86_make_reg(file_REG32
, reg_CX
));
2089 x86_push(p
, x86_make_reg(file_REG32
, reg_DX
));
2092 void x86_cdecl_caller_pop_regs( struct x86_function
*p
)
2094 x86_pop(p
, x86_make_reg(file_REG32
, reg_DX
));
2095 x86_pop(p
, x86_make_reg(file_REG32
, reg_CX
));
2096 x86_pop(p
, x86_make_reg(file_REG32
, reg_AX
));
2100 struct x86_reg
x86_fn_arg( struct x86_function
*p
,
2103 switch(x86_target(p
))
2105 case X86_64_WIN64_ABI
:
2106 /* Microsoft uses a different calling convention than the rest of the world */
2110 return x86_make_reg(file_REG32
, reg_CX
);
2112 return x86_make_reg(file_REG32
, reg_DX
);
2114 return x86_make_reg(file_REG32
, reg_R8
);
2116 return x86_make_reg(file_REG32
, reg_R9
);
2118 /* Win64 allocates stack slots as if it pushed the first 4 arguments too */
2119 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
2120 p
->stack_offset
+ arg
* 8);
2122 case X86_64_STD_ABI
:
2126 return x86_make_reg(file_REG32
, reg_DI
);
2128 return x86_make_reg(file_REG32
, reg_SI
);
2130 return x86_make_reg(file_REG32
, reg_DX
);
2132 return x86_make_reg(file_REG32
, reg_CX
);
2134 return x86_make_reg(file_REG32
, reg_R8
);
2136 return x86_make_reg(file_REG32
, reg_R9
);
2138 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
2139 p
->stack_offset
+ (arg
- 6) * 8); /* ??? */
2142 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
2143 p
->stack_offset
+ arg
* 4); /* ??? */
2145 assert(0 && "Unexpected x86 target ABI in x86_fn_arg");
2146 return x86_make_reg(file_REG32
, reg_CX
); /* not used / silence warning */
2150 static void x86_init_func_common( struct x86_function
*p
)
2154 if(util_cpu_caps
.has_mmx
)
2156 if(util_cpu_caps
.has_mmx2
)
2157 p
->caps
|= X86_MMX2
;
2158 if(util_cpu_caps
.has_sse
)
2160 if(util_cpu_caps
.has_sse2
)
2161 p
->caps
|= X86_SSE2
;
2162 if(util_cpu_caps
.has_sse3
)
2163 p
->caps
|= X86_SSE3
;
2164 if(util_cpu_caps
.has_sse4_1
)
2165 p
->caps
|= X86_SSE4_1
;
2170 void x86_init_func( struct x86_function
*p
)
2174 x86_init_func_common(p
);
2177 void x86_init_func_size( struct x86_function
*p
, unsigned code_size
)
2179 p
->size
= code_size
;
2180 p
->store
= rtasm_exec_malloc(code_size
);
2181 if (p
->store
== NULL
) {
2182 p
->store
= p
->error_overflow
;
2184 x86_init_func_common(p
);
2187 void x86_release_func( struct x86_function
*p
)
2189 if (p
->store
&& p
->store
!= p
->error_overflow
)
2190 rtasm_exec_free(p
->store
);
2198 static INLINE x86_func
2199 voidptr_to_x86_func(void *v
)
2205 assert(sizeof(u
.v
) == sizeof(u
.f
));
2211 x86_func
x86_get_func( struct x86_function
*p
)
2214 if (DISASSEM
&& p
->store
)
2215 debug_printf("disassemble %p %p\n", p
->store
, p
->csr
);
2217 if (p
->store
== p
->error_overflow
)
2218 return voidptr_to_x86_func(NULL
);
2220 return voidptr_to_x86_func(p
->store
);
2225 void x86sse_dummy( void );
2227 void x86sse_dummy( void )