1 /**************************************************************************
3 * Copyright (C) 1999-2005 Brian Paul All Rights Reserved.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * BRIAN PAUL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
19 * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22 **************************************************************************/
24 #include "pipe/p_config.h"
26 #if defined(PIPE_ARCH_X86)
28 #include "pipe/p_compiler.h"
29 #include "util/u_debug.h"
30 #include "util/u_pointer.h"
32 #include "rtasm_execmem.h"
33 #include "rtasm_x86sse.h"
42 void x86_print_reg( struct x86_reg reg
)
44 if (reg
.mod
!= mod_REG
)
50 case reg_AX
: debug_printf( "EAX" ); break;
51 case reg_CX
: debug_printf( "ECX" ); break;
52 case reg_DX
: debug_printf( "EDX" ); break;
53 case reg_BX
: debug_printf( "EBX" ); break;
54 case reg_SP
: debug_printf( "ESP" ); break;
55 case reg_BP
: debug_printf( "EBP" ); break;
56 case reg_SI
: debug_printf( "ESI" ); break;
57 case reg_DI
: debug_printf( "EDI" ); break;
61 debug_printf( "MMX%u", reg
.idx
);
64 debug_printf( "XMM%u", reg
.idx
);
67 debug_printf( "fp%u", reg
.idx
);
71 if (reg
.mod
== mod_DISP8
||
72 reg
.mod
== mod_DISP32
)
73 debug_printf("+%d", reg
.disp
);
75 if (reg
.mod
!= mod_REG
)
81 #define DUMP_START() debug_printf( "\n" )
82 #define DUMP_END() debug_printf( "\n" )
85 const char *foo = __FUNCTION__; \
86 while (*foo && *foo != '_') \
90 debug_printf( "\n% 4x% 15s ", p->csr - p->store, foo ); \
93 #define DUMP_I( I ) do { \
95 debug_printf( "%u", I ); \
98 #define DUMP_R( R0 ) do { \
100 x86_print_reg( R0 ); \
103 #define DUMP_RR( R0, R1 ) do { \
105 x86_print_reg( R0 ); \
106 debug_printf( ", " ); \
107 x86_print_reg( R1 ); \
110 #define DUMP_RI( R0, I ) do { \
112 x86_print_reg( R0 ); \
113 debug_printf( ", %u", I ); \
116 #define DUMP_RRI( R0, R1, I ) do { \
118 x86_print_reg( R0 ); \
119 debug_printf( ", " ); \
120 x86_print_reg( R1 ); \
121 debug_printf( ", %u", I ); \
131 #define DUMP_RR( R0, R1 )
132 #define DUMP_RI( R0, I )
133 #define DUMP_RRI( R0, R1, I )
138 static void do_realloc( struct x86_function
*p
)
140 if (p
->store
== p
->error_overflow
) {
143 else if (p
->size
== 0) {
145 p
->store
= rtasm_exec_malloc(p
->size
);
149 uintptr_t used
= pointer_to_uintptr( p
->csr
) - pointer_to_uintptr( p
->store
);
150 unsigned char *tmp
= p
->store
;
152 p
->store
= rtasm_exec_malloc(p
->size
);
155 memcpy(p
->store
, tmp
, used
);
156 p
->csr
= p
->store
+ used
;
162 rtasm_exec_free(tmp
);
165 if (p
->store
== NULL
) {
166 p
->store
= p
->csr
= p
->error_overflow
;
167 p
->size
= sizeof(p
->error_overflow
);
171 /* Emit bytes to the instruction stream:
173 static unsigned char *reserve( struct x86_function
*p
, int bytes
)
175 if (p
->csr
+ bytes
- p
->store
> (int) p
->size
)
179 unsigned char *csr
= p
->csr
;
/* Append one signed byte to the instruction stream. */
static void emit_1b( struct x86_function *p, char b0 )
{
   char *csr = (char *)reserve(p, 1);
   *csr = b0;
}
/* Append one 32-bit immediate to the instruction stream. */
static void emit_1i( struct x86_function *p, int i0 )
{
   int *icsr = (int *)reserve(p, sizeof(i0));
   *icsr = i0;
}
/* Append one opcode byte to the instruction stream. */
static void emit_1ub( struct x86_function *p, unsigned char b0 )
{
   unsigned char *csr = reserve(p, 1);
   *csr++ = b0;
}
/* Append two opcode bytes to the instruction stream. */
static void emit_2ub( struct x86_function *p, unsigned char b0, unsigned char b1 )
{
   unsigned char *csr = reserve(p, 2);
   *csr++ = b0;
   *csr++ = b1;
}
/* Append three opcode bytes to the instruction stream. */
static void emit_3ub( struct x86_function *p, unsigned char b0,
                      unsigned char b1, unsigned char b2 )
{
   unsigned char *csr = reserve(p, 3);
   *csr++ = b0;
   *csr++ = b1;
   *csr++ = b2;
}
221 /* Build a modRM byte + possible displacement. No treatment of SIB
222 * indexing. BZZT - no way to encode an absolute address.
224 * This is the "/r" field in the x86 manuals...
226 static void emit_modrm( struct x86_function
*p
,
228 struct x86_reg regmem
)
230 unsigned char val
= 0;
232 assert(reg
.mod
== mod_REG
);
234 val
|= regmem
.mod
<< 6; /* mod field */
235 val
|= reg
.idx
<< 3; /* reg field */
236 val
|= regmem
.idx
; /* r/m field */
240 /* Oh-oh we've stumbled into the SIB thing.
242 if (regmem
.file
== file_REG32
&&
243 regmem
.idx
== reg_SP
&&
244 regmem
.mod
!= mod_REG
) {
245 emit_1ub(p
, 0x24); /* simplistic! */
248 switch (regmem
.mod
) {
253 emit_1b(p
, (char) regmem
.disp
);
256 emit_1i(p
, regmem
.disp
);
264 /* Emits the "/0".."/7" specialized versions of the modrm ("/r") bytes.
266 static void emit_modrm_noreg( struct x86_function
*p
,
268 struct x86_reg regmem
)
270 struct x86_reg dummy
= x86_make_reg(file_REG32
, op
);
271 emit_modrm(p
, dummy
, regmem
);
274 /* Many x86 instructions have two opcodes to cope with the situations
275 * where the destination is a register or memory reference
276 * respectively. This function selects the correct opcode based on
277 * the arguments presented.
279 static void emit_op_modrm( struct x86_function
*p
,
280 unsigned char op_dst_is_reg
,
281 unsigned char op_dst_is_mem
,
287 emit_1ub(p
, op_dst_is_reg
);
288 emit_modrm(p
, dst
, src
);
293 assert(src
.mod
== mod_REG
);
294 emit_1ub(p
, op_dst_is_mem
);
295 emit_modrm(p
, src
, dst
);
309 /* Create and manipulate registers and regmem values:
311 struct x86_reg
x86_make_reg( enum x86_reg_file file
,
312 enum x86_reg_name idx
)
324 struct x86_reg
x86_make_disp( struct x86_reg reg
,
327 assert(reg
.file
== file_REG32
);
329 if (reg
.mod
== mod_REG
)
334 if (reg
.disp
== 0 && reg
.idx
!= reg_BP
)
335 reg
.mod
= mod_INDIRECT
;
336 else if (reg
.disp
<= 127 && reg
.disp
>= -128)
339 reg
.mod
= mod_DISP32
;
344 struct x86_reg
x86_deref( struct x86_reg reg
)
346 return x86_make_disp(reg
, 0);
349 struct x86_reg
x86_get_base_reg( struct x86_reg reg
)
351 return x86_make_reg( reg
.file
, reg
.idx
);
354 int x86_get_label( struct x86_function
*p
)
356 return p
->csr
- p
->store
;
361 /***********************************************************************
366 void x86_jcc( struct x86_function
*p
,
370 int offset
= label
- (x86_get_label(p
) + 2);
374 /*assert(p->csr - p->store > -offset);*/
375 if (p
->csr
- p
->store
<= -offset
) {
376 /* probably out of memory (using the error_overflow buffer) */
381 if (offset
<= 127 && offset
>= -128) {
382 emit_1ub(p
, 0x70 + cc
);
383 emit_1b(p
, (char) offset
);
386 offset
= label
- (x86_get_label(p
) + 6);
387 emit_2ub(p
, 0x0f, 0x80 + cc
);
392 /* Always use a 32bit offset for forward jumps:
394 int x86_jcc_forward( struct x86_function
*p
,
398 emit_2ub(p
, 0x0f, 0x80 + cc
);
400 return x86_get_label(p
);
403 int x86_jmp_forward( struct x86_function
*p
)
408 return x86_get_label(p
);
411 int x86_call_forward( struct x86_function
*p
)
417 return x86_get_label(p
);
420 /* Fixup offset from forward jump:
422 void x86_fixup_fwd_jump( struct x86_function
*p
,
425 *(int *)(p
->store
+ fixup
- 4) = x86_get_label(p
) - fixup
;
428 void x86_jmp( struct x86_function
*p
, int label
)
432 emit_1i(p
, label
- x86_get_label(p
) - 4);
435 void x86_call( struct x86_function
*p
, struct x86_reg reg
)
439 emit_modrm_noreg(p
, 2, reg
);
443 void x86_mov_reg_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
446 assert(dst
.file
== file_REG32
);
447 assert(dst
.mod
== mod_REG
);
448 emit_1ub(p
, 0xb8 + dst
.idx
);
453 * Immediate group 1 instructions.
456 x86_group1_imm( struct x86_function
*p
,
457 unsigned op
, struct x86_reg dst
, int imm
)
459 assert(dst
.file
== file_REG32
);
460 assert(dst
.mod
== mod_REG
);
461 if(-0x80 <= imm
&& imm
< 0x80) {
463 emit_modrm_noreg(p
, op
, dst
);
464 emit_1b(p
, (char)imm
);
468 emit_modrm_noreg(p
, op
, dst
);
473 void x86_add_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
476 x86_group1_imm(p
, 0, dst
, imm
);
479 void x86_or_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
482 x86_group1_imm(p
, 1, dst
, imm
);
485 void x86_and_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
488 x86_group1_imm(p
, 4, dst
, imm
);
491 void x86_sub_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
494 x86_group1_imm(p
, 5, dst
, imm
);
497 void x86_xor_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
500 x86_group1_imm(p
, 6, dst
, imm
);
503 void x86_cmp_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
506 x86_group1_imm(p
, 7, dst
, imm
);
510 void x86_push( struct x86_function
*p
,
514 if (reg
.mod
== mod_REG
)
515 emit_1ub(p
, 0x50 + reg
.idx
);
519 emit_modrm_noreg(p
, 6, reg
);
523 p
->stack_offset
+= 4;
526 void x86_push_imm32( struct x86_function
*p
,
533 p
->stack_offset
+= 4;
537 void x86_pop( struct x86_function
*p
,
541 assert(reg
.mod
== mod_REG
);
542 emit_1ub(p
, 0x58 + reg
.idx
);
543 p
->stack_offset
-= 4;
546 void x86_inc( struct x86_function
*p
,
550 assert(reg
.mod
== mod_REG
);
551 emit_1ub(p
, 0x40 + reg
.idx
);
554 void x86_dec( struct x86_function
*p
,
558 assert(reg
.mod
== mod_REG
);
559 emit_1ub(p
, 0x48 + reg
.idx
);
562 void x86_ret( struct x86_function
*p
)
565 assert(p
->stack_offset
== 0);
569 void x86_retw( struct x86_function
*p
, unsigned short imm
)
572 emit_3ub(p
, 0xc2, imm
& 0xff, (imm
>> 8) & 0xff);
575 void x86_sahf( struct x86_function
*p
)
581 void x86_mov( struct x86_function
*p
,
586 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
589 void x86_xor( struct x86_function
*p
,
594 emit_op_modrm( p
, 0x33, 0x31, dst
, src
);
597 void x86_cmp( struct x86_function
*p
,
602 emit_op_modrm( p
, 0x3b, 0x39, dst
, src
);
605 void x86_lea( struct x86_function
*p
,
611 emit_modrm( p
, dst
, src
);
614 void x86_test( struct x86_function
*p
,
620 emit_modrm( p
, dst
, src
);
623 void x86_add( struct x86_function
*p
,
628 emit_op_modrm(p
, 0x03, 0x01, dst
, src
);
631 /* Calculate EAX * src, results in EDX:EAX.
633 void x86_mul( struct x86_function
*p
,
638 emit_modrm_noreg(p
, 4, src
);
642 void x86_imul( struct x86_function
*p
,
647 emit_2ub(p
, X86_TWOB
, 0xAF);
648 emit_modrm(p
, dst
, src
);
652 void x86_sub( struct x86_function
*p
,
657 emit_op_modrm(p
, 0x2b, 0x29, dst
, src
);
660 void x86_or( struct x86_function
*p
,
665 emit_op_modrm( p
, 0x0b, 0x09, dst
, src
);
668 void x86_and( struct x86_function
*p
,
673 emit_op_modrm( p
, 0x23, 0x21, dst
, src
);
676 void x86_div( struct x86_function
*p
,
679 assert(src
.file
== file_REG32
&& src
.mod
== mod_REG
);
680 emit_op_modrm(p
, 0xf7, 0, x86_make_reg(file_REG32
, 6), src
);
685 /***********************************************************************
689 void sse_prefetchnta( struct x86_function
*p
, struct x86_reg ptr
)
692 assert(ptr
.mod
!= mod_REG
);
693 emit_2ub(p
, 0x0f, 0x18);
694 emit_modrm_noreg(p
, 0, ptr
);
697 void sse_prefetch0( struct x86_function
*p
, struct x86_reg ptr
)
700 assert(ptr
.mod
!= mod_REG
);
701 emit_2ub(p
, 0x0f, 0x18);
702 emit_modrm_noreg(p
, 1, ptr
);
705 void sse_prefetch1( struct x86_function
*p
, struct x86_reg ptr
)
708 assert(ptr
.mod
!= mod_REG
);
709 emit_2ub(p
, 0x0f, 0x18);
710 emit_modrm_noreg(p
, 2, ptr
);
713 void sse_movntps( struct x86_function
*p
,
719 assert(dst
.mod
!= mod_REG
);
720 assert(src
.mod
== mod_REG
);
721 emit_2ub(p
, 0x0f, 0x2b);
722 emit_modrm(p
, src
, dst
);
728 void sse_movss( struct x86_function
*p
,
733 emit_2ub(p
, 0xF3, X86_TWOB
);
734 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
737 void sse_movaps( struct x86_function
*p
,
742 emit_1ub(p
, X86_TWOB
);
743 emit_op_modrm( p
, 0x28, 0x29, dst
, src
);
746 void sse_movups( struct x86_function
*p
,
751 emit_1ub(p
, X86_TWOB
);
752 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
755 void sse_movhps( struct x86_function
*p
,
760 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
761 emit_1ub(p
, X86_TWOB
);
762 emit_op_modrm( p
, 0x16, 0x17, dst
, src
); /* cf movlhps */
765 void sse_movlps( struct x86_function
*p
,
770 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
771 emit_1ub(p
, X86_TWOB
);
772 emit_op_modrm( p
, 0x12, 0x13, dst
, src
); /* cf movhlps */
775 void sse_maxps( struct x86_function
*p
,
780 emit_2ub(p
, X86_TWOB
, 0x5F);
781 emit_modrm( p
, dst
, src
);
784 void sse_maxss( struct x86_function
*p
,
789 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5F);
790 emit_modrm( p
, dst
, src
);
793 void sse_divss( struct x86_function
*p
,
798 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5E);
799 emit_modrm( p
, dst
, src
);
802 void sse_minps( struct x86_function
*p
,
807 emit_2ub(p
, X86_TWOB
, 0x5D);
808 emit_modrm( p
, dst
, src
);
811 void sse_subps( struct x86_function
*p
,
816 emit_2ub(p
, X86_TWOB
, 0x5C);
817 emit_modrm( p
, dst
, src
);
820 void sse_mulps( struct x86_function
*p
,
825 emit_2ub(p
, X86_TWOB
, 0x59);
826 emit_modrm( p
, dst
, src
);
829 void sse_mulss( struct x86_function
*p
,
834 emit_3ub(p
, 0xF3, X86_TWOB
, 0x59);
835 emit_modrm( p
, dst
, src
);
838 void sse_addps( struct x86_function
*p
,
843 emit_2ub(p
, X86_TWOB
, 0x58);
844 emit_modrm( p
, dst
, src
);
847 void sse_addss( struct x86_function
*p
,
852 emit_3ub(p
, 0xF3, X86_TWOB
, 0x58);
853 emit_modrm( p
, dst
, src
);
856 void sse_andnps( struct x86_function
*p
,
861 emit_2ub(p
, X86_TWOB
, 0x55);
862 emit_modrm( p
, dst
, src
);
865 void sse_andps( struct x86_function
*p
,
870 emit_2ub(p
, X86_TWOB
, 0x54);
871 emit_modrm( p
, dst
, src
);
874 void sse_rsqrtps( struct x86_function
*p
,
879 emit_2ub(p
, X86_TWOB
, 0x52);
880 emit_modrm( p
, dst
, src
);
883 void sse_rsqrtss( struct x86_function
*p
,
888 emit_3ub(p
, 0xF3, X86_TWOB
, 0x52);
889 emit_modrm( p
, dst
, src
);
893 void sse_movhlps( struct x86_function
*p
,
898 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
899 emit_2ub(p
, X86_TWOB
, 0x12);
900 emit_modrm( p
, dst
, src
);
903 void sse_movlhps( struct x86_function
*p
,
908 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
909 emit_2ub(p
, X86_TWOB
, 0x16);
910 emit_modrm( p
, dst
, src
);
913 void sse_orps( struct x86_function
*p
,
918 emit_2ub(p
, X86_TWOB
, 0x56);
919 emit_modrm( p
, dst
, src
);
922 void sse_xorps( struct x86_function
*p
,
927 emit_2ub(p
, X86_TWOB
, 0x57);
928 emit_modrm( p
, dst
, src
);
931 void sse_cvtps2pi( struct x86_function
*p
,
936 assert(dst
.file
== file_MMX
&&
937 (src
.file
== file_XMM
|| src
.mod
!= mod_REG
));
941 emit_2ub(p
, X86_TWOB
, 0x2d);
942 emit_modrm( p
, dst
, src
);
945 void sse2_cvtdq2ps( struct x86_function
*p
,
950 emit_2ub(p
, X86_TWOB
, 0x5b);
951 emit_modrm( p
, dst
, src
);
955 /* Shufps can also be used to implement a reduced swizzle when dest ==
958 void sse_shufps( struct x86_function
*p
,
963 DUMP_RRI( dst
, src
, shuf
);
964 emit_2ub(p
, X86_TWOB
, 0xC6);
965 emit_modrm(p
, dst
, src
);
969 void sse_unpckhps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
972 emit_2ub( p
, X86_TWOB
, 0x15 );
973 emit_modrm( p
, dst
, src
);
976 void sse_unpcklps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
979 emit_2ub( p
, X86_TWOB
, 0x14 );
980 emit_modrm( p
, dst
, src
);
983 void sse_cmpps( struct x86_function
*p
,
988 DUMP_RRI( dst
, src
, cc
);
989 emit_2ub(p
, X86_TWOB
, 0xC2);
990 emit_modrm(p
, dst
, src
);
994 void sse_pmovmskb( struct x86_function
*p
,
999 emit_3ub(p
, 0x66, X86_TWOB
, 0xD7);
1000 emit_modrm(p
, dst
, src
);
1003 void sse_movmskps( struct x86_function
*p
,
1007 DUMP_RR( dst
, src
);
1008 emit_2ub(p
, X86_TWOB
, 0x50);
1009 emit_modrm(p
, dst
, src
);
1012 /***********************************************************************
1017 * Perform a reduced swizzle:
1019 void sse2_pshufd( struct x86_function
*p
,
1024 DUMP_RRI( dst
, src
, shuf
);
1025 emit_3ub(p
, 0x66, X86_TWOB
, 0x70);
1026 emit_modrm(p
, dst
, src
);
1030 void sse2_cvttps2dq( struct x86_function
*p
,
1032 struct x86_reg src
)
1034 DUMP_RR( dst
, src
);
1035 emit_3ub( p
, 0xF3, X86_TWOB
, 0x5B );
1036 emit_modrm( p
, dst
, src
);
1039 void sse2_cvtps2dq( struct x86_function
*p
,
1041 struct x86_reg src
)
1043 DUMP_RR( dst
, src
);
1044 emit_3ub(p
, 0x66, X86_TWOB
, 0x5B);
1045 emit_modrm( p
, dst
, src
);
1048 void sse2_packssdw( struct x86_function
*p
,
1050 struct x86_reg src
)
1052 DUMP_RR( dst
, src
);
1053 emit_3ub(p
, 0x66, X86_TWOB
, 0x6B);
1054 emit_modrm( p
, dst
, src
);
1057 void sse2_packsswb( struct x86_function
*p
,
1059 struct x86_reg src
)
1061 DUMP_RR( dst
, src
);
1062 emit_3ub(p
, 0x66, X86_TWOB
, 0x63);
1063 emit_modrm( p
, dst
, src
);
1066 void sse2_packuswb( struct x86_function
*p
,
1068 struct x86_reg src
)
1070 DUMP_RR( dst
, src
);
1071 emit_3ub(p
, 0x66, X86_TWOB
, 0x67);
1072 emit_modrm( p
, dst
, src
);
1075 void sse2_punpcklbw( struct x86_function
*p
,
1077 struct x86_reg src
)
1079 DUMP_RR( dst
, src
);
1080 emit_3ub(p
, 0x66, X86_TWOB
, 0x60);
1081 emit_modrm( p
, dst
, src
);
1085 void sse2_rcpps( struct x86_function
*p
,
1087 struct x86_reg src
)
1089 DUMP_RR( dst
, src
);
1090 emit_2ub(p
, X86_TWOB
, 0x53);
1091 emit_modrm( p
, dst
, src
);
1094 void sse2_rcpss( struct x86_function
*p
,
1096 struct x86_reg src
)
1098 DUMP_RR( dst
, src
);
1099 emit_3ub(p
, 0xF3, X86_TWOB
, 0x53);
1100 emit_modrm( p
, dst
, src
);
1103 void sse2_movd( struct x86_function
*p
,
1105 struct x86_reg src
)
1107 DUMP_RR( dst
, src
);
1108 emit_2ub(p
, 0x66, X86_TWOB
);
1109 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
1115 /***********************************************************************
1118 static void note_x87_pop( struct x86_function
*p
)
1121 assert(p
->x87_stack
>= 0);
1124 static void note_x87_push( struct x86_function
*p
)
1127 assert(p
->x87_stack
<= 7);
1130 void x87_assert_stack_empty( struct x86_function
*p
)
1132 assert (p
->x87_stack
== 0);
1136 void x87_fist( struct x86_function
*p
, struct x86_reg dst
)
1140 emit_modrm_noreg(p
, 2, dst
);
1143 void x87_fistp( struct x86_function
*p
, struct x86_reg dst
)
1147 emit_modrm_noreg(p
, 3, dst
);
1151 void x87_fild( struct x86_function
*p
, struct x86_reg arg
)
1155 emit_modrm_noreg(p
, 0, arg
);
1159 void x87_fldz( struct x86_function
*p
)
1162 emit_2ub(p
, 0xd9, 0xee);
1167 void x87_fldcw( struct x86_function
*p
, struct x86_reg arg
)
1170 assert(arg
.file
== file_REG32
);
1171 assert(arg
.mod
!= mod_REG
);
1173 emit_modrm_noreg(p
, 5, arg
);
1176 void x87_fld1( struct x86_function
*p
)
1179 emit_2ub(p
, 0xd9, 0xe8);
1183 void x87_fldl2e( struct x86_function
*p
)
1186 emit_2ub(p
, 0xd9, 0xea);
1190 void x87_fldln2( struct x86_function
*p
)
1193 emit_2ub(p
, 0xd9, 0xed);
1197 void x87_fwait( struct x86_function
*p
)
1203 void x87_fnclex( struct x86_function
*p
)
1206 emit_2ub(p
, 0xdb, 0xe2);
1209 void x87_fclex( struct x86_function
*p
)
1215 void x87_fcmovb( struct x86_function
*p
, struct x86_reg arg
)
1218 assert(arg
.file
== file_x87
);
1219 emit_2ub(p
, 0xda, 0xc0+arg
.idx
);
1222 void x87_fcmove( struct x86_function
*p
, struct x86_reg arg
)
1225 assert(arg
.file
== file_x87
);
1226 emit_2ub(p
, 0xda, 0xc8+arg
.idx
);
1229 void x87_fcmovbe( struct x86_function
*p
, struct x86_reg arg
)
1232 assert(arg
.file
== file_x87
);
1233 emit_2ub(p
, 0xda, 0xd0+arg
.idx
);
1236 void x87_fcmovnb( struct x86_function
*p
, struct x86_reg arg
)
1239 assert(arg
.file
== file_x87
);
1240 emit_2ub(p
, 0xdb, 0xc0+arg
.idx
);
1243 void x87_fcmovne( struct x86_function
*p
, struct x86_reg arg
)
1246 assert(arg
.file
== file_x87
);
1247 emit_2ub(p
, 0xdb, 0xc8+arg
.idx
);
1250 void x87_fcmovnbe( struct x86_function
*p
, struct x86_reg arg
)
1253 assert(arg
.file
== file_x87
);
1254 emit_2ub(p
, 0xdb, 0xd0+arg
.idx
);
1259 static void x87_arith_op( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg arg
,
1260 unsigned char dst0ub0
,
1261 unsigned char dst0ub1
,
1262 unsigned char arg0ub0
,
1263 unsigned char arg0ub1
,
1264 unsigned char argmem_noreg
)
1266 assert(dst
.file
== file_x87
);
1268 if (arg
.file
== file_x87
) {
1270 emit_2ub(p
, dst0ub0
, dst0ub1
+arg
.idx
);
1271 else if (arg
.idx
== 0)
1272 emit_2ub(p
, arg0ub0
, arg0ub1
+arg
.idx
);
1276 else if (dst
.idx
== 0) {
1277 assert(arg
.file
== file_REG32
);
1279 emit_modrm_noreg(p
, argmem_noreg
, arg
);
1285 void x87_fmul( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1287 DUMP_RR( dst
, src
);
1288 x87_arith_op(p
, dst
, src
,
1294 void x87_fsub( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1296 DUMP_RR( dst
, src
);
1297 x87_arith_op(p
, dst
, src
,
1303 void x87_fsubr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1305 DUMP_RR( dst
, src
);
1306 x87_arith_op(p
, dst
, src
,
1312 void x87_fadd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1314 DUMP_RR( dst
, src
);
1315 x87_arith_op(p
, dst
, src
,
1321 void x87_fdiv( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1323 DUMP_RR( dst
, src
);
1324 x87_arith_op(p
, dst
, src
,
1330 void x87_fdivr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1332 DUMP_RR( dst
, src
);
1333 x87_arith_op(p
, dst
, src
,
1339 void x87_fmulp( struct x86_function
*p
, struct x86_reg dst
)
1342 assert(dst
.file
== file_x87
);
1343 assert(dst
.idx
>= 1);
1344 emit_2ub(p
, 0xde, 0xc8+dst
.idx
);
1348 void x87_fsubp( struct x86_function
*p
, struct x86_reg dst
)
1351 assert(dst
.file
== file_x87
);
1352 assert(dst
.idx
>= 1);
1353 emit_2ub(p
, 0xde, 0xe8+dst
.idx
);
1357 void x87_fsubrp( struct x86_function
*p
, struct x86_reg dst
)
1360 assert(dst
.file
== file_x87
);
1361 assert(dst
.idx
>= 1);
1362 emit_2ub(p
, 0xde, 0xe0+dst
.idx
);
1366 void x87_faddp( struct x86_function
*p
, struct x86_reg dst
)
1369 assert(dst
.file
== file_x87
);
1370 assert(dst
.idx
>= 1);
1371 emit_2ub(p
, 0xde, 0xc0+dst
.idx
);
1375 void x87_fdivp( struct x86_function
*p
, struct x86_reg dst
)
1378 assert(dst
.file
== file_x87
);
1379 assert(dst
.idx
>= 1);
1380 emit_2ub(p
, 0xde, 0xf8+dst
.idx
);
1384 void x87_fdivrp( struct x86_function
*p
, struct x86_reg dst
)
1387 assert(dst
.file
== file_x87
);
1388 assert(dst
.idx
>= 1);
1389 emit_2ub(p
, 0xde, 0xf0+dst
.idx
);
1393 void x87_ftst( struct x86_function
*p
)
1396 emit_2ub(p
, 0xd9, 0xe4);
1399 void x87_fucom( struct x86_function
*p
, struct x86_reg arg
)
1402 assert(arg
.file
== file_x87
);
1403 emit_2ub(p
, 0xdd, 0xe0+arg
.idx
);
1406 void x87_fucomp( struct x86_function
*p
, struct x86_reg arg
)
1409 assert(arg
.file
== file_x87
);
1410 emit_2ub(p
, 0xdd, 0xe8+arg
.idx
);
1414 void x87_fucompp( struct x86_function
*p
)
1417 emit_2ub(p
, 0xda, 0xe9);
1418 note_x87_pop(p
); /* pop twice */
1419 note_x87_pop(p
); /* pop twice */
1422 void x87_fxch( struct x86_function
*p
, struct x86_reg arg
)
1425 assert(arg
.file
== file_x87
);
1426 emit_2ub(p
, 0xd9, 0xc8+arg
.idx
);
1429 void x87_fabs( struct x86_function
*p
)
1432 emit_2ub(p
, 0xd9, 0xe1);
1435 void x87_fchs( struct x86_function
*p
)
1438 emit_2ub(p
, 0xd9, 0xe0);
1441 void x87_fcos( struct x86_function
*p
)
1444 emit_2ub(p
, 0xd9, 0xff);
1448 void x87_fprndint( struct x86_function
*p
)
1451 emit_2ub(p
, 0xd9, 0xfc);
1454 void x87_fscale( struct x86_function
*p
)
1457 emit_2ub(p
, 0xd9, 0xfd);
1460 void x87_fsin( struct x86_function
*p
)
1463 emit_2ub(p
, 0xd9, 0xfe);
1466 void x87_fsincos( struct x86_function
*p
)
1469 emit_2ub(p
, 0xd9, 0xfb);
1472 void x87_fsqrt( struct x86_function
*p
)
1475 emit_2ub(p
, 0xd9, 0xfa);
1478 void x87_fxtract( struct x86_function
*p
)
1481 emit_2ub(p
, 0xd9, 0xf4);
1486 * Restrictions: -1.0 <= st0 <= 1.0
1488 void x87_f2xm1( struct x86_function
*p
)
1491 emit_2ub(p
, 0xd9, 0xf0);
1494 /* st1 = st1 * log2(st0);
1497 void x87_fyl2x( struct x86_function
*p
)
1500 emit_2ub(p
, 0xd9, 0xf1);
1504 /* st1 = st1 * log2(st0 + 1.0);
1507 * A fast operation, with restrictions: -.29 < st0 < .29
1509 void x87_fyl2xp1( struct x86_function
*p
)
1512 emit_2ub(p
, 0xd9, 0xf9);
1517 void x87_fld( struct x86_function
*p
, struct x86_reg arg
)
1520 if (arg
.file
== file_x87
)
1521 emit_2ub(p
, 0xd9, 0xc0 + arg
.idx
);
1524 emit_modrm_noreg(p
, 0, arg
);
1529 void x87_fst( struct x86_function
*p
, struct x86_reg dst
)
1532 if (dst
.file
== file_x87
)
1533 emit_2ub(p
, 0xdd, 0xd0 + dst
.idx
);
1536 emit_modrm_noreg(p
, 2, dst
);
1540 void x87_fstp( struct x86_function
*p
, struct x86_reg dst
)
1543 if (dst
.file
== file_x87
)
1544 emit_2ub(p
, 0xdd, 0xd8 + dst
.idx
);
1547 emit_modrm_noreg(p
, 3, dst
);
1552 void x87_fpop( struct x86_function
*p
)
1554 x87_fstp( p
, x86_make_reg( file_x87
, 0 ));
1558 void x87_fcom( struct x86_function
*p
, struct x86_reg dst
)
1561 if (dst
.file
== file_x87
)
1562 emit_2ub(p
, 0xd8, 0xd0 + dst
.idx
);
1565 emit_modrm_noreg(p
, 2, dst
);
1570 void x87_fcomp( struct x86_function
*p
, struct x86_reg dst
)
1573 if (dst
.file
== file_x87
)
1574 emit_2ub(p
, 0xd8, 0xd8 + dst
.idx
);
1577 emit_modrm_noreg(p
, 3, dst
);
1582 void x87_fcomi( struct x86_function
*p
, struct x86_reg arg
)
1585 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1588 void x87_fcomip( struct x86_function
*p
, struct x86_reg arg
)
1591 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1596 void x87_fnstsw( struct x86_function
*p
, struct x86_reg dst
)
1599 assert(dst
.file
== file_REG32
);
1601 if (dst
.idx
== reg_AX
&&
1603 emit_2ub(p
, 0xdf, 0xe0);
1606 emit_modrm_noreg(p
, 7, dst
);
1611 void x87_fnstcw( struct x86_function
*p
, struct x86_reg dst
)
1614 assert(dst
.file
== file_REG32
);
1616 emit_1ub(p
, 0x9b); /* WAIT -- needed? */
1618 emit_modrm_noreg(p
, 7, dst
);
1624 /***********************************************************************
1628 void mmx_emms( struct x86_function
*p
)
1631 assert(p
->need_emms
);
1632 emit_2ub(p
, 0x0f, 0x77);
1636 void mmx_packssdw( struct x86_function
*p
,
1638 struct x86_reg src
)
1640 DUMP_RR( dst
, src
);
1641 assert(dst
.file
== file_MMX
&&
1642 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
1646 emit_2ub(p
, X86_TWOB
, 0x6b);
1647 emit_modrm( p
, dst
, src
);
1650 void mmx_packuswb( struct x86_function
*p
,
1652 struct x86_reg src
)
1654 DUMP_RR( dst
, src
);
1655 assert(dst
.file
== file_MMX
&&
1656 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
1660 emit_2ub(p
, X86_TWOB
, 0x67);
1661 emit_modrm( p
, dst
, src
);
1664 void mmx_movd( struct x86_function
*p
,
1666 struct x86_reg src
)
1668 DUMP_RR( dst
, src
);
1670 emit_1ub(p
, X86_TWOB
);
1671 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
1674 void mmx_movq( struct x86_function
*p
,
1676 struct x86_reg src
)
1678 DUMP_RR( dst
, src
);
1680 emit_1ub(p
, X86_TWOB
);
1681 emit_op_modrm( p
, 0x6f, 0x7f, dst
, src
);
1685 /***********************************************************************
1690 void x86_cdecl_caller_push_regs( struct x86_function
*p
)
1692 x86_push(p
, x86_make_reg(file_REG32
, reg_AX
));
1693 x86_push(p
, x86_make_reg(file_REG32
, reg_CX
));
1694 x86_push(p
, x86_make_reg(file_REG32
, reg_DX
));
1697 void x86_cdecl_caller_pop_regs( struct x86_function
*p
)
1699 x86_pop(p
, x86_make_reg(file_REG32
, reg_DX
));
1700 x86_pop(p
, x86_make_reg(file_REG32
, reg_CX
));
1701 x86_pop(p
, x86_make_reg(file_REG32
, reg_AX
));
1705 /* Retreive a reference to one of the function arguments, taking into
1706 * account any push/pop activity:
1708 struct x86_reg
x86_fn_arg( struct x86_function
*p
,
1711 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
1712 p
->stack_offset
+ arg
* 4); /* ??? */
1716 void x86_init_func( struct x86_function
*p
)
1724 void x86_init_func_size( struct x86_function
*p
, unsigned code_size
)
1726 p
->size
= code_size
;
1727 p
->store
= rtasm_exec_malloc(code_size
);
1728 if (p
->store
== NULL
) {
1729 p
->store
= p
->error_overflow
;
1735 void x86_release_func( struct x86_function
*p
)
1737 if (p
->store
&& p
->store
!= p
->error_overflow
)
1738 rtasm_exec_free(p
->store
);
1746 void (*x86_get_func( struct x86_function
*p
))(void)
1749 if (DISASSEM
&& p
->store
)
1750 debug_printf("disassemble %p %p\n", p
->store
, p
->csr
);
1752 if (p
->store
== p
->error_overflow
)
1753 return (void (*)(void)) NULL
;
1755 return (void (*)(void)) p
->store
;
1760 void x86sse_dummy( void )