Improve x86 float compare
[sljit.git] / sljit_src / sljitNativeX86_common.c
blob f56dd57dca2e320d4b1d10d1ddb1ce8053170631
1 /*
2 * Stack-less Just-In-Time compiler
4 * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
6 * Redistribution and use in source and binary forms, with or without modification, are
7 * permitted provided that the following conditions are met:
9 * 1. Redistributions of source code must retain the above copyright notice, this list of
10 * conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
13 * of conditions and the following disclaimer in the documentation and/or other materials
14 * provided with the distribution.
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
29 #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
30 return "x86" SLJIT_CPUINFO " ABI:fastcall";
31 #else
32 return "x86" SLJIT_CPUINFO;
33 #endif
37 32b register indexes:
38 0 - EAX
39 1 - ECX
40 2 - EDX
41 3 - EBX
42 4 - ESP
43 5 - EBP
44 6 - ESI
45 7 - EDI
49 64b register indexes:
50 0 - RAX
51 1 - RCX
52 2 - RDX
53 3 - RBX
54 4 - RSP
55 5 - RBP
56 6 - RSI
57 7 - RDI
58 8 - R8 - from here on a REX prefix is required
59 9 - R9
60 10 - R10
61 11 - R11
62 12 - R12
63 13 - R13
64 14 - R14
65 15 - R15
68 #define TMP_FREG (0)
70 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
72 /* Last register + 1. */
73 #define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
75 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
76 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 7, 6, 3, 4, 5
79 #define CHECK_EXTRA_REGS(p, w, do) \
80 if (p >= SLJIT_R3 && p <= SLJIT_S3) { \
81 if (p <= compiler->scratches) \
82 w = compiler->scratches_offset + ((p) - SLJIT_R3) * SSIZE_OF(sw); \
83 else \
84 w = compiler->locals_offset + ((p) - SLJIT_S2) * SSIZE_OF(sw); \
85 p = SLJIT_MEM1(SLJIT_SP); \
86 do; \
89 #else /* SLJIT_CONFIG_X86_32 */
91 /* Last register + 1. */
92 #define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
93 #define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
95 /* Note: r12 & 0x7 == 0b100, which is decoded as a SIB byte being present.
96 Note: avoid using r12 and r13 for memory addressing;
97 therefore r12 is better used as a higher saved register. */
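/* Illustrative sketch (an addition for clarity, not part of the original source):
   a base register whose low three bits are 0b100 (rsp/r12) always forces a SIB
   byte in the ModRM encoding, and r13 (low bits 0b101) needs an explicit
   displacement byte, which is why both are avoided for memory addressing above. */
static SLJIT_INLINE sljit_s32 example_base_forces_sib(sljit_u8 reg)
{
	return (reg & 0x7) == 0x4;
}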
98 #ifndef _WIN64
99 /* Args: rdi(=7), rsi(=6), rdx(=2), rcx(=1), r8, r9. Scratches: rax(=0), r10, r11 */
100 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
101 0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9
103 /* low-map. reg_map & 0x7. */
104 static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
105 0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
107 #else
108 /* Args: rcx(=1), rdx(=2), r8, r9. Scratches: rax(=0), r10, r11 */
109 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
110 0, 0, 2, 8, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 9, 10
112 /* low-map. reg_map & 0x7. */
113 static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
114 0, 0, 2, 0, 1, 3, 4, 5, 5, 6, 7, 7, 6, 3, 4, 1, 2
116 #endif
118 /* Args: xmm0-xmm3 */
119 static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
120 4, 0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
122 /* low-map. freg_map & 0x7. */
123 static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
124 4, 0, 1, 2, 3, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7
127 #define REX_W 0x48
128 #define REX_R 0x44
129 #define REX_X 0x42
130 #define REX_B 0x41
131 #define REX 0x40
133 #ifndef _WIN64
134 #define HALFWORD_MAX 0x7fffffffl
135 #define HALFWORD_MIN -0x80000000l
136 #else
137 #define HALFWORD_MAX 0x7fffffffll
138 #define HALFWORD_MIN -0x80000000ll
139 #endif
141 #define IS_HALFWORD(x) ((x) <= HALFWORD_MAX && (x) >= HALFWORD_MIN)
142 #define NOT_HALFWORD(x) ((x) > HALFWORD_MAX || (x) < HALFWORD_MIN)
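/* Worked example (illustration only): IS_HALFWORD(0x7fffffff) is nonzero while
   IS_HALFWORD((sljit_sw)0x80000000) is zero, so the former can be emitted as a
   sign-extended 32-bit immediate and the latter must be loaded into a temporary
   register first (see BINARY_IMM below). */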
144 #define CHECK_EXTRA_REGS(p, w, do)
146 #endif /* SLJIT_CONFIG_X86_32 */
148 #define U8(v) ((sljit_u8)(v))
151 /* Size flags for emit_x86_instruction: */
152 #define EX86_BIN_INS 0x0010
153 #define EX86_SHIFT_INS 0x0020
154 #define EX86_REX 0x0040
155 #define EX86_NO_REXW 0x0080
156 #define EX86_BYTE_ARG 0x0100
157 #define EX86_HALF_ARG 0x0200
158 #define EX86_PREF_66 0x0400
159 #define EX86_PREF_F2 0x0800
160 #define EX86_PREF_F3 0x1000
161 #define EX86_SSE2_OP1 0x2000
162 #define EX86_SSE2_OP2 0x4000
163 #define EX86_SSE2 (EX86_SSE2_OP1 | EX86_SSE2_OP2)
165 /* --------------------------------------------------------------------- */
166 /* Instruction forms */
167 /* --------------------------------------------------------------------- */
169 #define ADD (/* BINARY */ 0 << 3)
170 #define ADD_EAX_i32 0x05
171 #define ADD_r_rm 0x03
172 #define ADD_rm_r 0x01
173 #define ADDSD_x_xm 0x58
174 #define ADC (/* BINARY */ 2 << 3)
175 #define ADC_EAX_i32 0x15
176 #define ADC_r_rm 0x13
177 #define ADC_rm_r 0x11
178 #define AND (/* BINARY */ 4 << 3)
179 #define AND_EAX_i32 0x25
180 #define AND_r_rm 0x23
181 #define AND_rm_r 0x21
182 #define ANDPD_x_xm 0x54
183 #define BSR_r_rm (/* GROUP_0F */ 0xbd)
184 #define CALL_i32 0xe8
185 #define CALL_rm (/* GROUP_FF */ 2 << 3)
186 #define CDQ 0x99
187 #define CMOVE_r_rm (/* GROUP_0F */ 0x44)
188 #define CMP (/* BINARY */ 7 << 3)
189 #define CMP_EAX_i32 0x3d
190 #define CMP_r_rm 0x3b
191 #define CMP_rm_r 0x39
192 #define CVTPD2PS_x_xm 0x5a
193 #define CVTSI2SD_x_rm 0x2a
194 #define CVTTSD2SI_r_xm 0x2c
195 #define DIV (/* GROUP_F7 */ 6 << 3)
196 #define DIVSD_x_xm 0x5e
197 #define FSTPS 0xd9
198 #define FSTPD 0xdd
199 #define INT3 0xcc
200 #define IDIV (/* GROUP_F7 */ 7 << 3)
201 #define IMUL (/* GROUP_F7 */ 5 << 3)
202 #define IMUL_r_rm (/* GROUP_0F */ 0xaf)
203 #define IMUL_r_rm_i8 0x6b
204 #define IMUL_r_rm_i32 0x69
205 #define JE_i8 0x74
206 #define JNE_i8 0x75
207 #define JMP_i8 0xeb
208 #define JMP_i32 0xe9
209 #define JMP_rm (/* GROUP_FF */ 4 << 3)
210 #define LEA_r_m 0x8d
211 #define LOOP_i8 0xe2
212 #define MOV_r_rm 0x8b
213 #define MOV_r_i32 0xb8
214 #define MOV_rm_r 0x89
215 #define MOV_rm_i32 0xc7
216 #define MOV_rm8_i8 0xc6
217 #define MOV_rm8_r8 0x88
218 #define MOVAPS_x_xm 0x28
219 #define MOVAPS_xm_x 0x29
220 #define MOVSD_x_xm 0x10
221 #define MOVSD_xm_x 0x11
222 #define MOVSXD_r_rm 0x63
223 #define MOVSX_r_rm8 (/* GROUP_0F */ 0xbe)
224 #define MOVSX_r_rm16 (/* GROUP_0F */ 0xbf)
225 #define MOVZX_r_rm8 (/* GROUP_0F */ 0xb6)
226 #define MOVZX_r_rm16 (/* GROUP_0F */ 0xb7)
227 #define MUL (/* GROUP_F7 */ 4 << 3)
228 #define MULSD_x_xm 0x59
229 #define NEG_rm (/* GROUP_F7 */ 3 << 3)
230 #define NOP 0x90
231 #define NOT_rm (/* GROUP_F7 */ 2 << 3)
232 #define OR (/* BINARY */ 1 << 3)
233 #define OR_r_rm 0x0b
234 #define OR_EAX_i32 0x0d
235 #define OR_rm_r 0x09
236 #define OR_rm8_r8 0x08
237 #define POP_r 0x58
238 #define POP_rm 0x8f
239 #define POPF 0x9d
240 #define PREFETCH 0x18
241 #define PUSH_i32 0x68
242 #define PUSH_r 0x50
243 #define PUSH_rm (/* GROUP_FF */ 6 << 3)
244 #define PUSHF 0x9c
245 #define RET_near 0xc3
246 #define RET_i16 0xc2
247 #define SBB (/* BINARY */ 3 << 3)
248 #define SBB_EAX_i32 0x1d
249 #define SBB_r_rm 0x1b
250 #define SBB_rm_r 0x19
251 #define SAR (/* SHIFT */ 7 << 3)
252 #define SHL (/* SHIFT */ 4 << 3)
253 #define SHR (/* SHIFT */ 5 << 3)
254 #define SUB (/* BINARY */ 5 << 3)
255 #define SUB_EAX_i32 0x2d
256 #define SUB_r_rm 0x2b
257 #define SUB_rm_r 0x29
258 #define SUBSD_x_xm 0x5c
259 #define TEST_EAX_i32 0xa9
260 #define TEST_rm_r 0x85
261 #define UCOMISD_x_xm 0x2e
262 #define UNPCKLPD_x_xm 0x14
263 #define XCHG_EAX_r 0x90
264 #define XCHG_r_rm 0x87
265 #define XOR (/* BINARY */ 6 << 3)
266 #define XOR_EAX_i32 0x35
267 #define XOR_r_rm 0x33
268 #define XOR_rm_r 0x31
269 #define XORPD_x_xm 0x57
271 #define GROUP_0F 0x0f
272 #define GROUP_F7 0xf7
273 #define GROUP_FF 0xff
274 #define GROUP_BINARY_81 0x81
275 #define GROUP_BINARY_83 0x83
276 #define GROUP_SHIFT_1 0xd1
277 #define GROUP_SHIFT_N 0xc1
278 #define GROUP_SHIFT_CL 0xd3
280 #define MOD_REG 0xc0
281 #define MOD_DISP8 0x40
283 #define INC_SIZE(s) (*inst++ = U8(s), compiler->size += (s))
285 #define PUSH_REG(r) (*inst++ = U8(PUSH_r + (r)))
286 #define POP_REG(r) (*inst++ = U8(POP_r + (r)))
287 #define RET() (*inst++ = RET_near)
288 #define RET_I16(n) (*inst++ = RET_i16, *inst++ = U8(n), *inst++ = 0)
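/* Usage sketch (illustration only; this is the pattern the emitters later in this
   file follow): reserve buffer space, bump the size counter, then write the bytes
   through the macros above:

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
	FAIL_IF(!inst);
	INC_SIZE(1);
	PUSH_REG(reg_map[reg]);
*/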
290 /* Multithreading does not affect these static variables, since they store
291 built-in CPU features. It is therefore harmless if different threads overwrite
292 them while detecting the CPU features at the same time. */
293 #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
294 static sljit_s32 cpu_has_sse2 = -1;
295 #endif
296 static sljit_s32 cpu_has_cmov = -1;
298 #ifdef _WIN32_WCE
299 #include <cmnintrin.h>
300 #elif defined(_MSC_VER) && _MSC_VER >= 1400
301 #include <intrin.h>
302 #endif
304 /******************************************************/
305 /* Unaligned-store functions */
306 /******************************************************/
308 static SLJIT_INLINE void sljit_unaligned_store_s16(void *addr, sljit_s16 value)
310 SLJIT_MEMCPY(addr, &value, sizeof(value));
313 static SLJIT_INLINE void sljit_unaligned_store_s32(void *addr, sljit_s32 value)
315 SLJIT_MEMCPY(addr, &value, sizeof(value));
318 static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)
320 SLJIT_MEMCPY(addr, &value, sizeof(value));
323 /******************************************************/
324 /* Utility functions */
325 /******************************************************/
327 static void get_cpu_features(void)
329 sljit_u32 features;
331 #if defined(_MSC_VER) && _MSC_VER >= 1400
333 int CPUInfo[4];
334 __cpuid(CPUInfo, 1);
335 features = (sljit_u32)CPUInfo[3];
337 #elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C)
339 /* AT&T syntax. */
340 __asm__ (
341 "movl $0x1, %%eax\n"
342 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
343 /* On x86-32, there is no red zone, so this
344 should work (no need for a local variable). */
345 "push %%ebx\n"
346 #endif
347 "cpuid\n"
348 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
349 "pop %%ebx\n"
350 #endif
351 "movl %%edx, %0\n"
352 : "=g" (features)
354 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
355 : "%eax", "%ecx", "%edx"
356 #else
357 : "%rax", "%rbx", "%rcx", "%rdx"
358 #endif
361 #else /* _MSC_VER && _MSC_VER >= 1400 */
363 /* Intel syntax. */
364 __asm {
365 mov eax, 1
366 cpuid
367 mov features, edx
370 #endif /* _MSC_VER && _MSC_VER >= 1400 */
372 #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
373 cpu_has_sse2 = (features >> 26) & 0x1;
374 #endif
375 cpu_has_cmov = (features >> 15) & 0x1;
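/* Usage sketch (illustration only, mirroring sljit_has_cpu_feature() below):
   detection is lazy, so callers re-check the cached value before use:

	if (cpu_has_cmov == -1)
		get_cpu_features();
	if (cpu_has_cmov) { ... emit CMOVcc ... }
*/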
378 static sljit_u8 get_jump_code(sljit_uw type)
380 switch (type) {
381 case SLJIT_EQUAL:
382 case SLJIT_F_EQUAL:
383 case SLJIT_UNORDERED_OR_EQUAL:
384 case SLJIT_ORDERED_EQUAL: /* Not supported. */
385 return 0x84 /* je */;
387 case SLJIT_NOT_EQUAL:
388 case SLJIT_F_NOT_EQUAL:
389 case SLJIT_ORDERED_NOT_EQUAL:
390 case SLJIT_UNORDERED_OR_NOT_EQUAL: /* Not supported. */
391 return 0x85 /* jne */;
393 case SLJIT_LESS:
394 case SLJIT_CARRY:
395 case SLJIT_F_LESS:
396 case SLJIT_UNORDERED_OR_LESS:
397 case SLJIT_UNORDERED_OR_GREATER:
398 return 0x82 /* jc */;
400 case SLJIT_GREATER_EQUAL:
401 case SLJIT_NOT_CARRY:
402 case SLJIT_F_GREATER_EQUAL:
403 case SLJIT_ORDERED_GREATER_EQUAL:
404 case SLJIT_ORDERED_LESS_EQUAL:
405 return 0x83 /* jae */;
407 case SLJIT_GREATER:
408 case SLJIT_F_GREATER:
409 case SLJIT_ORDERED_LESS:
410 case SLJIT_ORDERED_GREATER:
411 return 0x87 /* jnbe */;
413 case SLJIT_LESS_EQUAL:
414 case SLJIT_F_LESS_EQUAL:
415 case SLJIT_UNORDERED_OR_GREATER_EQUAL:
416 case SLJIT_UNORDERED_OR_LESS_EQUAL:
417 return 0x86 /* jbe */;
419 case SLJIT_SIG_LESS:
420 return 0x8c /* jl */;
422 case SLJIT_SIG_GREATER_EQUAL:
423 return 0x8d /* jnl */;
425 case SLJIT_SIG_GREATER:
426 return 0x8f /* jnle */;
428 case SLJIT_SIG_LESS_EQUAL:
429 return 0x8e /* jle */;
431 case SLJIT_OVERFLOW:
432 return 0x80 /* jo */;
434 case SLJIT_NOT_OVERFLOW:
435 return 0x81 /* jno */;
437 case SLJIT_UNORDERED:
438 return 0x8a /* jp */;
440 case SLJIT_ORDERED:
441 return 0x8b /* jpo */;
443 return 0;
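/* Worked example (illustration): get_jump_code(SLJIT_EQUAL) returns 0x84, the second
   byte of the long "0F 84 rel32" form; generate_near_jump_code() below derives the
   short "74 rel8" form from the same value by subtracting 0x10. */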
446 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
447 static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset);
448 #else
449 static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr);
450 static sljit_u8* generate_put_label_code(struct sljit_put_label *put_label, sljit_u8 *code_ptr, sljit_uw max_label);
451 #endif
453 static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
455 sljit_uw type = jump->flags >> TYPE_SHIFT;
456 sljit_s32 short_jump;
457 sljit_uw label_addr;
459 if (jump->flags & JUMP_LABEL)
460 label_addr = (sljit_uw)(code + jump->u.label->size);
461 else
462 label_addr = jump->u.target - (sljit_uw)executable_offset;
464 short_jump = (sljit_sw)(label_addr - (jump->addr + 2)) >= -128 && (sljit_sw)(label_addr - (jump->addr + 2)) <= 127;
466 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
467 if ((sljit_sw)(label_addr - (jump->addr + 1)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump->addr + 1)) < HALFWORD_MIN)
468 return generate_far_jump_code(jump, code_ptr);
469 #endif
471 if (type == SLJIT_JUMP) {
472 if (short_jump)
473 *code_ptr++ = JMP_i8;
474 else
475 *code_ptr++ = JMP_i32;
476 jump->addr++;
478 else if (type >= SLJIT_FAST_CALL) {
479 short_jump = 0;
480 *code_ptr++ = CALL_i32;
481 jump->addr++;
483 else if (short_jump) {
484 *code_ptr++ = U8(get_jump_code(type) - 0x10);
485 jump->addr++;
487 else {
488 *code_ptr++ = GROUP_0F;
489 *code_ptr++ = get_jump_code(type);
490 jump->addr += 2;
493 if (short_jump) {
494 jump->flags |= PATCH_MB;
495 code_ptr += sizeof(sljit_s8);
496 } else {
497 jump->flags |= PATCH_MW;
498 code_ptr += sizeof(sljit_s32);
501 return code_ptr;
504 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
506 struct sljit_memory_fragment *buf;
507 sljit_u8 *code;
508 sljit_u8 *code_ptr;
509 sljit_u8 *buf_ptr;
510 sljit_u8 *buf_end;
511 sljit_u8 len;
512 sljit_sw executable_offset;
513 sljit_uw jump_addr;
515 struct sljit_label *label;
516 struct sljit_jump *jump;
517 struct sljit_const *const_;
518 struct sljit_put_label *put_label;
520 CHECK_ERROR_PTR();
521 CHECK_PTR(check_sljit_generate_code(compiler));
522 reverse_buf(compiler);
524 /* Second code generation pass. */
525 code = (sljit_u8*)SLJIT_MALLOC_EXEC(compiler->size, compiler->exec_allocator_data);
526 PTR_FAIL_WITH_EXEC_IF(code);
527 buf = compiler->buf;
529 code_ptr = code;
530 label = compiler->labels;
531 jump = compiler->jumps;
532 const_ = compiler->consts;
533 put_label = compiler->put_labels;
534 executable_offset = SLJIT_EXEC_OFFSET(code);
536 do {
537 buf_ptr = buf->memory;
538 buf_end = buf_ptr + buf->used_size;
539 do {
540 len = *buf_ptr++;
541 if (len > 0) {
542 /* The code is already generated. */
543 SLJIT_MEMCPY(code_ptr, buf_ptr, len);
544 code_ptr += len;
545 buf_ptr += len;
547 else {
548 switch (*buf_ptr) {
549 case 0:
550 label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
551 label->size = (sljit_uw)(code_ptr - code);
552 label = label->next;
553 break;
554 case 1:
555 jump->addr = (sljit_uw)code_ptr;
556 if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
557 code_ptr = generate_near_jump_code(jump, code_ptr, code, executable_offset);
558 else {
559 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
560 code_ptr = generate_far_jump_code(jump, code_ptr, executable_offset);
561 #else
562 code_ptr = generate_far_jump_code(jump, code_ptr);
563 #endif
565 jump = jump->next;
566 break;
567 case 2:
568 const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
569 const_ = const_->next;
570 break;
571 default:
572 SLJIT_ASSERT(*buf_ptr == 3);
573 SLJIT_ASSERT(put_label->label);
574 put_label->addr = (sljit_uw)code_ptr;
575 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
576 code_ptr = generate_put_label_code(put_label, code_ptr, (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size);
577 #endif
578 put_label = put_label->next;
579 break;
581 buf_ptr++;
583 } while (buf_ptr < buf_end);
584 SLJIT_ASSERT(buf_ptr == buf_end);
585 buf = buf->next;
586 } while (buf);
588 SLJIT_ASSERT(!label);
589 SLJIT_ASSERT(!jump);
590 SLJIT_ASSERT(!const_);
591 SLJIT_ASSERT(!put_label);
592 SLJIT_ASSERT(code_ptr <= code + compiler->size);
594 jump = compiler->jumps;
595 while (jump) {
596 jump_addr = jump->addr + (sljit_uw)executable_offset;
598 if (jump->flags & PATCH_MB) {
599 SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) >= -128 && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) <= 127);
600 *(sljit_u8*)jump->addr = U8(jump->u.label->addr - (jump_addr + sizeof(sljit_s8)));
601 } else if (jump->flags & PATCH_MW) {
602 if (jump->flags & JUMP_LABEL) {
603 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
604 sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_sw))));
605 #else
606 SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
607 sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))));
608 #endif
610 else {
611 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
612 sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_sw))));
613 #else
614 SLJIT_ASSERT((sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
615 sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.target - (jump_addr + sizeof(sljit_s32))));
616 #endif
619 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
620 else if (jump->flags & PATCH_MD)
621 sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)jump->u.label->addr);
622 #endif
624 jump = jump->next;
627 put_label = compiler->put_labels;
628 while (put_label) {
629 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
630 sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
631 #else
632 if (put_label->flags & PATCH_MD) {
633 SLJIT_ASSERT(put_label->label->addr > HALFWORD_MAX);
634 sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
636 else {
637 SLJIT_ASSERT(put_label->label->addr <= HALFWORD_MAX);
638 sljit_unaligned_store_s32((void*)(put_label->addr - sizeof(sljit_s32)), (sljit_s32)put_label->label->addr);
640 #endif
642 put_label = put_label->next;
645 compiler->error = SLJIT_ERR_COMPILED;
646 compiler->executable_offset = executable_offset;
647 compiler->executable_size = (sljit_uw)(code_ptr - code);
649 code = (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
651 SLJIT_UPDATE_WX_FLAGS(code, (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset), 1);
652 return (void*)code;
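/* Caller sketch (an assumption about user code, not taken from this file): the
   returned pointer is directly executable and is later released with
   sljit_free_code():

	code = sljit_generate_code(compiler);
	func = (func_type)code;
	result = func(args);
	sljit_free_code(code, NULL);
*/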
655 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
657 switch (feature_type) {
658 case SLJIT_HAS_FPU:
659 #ifdef SLJIT_IS_FPU_AVAILABLE
660 return SLJIT_IS_FPU_AVAILABLE;
661 #elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
662 if (cpu_has_sse2 == -1)
663 get_cpu_features();
664 return cpu_has_sse2;
665 #else /* SLJIT_DETECT_SSE2 */
666 return 1;
667 #endif /* SLJIT_DETECT_SSE2 */
669 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
670 case SLJIT_HAS_VIRTUAL_REGISTERS:
671 return 1;
672 #endif
674 case SLJIT_HAS_CLZ:
675 case SLJIT_HAS_CMOV:
676 if (cpu_has_cmov == -1)
677 get_cpu_features();
678 return cpu_has_cmov;
680 case SLJIT_HAS_PREFETCH:
681 return 1;
683 case SLJIT_HAS_SSE2:
684 #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
685 if (cpu_has_sse2 == -1)
686 get_cpu_features();
687 return cpu_has_sse2;
688 #else
689 return 1;
690 #endif
692 default:
693 return 0;
697 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
699 if (type < SLJIT_UNORDERED || type > SLJIT_ORDERED_LESS_EQUAL)
700 return 0;
702 switch (type) {
703 case SLJIT_ORDERED_EQUAL:
704 case SLJIT_UNORDERED_OR_NOT_EQUAL:
705 return 0;
708 return 1;
711 /* --------------------------------------------------------------------- */
712 /* Operators */
713 /* --------------------------------------------------------------------- */
715 #define BINARY_OPCODE(opcode) (((opcode ## _EAX_i32) << 24) | ((opcode ## _r_rm) << 16) | ((opcode ## _rm_r) << 8) | (opcode))
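/* Worked example (illustration only): BINARY_OPCODE(ADD) packs the four ADD
   encodings into a single 32-bit constant:
   (ADD_EAX_i32 << 24) | (ADD_r_rm << 16) | (ADD_rm_r << 8) | ADD
   = (0x05 << 24) | (0x03 << 16) | (0x01 << 8) | 0x00 = 0x05030100,
   which emit_cum_binary() and emit_non_cum_binary() below unpack again with
   shifts and masks. */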
717 #define BINARY_IMM32(op_imm, immw, arg, argw) \
718 do { \
719 inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
720 FAIL_IF(!inst); \
721 *(inst + 1) |= (op_imm); \
722 } while (0)
724 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
726 #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
727 do { \
728 if (IS_HALFWORD(immw) || compiler->mode32) { \
729 BINARY_IMM32(op_imm, immw, arg, argw); \
731 else { \
732 FAIL_IF(emit_load_imm64(compiler, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, immw)); \
733 inst = emit_x86_instruction(compiler, 1, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
734 FAIL_IF(!inst); \
735 *inst = (op_mr); \
737 } while (0)
739 #define BINARY_EAX_IMM(op_eax_imm, immw) \
740 FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw))
742 #else /* !SLJIT_CONFIG_X86_64 */
744 #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
745 BINARY_IMM32(op_imm, immw, arg, argw)
747 #define BINARY_EAX_IMM(op_eax_imm, immw) \
748 FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw))
750 #endif /* SLJIT_CONFIG_X86_64 */
752 static sljit_s32 emit_mov(struct sljit_compiler *compiler,
753 sljit_s32 dst, sljit_sw dstw,
754 sljit_s32 src, sljit_sw srcw);
756 #define EMIT_MOV(compiler, dst, dstw, src, srcw) \
757 FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
759 static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
760 sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src);
762 static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
763 sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw);
765 static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
766 sljit_s32 src1, sljit_sw src1w,
767 sljit_s32 src2, sljit_sw src2w);
769 static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)
771 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET)
772 /* Emit endbr32/endbr64 when CET is enabled. */
773 sljit_u8 *inst;
774 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
775 FAIL_IF(!inst);
776 INC_SIZE(4);
777 *inst++ = 0xf3;
778 *inst++ = 0x0f;
779 *inst++ = 0x1e;
780 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
781 *inst = 0xfb;
782 #else
783 *inst = 0xfa;
784 #endif
785 #else /* !SLJIT_CONFIG_X86_CET */
786 SLJIT_UNUSED_ARG(compiler);
787 #endif /* SLJIT_CONFIG_X86_CET */
788 return SLJIT_SUCCESS;
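/* Worked example (illustration): the bytes emitted above are F3 0F 1E FA (endbr64)
   on x86-64 and F3 0F 1E FB (endbr32) on x86-32; on CPUs without CET they decode
   as a multi-byte NOP. */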
791 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
793 static SLJIT_INLINE sljit_s32 emit_rdssp(struct sljit_compiler *compiler, sljit_s32 reg)
795 sljit_u8 *inst;
796 sljit_s32 size;
798 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
799 size = 5;
800 #else
801 size = 4;
802 #endif
804 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
805 FAIL_IF(!inst);
806 INC_SIZE(size);
807 *inst++ = 0xf3;
808 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
809 *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
810 #endif
811 *inst++ = 0x0f;
812 *inst++ = 0x1e;
813 *inst = (0x3 << 6) | (0x1 << 3) | (reg_map[reg] & 0x7);
814 return SLJIT_SUCCESS;
817 static SLJIT_INLINE sljit_s32 emit_incssp(struct sljit_compiler *compiler, sljit_s32 reg)
819 sljit_u8 *inst;
820 sljit_s32 size;
822 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
823 size = 5;
824 #else
825 size = 4;
826 #endif
828 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
829 FAIL_IF(!inst);
830 INC_SIZE(size);
831 *inst++ = 0xf3;
832 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
833 *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
834 #endif
835 *inst++ = 0x0f;
836 *inst++ = 0xae;
837 *inst = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
838 return SLJIT_SUCCESS;
841 #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
843 static SLJIT_INLINE sljit_s32 cpu_has_shadow_stack(void)
845 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
846 return _get_ssp() != 0;
847 #else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
848 return 0;
849 #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
852 static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compiler,
853 sljit_s32 src, sljit_sw srcw)
855 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
856 sljit_u8 *inst, *jz_after_cmp_inst;
857 sljit_uw size_jz_after_cmp_inst;
859 sljit_uw size_before_rdssp_inst = compiler->size;
861 /* Generate "RDSSP TMP_REG1". */
862 FAIL_IF(emit_rdssp(compiler, TMP_REG1));
864 /* Load return address on shadow stack into TMP_REG1. */
865 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
866 SLJIT_ASSERT(reg_map[TMP_REG1] == 5);
868 /* Hand-code the unsupported "mov 0x0(%ebp),%ebp" instruction. */
869 inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
870 FAIL_IF(!inst);
871 INC_SIZE(3);
872 *inst++ = 0x8b;
873 *inst++ = 0x6d;
874 *inst = 0;
875 #else /* !SLJIT_CONFIG_X86_32 */
876 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(TMP_REG1), 0);
877 #endif /* SLJIT_CONFIG_X86_32 */
879 /* Compare return address against TMP_REG1. */
880 FAIL_IF(emit_cmp_binary (compiler, TMP_REG1, 0, src, srcw));
882 /* Generate JZ to skip the shadow stack adjustment when the shadow
883 stack matches the normal stack. */
884 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
885 FAIL_IF(!inst);
886 INC_SIZE(2);
887 *inst++ = get_jump_code(SLJIT_EQUAL) - 0x10;
888 size_jz_after_cmp_inst = compiler->size;
889 jz_after_cmp_inst = inst;
891 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
892 /* REX_W is not necessary. */
893 compiler->mode32 = 1;
894 #endif
895 /* Load 1 into TMP_REG1. */
896 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
898 /* Generate "INCSSP TMP_REG1". */
899 FAIL_IF(emit_incssp(compiler, TMP_REG1));
901 /* Jump back to "RDSSP TMP_REG1" to check shadow stack again. */
902 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
903 FAIL_IF(!inst);
904 INC_SIZE(2);
905 *inst++ = JMP_i8;
906 *inst = size_before_rdssp_inst - compiler->size;
908 *jz_after_cmp_inst = compiler->size - size_jz_after_cmp_inst;
909 #else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
910 SLJIT_UNUSED_ARG(compiler);
911 SLJIT_UNUSED_ARG(src);
912 SLJIT_UNUSED_ARG(srcw);
913 #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
914 return SLJIT_SUCCESS;
917 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
918 #include "sljitNativeX86_32.c"
919 #else
920 #include "sljitNativeX86_64.c"
921 #endif
923 static sljit_s32 emit_mov(struct sljit_compiler *compiler,
924 sljit_s32 dst, sljit_sw dstw,
925 sljit_s32 src, sljit_sw srcw)
927 sljit_u8* inst;
929 if (FAST_IS_REG(src)) {
930 inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw);
931 FAIL_IF(!inst);
932 *inst = MOV_rm_r;
933 return SLJIT_SUCCESS;
935 if (src & SLJIT_IMM) {
936 if (FAST_IS_REG(dst)) {
937 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
938 return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
939 #else
940 if (!compiler->mode32) {
941 if (NOT_HALFWORD(srcw))
942 return emit_load_imm64(compiler, dst, srcw);
944 else
945 return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, U8(MOV_r_i32 | reg_lmap[dst]), srcw);
946 #endif
948 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
949 if (!compiler->mode32 && NOT_HALFWORD(srcw)) {
950 /* Immediate to memory move. Only the SLJIT_MOV operation copies
951 an immediate directly into memory, so TMP_REG1 can be used. */
952 FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
953 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
954 FAIL_IF(!inst);
955 *inst = MOV_rm_r;
956 return SLJIT_SUCCESS;
958 #endif
959 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw);
960 FAIL_IF(!inst);
961 *inst = MOV_rm_i32;
962 return SLJIT_SUCCESS;
964 if (FAST_IS_REG(dst)) {
965 inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw);
966 FAIL_IF(!inst);
967 *inst = MOV_r_rm;
968 return SLJIT_SUCCESS;
971 /* Memory to memory move. Only the SLJIT_MOV operation copies
972 data from memory to memory, so TMP_REG1 can be used. */
973 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src, srcw);
974 FAIL_IF(!inst);
975 *inst = MOV_r_rm;
976 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
977 FAIL_IF(!inst);
978 *inst = MOV_rm_r;
979 return SLJIT_SUCCESS;
982 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
984 sljit_u8 *inst;
985 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
986 sljit_uw size;
987 #endif
989 CHECK_ERROR();
990 CHECK(check_sljit_emit_op0(compiler, op));
992 switch (GET_OPCODE(op)) {
993 case SLJIT_BREAKPOINT:
994 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
995 FAIL_IF(!inst);
996 INC_SIZE(1);
997 *inst = INT3;
998 break;
999 case SLJIT_NOP:
1000 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1001 FAIL_IF(!inst);
1002 INC_SIZE(1);
1003 *inst = NOP;
1004 break;
1005 case SLJIT_LMUL_UW:
1006 case SLJIT_LMUL_SW:
1007 case SLJIT_DIVMOD_UW:
1008 case SLJIT_DIVMOD_SW:
1009 case SLJIT_DIV_UW:
1010 case SLJIT_DIV_SW:
1011 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1012 #ifdef _WIN64
1013 SLJIT_ASSERT(
1014 reg_map[SLJIT_R0] == 0
1015 && reg_map[SLJIT_R1] == 2
1016 && reg_map[TMP_REG1] > 7);
1017 #else
1018 SLJIT_ASSERT(
1019 reg_map[SLJIT_R0] == 0
1020 && reg_map[SLJIT_R1] < 7
1021 && reg_map[TMP_REG1] == 2);
1022 #endif
1023 compiler->mode32 = op & SLJIT_32;
1024 #endif
1025 SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
1027 op = GET_OPCODE(op);
1028 if ((op | 0x2) == SLJIT_DIV_UW) {
1029 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
1030 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
1031 inst = emit_x86_instruction(compiler, 1, SLJIT_R1, 0, SLJIT_R1, 0);
1032 #else
1033 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
1034 #endif
1035 FAIL_IF(!inst);
1036 *inst = XOR_r_rm;
1039 if ((op | 0x2) == SLJIT_DIV_SW) {
1040 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
1041 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
1042 #endif
1044 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1045 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1046 FAIL_IF(!inst);
1047 INC_SIZE(1);
1048 *inst = CDQ;
1049 #else
1050 if (compiler->mode32) {
1051 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1052 FAIL_IF(!inst);
1053 INC_SIZE(1);
1054 *inst = CDQ;
1055 } else {
1056 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
1057 FAIL_IF(!inst);
1058 INC_SIZE(2);
1059 *inst++ = REX_W;
1060 *inst = CDQ;
1062 #endif
1065 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1066 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
1067 FAIL_IF(!inst);
1068 INC_SIZE(2);
1069 *inst++ = GROUP_F7;
1070 *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
1071 #else
1072 #ifdef _WIN64
1073 size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
1074 #else
1075 size = (!compiler->mode32) ? 3 : 2;
1076 #endif
1077 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
1078 FAIL_IF(!inst);
1079 INC_SIZE(size);
1080 #ifdef _WIN64
1081 if (!compiler->mode32)
1082 *inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
1083 else if (op >= SLJIT_DIVMOD_UW)
1084 *inst++ = REX_B;
1085 *inst++ = GROUP_F7;
1086 *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
1087 #else
1088 if (!compiler->mode32)
1089 *inst++ = REX_W;
1090 *inst++ = GROUP_F7;
1091 *inst = MOD_REG | reg_map[SLJIT_R1];
1092 #endif
1093 #endif
1094 switch (op) {
1095 case SLJIT_LMUL_UW:
1096 *inst |= MUL;
1097 break;
1098 case SLJIT_LMUL_SW:
1099 *inst |= IMUL;
1100 break;
1101 case SLJIT_DIVMOD_UW:
1102 case SLJIT_DIV_UW:
1103 *inst |= DIV;
1104 break;
1105 case SLJIT_DIVMOD_SW:
1106 case SLJIT_DIV_SW:
1107 *inst |= IDIV;
1108 break;
1110 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
1111 if (op <= SLJIT_DIVMOD_SW)
1112 EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
1113 #else
1114 if (op >= SLJIT_DIV_UW)
1115 EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
1116 #endif
1117 break;
1118 case SLJIT_ENDBR:
1119 return emit_endbranch(compiler);
1120 case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
1121 return skip_frames_before_return(compiler);
1124 return SLJIT_SUCCESS;
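/* Worked example (illustration): these opcodes rely on the fixed x86 convention
   asserted above: the dividend/multiplicand lives in R0 (EAX/RAX), DIV/IDIV return
   the quotient in EAX/RAX and the remainder in EDX/RDX, and MUL/IMUL return the low
   half in EAX/RAX and the high half in EDX/RDX; the register moves around the
   GROUP_F7 instruction shuffle R1 and TMP_REG1 so the results end up in R0/R1. */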
1127 #define ENCODE_PREFIX(prefix) \
1128 do { \
1129 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1); \
1130 FAIL_IF(!inst); \
1131 INC_SIZE(1); \
1132 *inst = U8(prefix); \
1133 } while (0)
1135 static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
1136 sljit_s32 dst, sljit_sw dstw,
1137 sljit_s32 src, sljit_sw srcw)
1139 sljit_u8* inst;
1140 sljit_s32 dst_r;
1141 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1142 sljit_s32 work_r;
1143 #endif
1145 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1146 compiler->mode32 = 0;
1147 #endif
1149 if (src & SLJIT_IMM) {
1150 if (FAST_IS_REG(dst)) {
1151 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1152 return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
1153 #else
1154 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
1155 FAIL_IF(!inst);
1156 *inst = MOV_rm_i32;
1157 return SLJIT_SUCCESS;
1158 #endif
1160 inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw);
1161 FAIL_IF(!inst);
1162 *inst = MOV_rm8_i8;
1163 return SLJIT_SUCCESS;
1166 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1168 if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) {
1169 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1170 if (reg_map[src] >= 4) {
1171 SLJIT_ASSERT(dst_r == TMP_REG1);
1172 EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
1173 } else
1174 dst_r = src;
1175 #else
1176 dst_r = src;
1177 #endif
1179 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1180 else if (FAST_IS_REG(src) && reg_map[src] >= 4) {
1181 /* src, dst are registers. */
1182 SLJIT_ASSERT(FAST_IS_REG(dst));
1183 if (reg_map[dst] < 4) {
1184 if (dst != src)
1185 EMIT_MOV(compiler, dst, 0, src, 0);
1186 inst = emit_x86_instruction(compiler, 2, dst, 0, dst, 0);
1187 FAIL_IF(!inst);
1188 *inst++ = GROUP_0F;
1189 *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
1191 else {
1192 if (dst != src)
1193 EMIT_MOV(compiler, dst, 0, src, 0);
1194 if (sign) {
1195 /* shl reg, 24 */
1196 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
1197 FAIL_IF(!inst);
1198 *inst |= SHL;
1199 /* sar reg, 24 */
1200 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
1201 FAIL_IF(!inst);
1202 *inst |= SAR;
1204 else {
1205 inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
1206 FAIL_IF(!inst);
1207 *(inst + 1) |= AND;
1210 return SLJIT_SUCCESS;
1212 #endif
1213 else {
1214 /* src is either a memory address or a register with reg_map[src] < 4 on x86-32. */
1215 inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
1216 FAIL_IF(!inst);
1217 *inst++ = GROUP_0F;
1218 *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
1221 if (dst & SLJIT_MEM) {
1222 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1223 if (dst_r == TMP_REG1) {
1224 /* Find an unused register whose reg_map value is < 4. */
1225 if ((dst & REG_MASK) == SLJIT_R0) {
1226 if ((dst & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_R1))
1227 work_r = SLJIT_R2;
1228 else
1229 work_r = SLJIT_R1;
1231 else {
1232 if ((dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R0))
1233 work_r = SLJIT_R0;
1234 else if ((dst & REG_MASK) == SLJIT_R1)
1235 work_r = SLJIT_R2;
1236 else
1237 work_r = SLJIT_R1;
1240 if (work_r == SLJIT_R0) {
1241 ENCODE_PREFIX(XCHG_EAX_r | reg_map[TMP_REG1]);
1243 else {
1244 inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
1245 FAIL_IF(!inst);
1246 *inst = XCHG_r_rm;
1249 inst = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw);
1250 FAIL_IF(!inst);
1251 *inst = MOV_rm8_r8;
1253 if (work_r == SLJIT_R0) {
1254 ENCODE_PREFIX(XCHG_EAX_r | reg_map[TMP_REG1]);
1256 else {
1257 inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
1258 FAIL_IF(!inst);
1259 *inst = XCHG_r_rm;
1262 else {
1263 inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
1264 FAIL_IF(!inst);
1265 *inst = MOV_rm8_r8;
1267 #else
1268 inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
1269 FAIL_IF(!inst);
1270 *inst = MOV_rm8_r8;
1271 #endif
1274 return SLJIT_SUCCESS;
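/* Worked example (illustration): on x86-32 only registers with reg_map < 4
   (EAX/ECX/EDX/EBX) have byte-sized forms, so when a source maps to ESI/EDI/EBP the
   code above either shifts/masks the value in place or XCHGs it into a low register
   before the byte store; x86-64 side-steps the problem with a REX prefix. */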
1277 static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op,
1278 sljit_s32 src, sljit_sw srcw)
1280 sljit_u8* inst;
1282 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1283 compiler->mode32 = 1;
1284 #endif
1286 inst = emit_x86_instruction(compiler, 2, 0, 0, src, srcw);
1287 FAIL_IF(!inst);
1288 *inst++ = GROUP_0F;
1289 *inst++ = PREFETCH;
1291 if (op == SLJIT_PREFETCH_L1)
1292 *inst |= (1 << 3);
1293 else if (op == SLJIT_PREFETCH_L2)
1294 *inst |= (2 << 3);
1295 else if (op == SLJIT_PREFETCH_L3)
1296 *inst |= (3 << 3);
1298 return SLJIT_SUCCESS;
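/* Worked example (illustration): opcode 0F 18 selects the prefetch hint through the
   ModRM reg field patched above: /1 is PREFETCHT0 (L1), /2 PREFETCHT1 (L2),
   /3 PREFETCHT2 (L3), and the field left at /0 encodes PREFETCHNTA. */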
1301 static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
1302 sljit_s32 dst, sljit_sw dstw,
1303 sljit_s32 src, sljit_sw srcw)
1305 sljit_u8* inst;
1306 sljit_s32 dst_r;
1308 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1309 compiler->mode32 = 0;
1310 #endif
1312 if (src & SLJIT_IMM) {
1313 if (FAST_IS_REG(dst)) {
1314 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1315 return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
1316 #else
1317 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
1318 FAIL_IF(!inst);
1319 *inst = MOV_rm_i32;
1320 return SLJIT_SUCCESS;
1321 #endif
1323 inst = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw);
1324 FAIL_IF(!inst);
1325 *inst = MOV_rm_i32;
1326 return SLJIT_SUCCESS;
1329 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1331 if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
1332 dst_r = src;
1333 else {
1334 inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
1335 FAIL_IF(!inst);
1336 *inst++ = GROUP_0F;
1337 *inst = sign ? MOVSX_r_rm16 : MOVZX_r_rm16;
1340 if (dst & SLJIT_MEM) {
1341 inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
1342 FAIL_IF(!inst);
1343 *inst = MOV_rm_r;
1346 return SLJIT_SUCCESS;
1349 static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
1350 sljit_s32 dst, sljit_sw dstw,
1351 sljit_s32 src, sljit_sw srcw)
1353 sljit_u8* inst;
1355 if (dst == src && dstw == srcw) {
1356 /* Same input and output */
1357 inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
1358 FAIL_IF(!inst);
1359 *inst++ = GROUP_F7;
1360 *inst |= opcode;
1361 return SLJIT_SUCCESS;
1364 if (FAST_IS_REG(dst)) {
1365 EMIT_MOV(compiler, dst, 0, src, srcw);
1366 inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
1367 FAIL_IF(!inst);
1368 *inst++ = GROUP_F7;
1369 *inst |= opcode;
1370 return SLJIT_SUCCESS;
1373 EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
1374 inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
1375 FAIL_IF(!inst);
1376 *inst++ = GROUP_F7;
1377 *inst |= opcode;
1378 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1379 return SLJIT_SUCCESS;
1382 static sljit_s32 emit_not_with_flags(struct sljit_compiler *compiler,
1383 sljit_s32 dst, sljit_sw dstw,
1384 sljit_s32 src, sljit_sw srcw)
1386 sljit_u8* inst;
1388 if (FAST_IS_REG(dst)) {
1389 EMIT_MOV(compiler, dst, 0, src, srcw);
1390 inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
1391 FAIL_IF(!inst);
1392 *inst++ = GROUP_F7;
1393 *inst |= NOT_rm;
1394 inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
1395 FAIL_IF(!inst);
1396 *inst = OR_r_rm;
1397 return SLJIT_SUCCESS;
1400 EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
1401 inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
1402 FAIL_IF(!inst);
1403 *inst++ = GROUP_F7;
1404 *inst |= NOT_rm;
1405 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
1406 FAIL_IF(!inst);
1407 *inst = OR_r_rm;
1408 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1409 return SLJIT_SUCCESS;
1412 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1413 static const sljit_sw emit_clz_arg = 32 + 31;
1414 #endif
1416 static sljit_s32 emit_clz(struct sljit_compiler *compiler, sljit_s32 op_flags,
1417 sljit_s32 dst, sljit_sw dstw,
1418 sljit_s32 src, sljit_sw srcw)
1420 sljit_u8* inst;
1421 sljit_s32 dst_r;
1423 SLJIT_UNUSED_ARG(op_flags);
1425 if (cpu_has_cmov == -1)
1426 get_cpu_features();
1428 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1430 inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
1431 FAIL_IF(!inst);
1432 *inst++ = GROUP_0F;
1433 *inst = BSR_r_rm;
1435 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1436 if (cpu_has_cmov) {
1437 if (dst_r != TMP_REG1) {
1438 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 32 + 31);
1439 inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG1, 0);
1441 else
1442 inst = emit_x86_instruction(compiler, 2, dst_r, 0, SLJIT_MEM0(), (sljit_sw)&emit_clz_arg);
1444 FAIL_IF(!inst);
1445 *inst++ = GROUP_0F;
1446 *inst = CMOVE_r_rm;
1448 else
1449 FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, 32 + 31));
1451 inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
1452 #else
1453 if (cpu_has_cmov) {
1454 EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, !(op_flags & SLJIT_32) ? (64 + 63) : (32 + 31));
1456 inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
1457 FAIL_IF(!inst);
1458 *inst++ = GROUP_0F;
1459 *inst = CMOVE_r_rm;
1461 else
1462 FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, !(op_flags & SLJIT_32) ? (64 + 63) : (32 + 31)));
1464 inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op_flags & SLJIT_32) ? 63 : 31, dst_r, 0);
1465 #endif
1467 FAIL_IF(!inst);
1468 *(inst + 1) |= XOR;
1470 if (dst & SLJIT_MEM)
1471 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1472 return SLJIT_SUCCESS;
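/* Worked example (illustration): BSR returns the index of the highest set bit, so
   CLZ is computed as bsr(x) ^ 31 (or ^ 63 in 64-bit mode). For a zero input BSR
   sets ZF and leaves the destination undefined, which is why 32 + 31 (or 64 + 63)
   is CMOV-ed in before the final XOR: (32 + 31) ^ 31 == 32, the expected result. */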
1475 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
1476 sljit_s32 dst, sljit_sw dstw,
1477 sljit_s32 src, sljit_sw srcw)
1479 sljit_s32 op_flags = GET_ALL_FLAGS(op);
1480 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1481 sljit_s32 dst_is_ereg = 0;
1482 #endif
1484 CHECK_ERROR();
1485 CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
1486 ADJUST_LOCAL_OFFSET(dst, dstw);
1487 ADJUST_LOCAL_OFFSET(src, srcw);
1489 CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
1490 CHECK_EXTRA_REGS(src, srcw, (void)0);
1491 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1492 compiler->mode32 = op_flags & SLJIT_32;
1493 #endif
1495 op = GET_OPCODE(op);
1497 if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
1498 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1499 compiler->mode32 = 0;
1500 #endif
1502 if (FAST_IS_REG(src) && src == dst) {
1503 if (!TYPE_CAST_NEEDED(op))
1504 return SLJIT_SUCCESS;
1507 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1508 if (op_flags & SLJIT_32) {
1509 if (src & SLJIT_MEM) {
1510 if (op == SLJIT_MOV_S32)
1511 op = SLJIT_MOV_U32;
1513 else if (src & SLJIT_IMM) {
1514 if (op == SLJIT_MOV_U32)
1515 op = SLJIT_MOV_S32;
1518 #endif
1520 if (src & SLJIT_IMM) {
1521 switch (op) {
1522 case SLJIT_MOV_U8:
1523 srcw = (sljit_u8)srcw;
1524 break;
1525 case SLJIT_MOV_S8:
1526 srcw = (sljit_s8)srcw;
1527 break;
1528 case SLJIT_MOV_U16:
1529 srcw = (sljit_u16)srcw;
1530 break;
1531 case SLJIT_MOV_S16:
1532 srcw = (sljit_s16)srcw;
1533 break;
1534 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1535 case SLJIT_MOV_U32:
1536 srcw = (sljit_u32)srcw;
1537 break;
1538 case SLJIT_MOV_S32:
1539 srcw = (sljit_s32)srcw;
1540 break;
1541 #endif
1543 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1544 if (SLJIT_UNLIKELY(dst_is_ereg))
1545 return emit_mov(compiler, dst, dstw, src, srcw);
1546 #endif
1549 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1550 if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
1551 SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
1552 dst = TMP_REG1;
1554 #endif
1556 switch (op) {
1557 case SLJIT_MOV:
1558 case SLJIT_MOV_P:
1559 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1560 case SLJIT_MOV_U32:
1561 case SLJIT_MOV_S32:
1562 case SLJIT_MOV32:
1563 #endif
1564 EMIT_MOV(compiler, dst, dstw, src, srcw);
1565 break;
1566 case SLJIT_MOV_U8:
1567 FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
1568 break;
1569 case SLJIT_MOV_S8:
1570 FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
1571 break;
1572 case SLJIT_MOV_U16:
1573 FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
1574 break;
1575 case SLJIT_MOV_S16:
1576 FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
1577 break;
1578 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1579 case SLJIT_MOV_U32:
1580 FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
1581 break;
1582 case SLJIT_MOV_S32:
1583 FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
1584 break;
1585 case SLJIT_MOV32:
1586 compiler->mode32 = 1;
1587 EMIT_MOV(compiler, dst, dstw, src, srcw);
1588 compiler->mode32 = 0;
1589 break;
1590 #endif
1593 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1594 if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
1595 return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
1596 #endif
1597 return SLJIT_SUCCESS;
1600 switch (op) {
1601 case SLJIT_NOT:
1602 if (SLJIT_UNLIKELY(op_flags & SLJIT_SET_Z))
1603 return emit_not_with_flags(compiler, dst, dstw, src, srcw);
1604 return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw);
1606 case SLJIT_CLZ:
1607 return emit_clz(compiler, op_flags, dst, dstw, src, srcw);
1610 return SLJIT_SUCCESS;
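/* Caller sketch (an assumption about user code, illustration only): a zero-extending
   byte load from the address held in the first saved register goes through the
   emit_mov_byte() path above:

	sljit_emit_op1(compiler, SLJIT_MOV_U8, SLJIT_R0, 0, SLJIT_MEM1(SLJIT_S0), 0);
*/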
1613 static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
1614 sljit_u32 op_types,
1615 sljit_s32 dst, sljit_sw dstw,
1616 sljit_s32 src1, sljit_sw src1w,
1617 sljit_s32 src2, sljit_sw src2w)
1619 sljit_u8* inst;
1620 sljit_u8 op_eax_imm = U8(op_types >> 24);
1621 sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
1622 sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
1623 sljit_u8 op_imm = U8(op_types & 0xff);
1625 if (dst == src1 && dstw == src1w) {
1626 if (src2 & SLJIT_IMM) {
1627 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1628 if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1629 #else
1630 if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
1631 #endif
1632 BINARY_EAX_IMM(op_eax_imm, src2w);
1634 else {
1635 BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
1638 else if (FAST_IS_REG(dst)) {
1639 inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
1640 FAIL_IF(!inst);
1641 *inst = op_rm;
1643 else if (FAST_IS_REG(src2)) {
1644 /* Special exception for sljit_emit_op_flags. */
1645 inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
1646 FAIL_IF(!inst);
1647 *inst = op_mr;
1649 else {
1650 EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
1651 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1652 FAIL_IF(!inst);
1653 *inst = op_mr;
1655 return SLJIT_SUCCESS;
1658 /* Only for cumulative operations. */
1659 if (dst == src2 && dstw == src2w) {
1660 if (src1 & SLJIT_IMM) {
1661 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1662 if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
1663 #else
1664 if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128)) {
1665 #endif
1666 BINARY_EAX_IMM(op_eax_imm, src1w);
1668 else {
1669 BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
1672 else if (FAST_IS_REG(dst)) {
1673 inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w);
1674 FAIL_IF(!inst);
1675 *inst = op_rm;
1677 else if (FAST_IS_REG(src1)) {
1678 inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw);
1679 FAIL_IF(!inst);
1680 *inst = op_mr;
1682 else {
1683 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1684 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1685 FAIL_IF(!inst);
1686 *inst = op_mr;
1688 return SLJIT_SUCCESS;
1691 /* General version. */
1692 if (FAST_IS_REG(dst)) {
1693 EMIT_MOV(compiler, dst, 0, src1, src1w);
1694 if (src2 & SLJIT_IMM) {
1695 BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
1697 else {
1698 inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
1699 FAIL_IF(!inst);
1700 *inst = op_rm;
1703 else {
1704 /* This version requires fewer memory writes. */
1705 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1706 if (src2 & SLJIT_IMM) {
1707 BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
1709 else {
1710 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
1711 FAIL_IF(!inst);
1712 *inst = op_rm;
1714 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1717 return SLJIT_SUCCESS;
1720 static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
1721 sljit_u32 op_types,
1722 sljit_s32 dst, sljit_sw dstw,
1723 sljit_s32 src1, sljit_sw src1w,
1724 sljit_s32 src2, sljit_sw src2w)
1726 sljit_u8* inst;
1727 sljit_u8 op_eax_imm = U8(op_types >> 24);
1728 sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
1729 sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
1730 sljit_u8 op_imm = U8(op_types & 0xff);
1732 if (dst == src1 && dstw == src1w) {
1733 if (src2 & SLJIT_IMM) {
1734 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1735 if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1736 #else
1737 if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
1738 #endif
1739 BINARY_EAX_IMM(op_eax_imm, src2w);
1741 else {
1742 BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
1745 else if (FAST_IS_REG(dst)) {
1746 inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
1747 FAIL_IF(!inst);
1748 *inst = op_rm;
1750 else if (FAST_IS_REG(src2)) {
1751 inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
1752 FAIL_IF(!inst);
1753 *inst = op_mr;
1755 else {
1756 EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
1757 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1758 FAIL_IF(!inst);
1759 *inst = op_mr;
1761 return SLJIT_SUCCESS;
1764 /* General version. */
1765 if (FAST_IS_REG(dst) && dst != src2) {
1766 EMIT_MOV(compiler, dst, 0, src1, src1w);
1767 if (src2 & SLJIT_IMM) {
1768 BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
1770 else {
1771 inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
1772 FAIL_IF(!inst);
1773 *inst = op_rm;
1776 else {
1777 /* This version requires fewer memory writes. */
1778 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1779 if (src2 & SLJIT_IMM) {
1780 BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
1782 else {
1783 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
1784 FAIL_IF(!inst);
1785 *inst = op_rm;
1787 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1790 return SLJIT_SUCCESS;
1793 static sljit_s32 emit_mul(struct sljit_compiler *compiler,
1794 sljit_s32 dst, sljit_sw dstw,
1795 sljit_s32 src1, sljit_sw src1w,
1796 sljit_s32 src2, sljit_sw src2w)
1798 sljit_u8* inst;
1799 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1801 /* Register destination. */
1802 if (dst_r == src1 && !(src2 & SLJIT_IMM)) {
1803 inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
1804 FAIL_IF(!inst);
1805 *inst++ = GROUP_0F;
1806 *inst = IMUL_r_rm;
1808 else if (dst_r == src2 && !(src1 & SLJIT_IMM)) {
1809 inst = emit_x86_instruction(compiler, 2, dst_r, 0, src1, src1w);
1810 FAIL_IF(!inst);
1811 *inst++ = GROUP_0F;
1812 *inst = IMUL_r_rm;
1814 else if (src1 & SLJIT_IMM) {
1815 if (src2 & SLJIT_IMM) {
1816 EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, src2w);
1817 src2 = dst_r;
1818 src2w = 0;
1821 if (src1w <= 127 && src1w >= -128) {
1822 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
1823 FAIL_IF(!inst);
1824 *inst = IMUL_r_rm_i8;
1825 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1826 FAIL_IF(!inst);
1827 INC_SIZE(1);
1828 *inst = U8(src1w);
1830 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1831 else {
1832 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
1833 FAIL_IF(!inst);
1834 *inst = IMUL_r_rm_i32;
1835 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1836 FAIL_IF(!inst);
1837 INC_SIZE(4);
1838 sljit_unaligned_store_sw(inst, src1w);
1840 #else
1841 else if (IS_HALFWORD(src1w)) {
1842 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
1843 FAIL_IF(!inst);
1844 *inst = IMUL_r_rm_i32;
1845 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1846 FAIL_IF(!inst);
1847 INC_SIZE(4);
1848 sljit_unaligned_store_s32(inst, (sljit_s32)src1w);
1850 else {
1851 if (dst_r != src2)
1852 EMIT_MOV(compiler, dst_r, 0, src2, src2w);
1853 FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w));
1854 inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
1855 FAIL_IF(!inst);
1856 *inst++ = GROUP_0F;
1857 *inst = IMUL_r_rm;
1859 #endif
1861 else if (src2 & SLJIT_IMM) {
1862 /* Note: src1 is NOT immediate. */
1864 if (src2w <= 127 && src2w >= -128) {
1865 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
1866 FAIL_IF(!inst);
1867 *inst = IMUL_r_rm_i8;
1868 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1869 FAIL_IF(!inst);
1870 INC_SIZE(1);
1871 *inst = U8(src2w);
1873 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1874 else {
1875 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
1876 FAIL_IF(!inst);
1877 *inst = IMUL_r_rm_i32;
1878 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1879 FAIL_IF(!inst);
1880 INC_SIZE(4);
1881 sljit_unaligned_store_sw(inst, src2w);
1883 #else
1884 else if (IS_HALFWORD(src2w)) {
1885 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
1886 FAIL_IF(!inst);
1887 *inst = IMUL_r_rm_i32;
1888 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1889 FAIL_IF(!inst);
1890 INC_SIZE(4);
1891 sljit_unaligned_store_s32(inst, (sljit_s32)src2w);
1893 else {
1894 if (dst_r != src1)
1895 EMIT_MOV(compiler, dst_r, 0, src1, src1w);
1896 FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
1897 inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
1898 FAIL_IF(!inst);
1899 *inst++ = GROUP_0F;
1900 *inst = IMUL_r_rm;
1902 #endif
1904 else {
1905 /* Neither argument is immediate. */
1906 if (ADDRESSING_DEPENDS_ON(src2, dst_r))
1907 dst_r = TMP_REG1;
1908 EMIT_MOV(compiler, dst_r, 0, src1, src1w);
1909 inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
1910 FAIL_IF(!inst);
1911 *inst++ = GROUP_0F;
1912 *inst = IMUL_r_rm;
1915 if (dst & SLJIT_MEM)
1916 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1918 return SLJIT_SUCCESS;
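/* Worked example (illustration): the three IMUL encodings used above are 0F AF
   (imul r, r/m), 6B (imul r, r/m, imm8) and 69 (imul r, r/m, imm32); the imm8 form
   is preferred whenever the constant fits into [-128, 127]. */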
1921 static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
1922 sljit_s32 dst, sljit_sw dstw,
1923 sljit_s32 src1, sljit_sw src1w,
1924 sljit_s32 src2, sljit_sw src2w)
1926 sljit_u8* inst;
1927 sljit_s32 dst_r, done = 0;
1929 /* These cases are better left to be handled the normal way. */
1930 if (dst == src1 && dstw == src1w)
1931 return SLJIT_ERR_UNSUPPORTED;
1932 if (dst == src2 && dstw == src2w)
1933 return SLJIT_ERR_UNSUPPORTED;
1935 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1937 if (FAST_IS_REG(src1)) {
1938 if (FAST_IS_REG(src2)) {
1939 inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0);
1940 FAIL_IF(!inst);
1941 *inst = LEA_r_m;
1942 done = 1;
1944 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1945 if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1946 inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
1947 #else
1948 if (src2 & SLJIT_IMM) {
1949 inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
1950 #endif
1951 FAIL_IF(!inst);
1952 *inst = LEA_r_m;
1953 done = 1;
1956 else if (FAST_IS_REG(src2)) {
1957 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1958 if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) {
1959 inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
1960 #else
1961 if (src1 & SLJIT_IMM) {
1962 inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
1963 #endif
1964 FAIL_IF(!inst);
1965 *inst = LEA_r_m;
1966 done = 1;
1970 if (done) {
1971 if (dst_r == TMP_REG1)
1972 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
1973 return SLJIT_SUCCESS;
1975 return SLJIT_ERR_UNSUPPORTED;
1978 static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
1979 sljit_s32 src1, sljit_sw src1w,
1980 sljit_s32 src2, sljit_sw src2w)
1982 sljit_u8* inst;
1984 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1985 if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1986 #else
1987 if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
1988 #endif
1989 BINARY_EAX_IMM(CMP_EAX_i32, src2w);
1990 return SLJIT_SUCCESS;
1993 if (FAST_IS_REG(src1)) {
1994 if (src2 & SLJIT_IMM) {
1995 BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0);
1997 else {
1998 inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
1999 FAIL_IF(!inst);
2000 *inst = CMP_r_rm;
2002 return SLJIT_SUCCESS;
2005 if (FAST_IS_REG(src2) && !(src1 & SLJIT_IMM)) {
2006 inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
2007 FAIL_IF(!inst);
2008 *inst = CMP_rm_r;
2009 return SLJIT_SUCCESS;
2012 if (src2 & SLJIT_IMM) {
2013 if (src1 & SLJIT_IMM) {
2014 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2015 src1 = TMP_REG1;
2016 src1w = 0;
2018 BINARY_IMM(CMP, CMP_rm_r, src2w, src1, src1w);
2020 else {
2021 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2022 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
2023 FAIL_IF(!inst);
2024 *inst = CMP_r_rm;
2026 return SLJIT_SUCCESS;
2029 static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
2030 sljit_s32 src1, sljit_sw src1w,
2031 sljit_s32 src2, sljit_sw src2w)
2033 sljit_u8* inst;
2035 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2036 if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
2037 #else
2038 if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
2039 #endif
2040 BINARY_EAX_IMM(TEST_EAX_i32, src2w);
2041 return SLJIT_SUCCESS;
2044 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2045 if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
2046 #else
2047 if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128)) {
2048 #endif
2049 BINARY_EAX_IMM(TEST_EAX_i32, src1w);
2050 return SLJIT_SUCCESS;
2053 if (!(src1 & SLJIT_IMM)) {
2054 if (src2 & SLJIT_IMM) {
2055 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2056 if (IS_HALFWORD(src2w) || compiler->mode32) {
2057 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
2058 FAIL_IF(!inst);
2059 *inst = GROUP_F7;
2061 else {
2062 FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src2w));
2063 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src1, src1w);
2064 FAIL_IF(!inst);
2065 *inst = TEST_rm_r;
2067 #else
2068 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
2069 FAIL_IF(!inst);
2070 *inst = GROUP_F7;
2071 #endif
2072 return SLJIT_SUCCESS;
2074 else if (FAST_IS_REG(src1)) {
2075 inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
2076 FAIL_IF(!inst);
2077 *inst = TEST_rm_r;
2078 return SLJIT_SUCCESS;
2082 if (!(src2 & SLJIT_IMM)) {
2083 if (src1 & SLJIT_IMM) {
2084 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2085 if (IS_HALFWORD(src1w) || compiler->mode32) {
2086 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, src2w);
2087 FAIL_IF(!inst);
2088 *inst = GROUP_F7;
2090 else {
2091 FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src1w));
2092 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
2093 FAIL_IF(!inst);
2094 *inst = TEST_rm_r;
2096 #else
2097 inst = emit_x86_instruction(compiler, 1, src1, src1w, src2, src2w);
2098 FAIL_IF(!inst);
2099 *inst = GROUP_F7;
2100 #endif
2101 return SLJIT_SUCCESS;
2103 else if (FAST_IS_REG(src2)) {
2104 inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
2105 FAIL_IF(!inst);
2106 *inst = TEST_rm_r;
2107 return SLJIT_SUCCESS;
2111 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2112 if (src2 & SLJIT_IMM) {
2113 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2114 if (IS_HALFWORD(src2w) || compiler->mode32) {
2115 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
2116 FAIL_IF(!inst);
2117 *inst = GROUP_F7;
2119 else {
2120 FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
2121 inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REG1, 0);
2122 FAIL_IF(!inst);
2123 *inst = TEST_rm_r;
2125 #else
2126 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
2127 FAIL_IF(!inst);
2128 *inst = GROUP_F7;
2129 #endif
2131 else {
2132 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
2133 FAIL_IF(!inst);
2134 *inst = TEST_rm_r;
2136 return SLJIT_SUCCESS;
2139 static sljit_s32 emit_shift(struct sljit_compiler *compiler,
2140 sljit_u8 mode,
2141 sljit_s32 dst, sljit_sw dstw,
2142 sljit_s32 src1, sljit_sw src1w,
2143 sljit_s32 src2, sljit_sw src2w)
2145 sljit_u8* inst;
2147 if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) {
2148 if (dst == src1 && dstw == src1w) {
2149 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
2150 FAIL_IF(!inst);
2151 *inst |= mode;
2152 return SLJIT_SUCCESS;
2154 if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
2155 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2156 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2157 FAIL_IF(!inst);
2158 *inst |= mode;
2159 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2160 return SLJIT_SUCCESS;
2162 if (FAST_IS_REG(dst)) {
2163 EMIT_MOV(compiler, dst, 0, src1, src1w);
2164 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
2165 FAIL_IF(!inst);
2166 *inst |= mode;
2167 return SLJIT_SUCCESS;
2170 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2171 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
2172 FAIL_IF(!inst);
2173 *inst |= mode;
2174 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
2175 return SLJIT_SUCCESS;
2178 if (dst == SLJIT_PREF_SHIFT_REG) {
2179 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2180 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2181 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2182 FAIL_IF(!inst);
2183 *inst |= mode;
2184 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2186 else if (FAST_IS_REG(dst) && dst != src2 && dst != TMP_REG1 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
2187 if (src1 != dst)
2188 EMIT_MOV(compiler, dst, 0, src1, src1w);
2189 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
2190 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2191 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
2192 FAIL_IF(!inst);
2193 *inst |= mode;
2194 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2196 else {
2197 /* This case is complex since ecx itself may be used for
2198 addressing, and that use must be supported as well. */
2199 EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2200 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2201 EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
2202 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2203 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2204 FAIL_IF(!inst);
2205 *inst |= mode;
2206 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_SP), 0);
2207 #else
2208 EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0);
2209 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2210 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2211 FAIL_IF(!inst);
2212 *inst |= mode;
2213 EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0);
2214 #endif
2215 if (dst != TMP_REG1)
2216 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2219 return SLJIT_SUCCESS;
2222 static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
2223 sljit_u8 mode, sljit_s32 set_flags,
2224 sljit_s32 dst, sljit_sw dstw,
2225 sljit_s32 src1, sljit_sw src1w,
2226 sljit_s32 src2, sljit_sw src2w)
2228 /* The CPU does not set flags if the shift count is 0. */
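/* The hardware masks the shift count to 5 bits (6 bits for 64 bit
   operations), which the 0x1f / 0x3f checks below mirror, so only a count
   that is zero after masking needs this workaround. */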
2229 if (src2 & SLJIT_IMM) {
2230 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2231 if ((src2w & 0x3f) != 0 || (compiler->mode32 && (src2w & 0x1f) != 0))
2232 return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2233 #else
2234 if ((src2w & 0x1f) != 0)
2235 return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2236 #endif
2237 if (!set_flags)
2238 return emit_mov(compiler, dst, dstw, src1, src1w);
2239 /* OR dst, src, 0 */
2240 return emit_cum_binary(compiler, BINARY_OPCODE(OR),
2241 dst, dstw, src1, src1w, SLJIT_IMM, 0);
2244 if (!set_flags)
2245 return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2247 if (!FAST_IS_REG(dst))
2248 FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0));
2250 FAIL_IF(emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w));
2252 if (FAST_IS_REG(dst))
2253 return emit_cmp_binary(compiler, dst, dstw, SLJIT_IMM, 0);
2254 return SLJIT_SUCCESS;
2257 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
2258 sljit_s32 dst, sljit_sw dstw,
2259 sljit_s32 src1, sljit_sw src1w,
2260 sljit_s32 src2, sljit_sw src2w)
2262 CHECK_ERROR();
2263 CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
2264 ADJUST_LOCAL_OFFSET(dst, dstw);
2265 ADJUST_LOCAL_OFFSET(src1, src1w);
2266 ADJUST_LOCAL_OFFSET(src2, src2w);
2268 CHECK_EXTRA_REGS(dst, dstw, (void)0);
2269 CHECK_EXTRA_REGS(src1, src1w, (void)0);
2270 CHECK_EXTRA_REGS(src2, src2w, (void)0);
2271 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2272 compiler->mode32 = op & SLJIT_32;
2273 #endif
2275 SLJIT_ASSERT(dst != TMP_REG1 || HAS_FLAGS(op));
2277 switch (GET_OPCODE(op)) {
2278 case SLJIT_ADD:
2279 if (!HAS_FLAGS(op)) {
2280 if (emit_lea_binary(compiler, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED)
2281 return compiler->error;
2283 return emit_cum_binary(compiler, BINARY_OPCODE(ADD),
2284 dst, dstw, src1, src1w, src2, src2w);
2285 case SLJIT_ADDC:
2286 return emit_cum_binary(compiler, BINARY_OPCODE(ADC),
2287 dst, dstw, src1, src1w, src2, src2w);
2288 case SLJIT_SUB:
2289 if (src1 == SLJIT_IMM && src1w == 0)
2290 return emit_unary(compiler, NEG_rm, dst, dstw, src2, src2w);
2292 if (!HAS_FLAGS(op)) {
2293 if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
2294 return compiler->error;
2295 if (FAST_IS_REG(dst) && src2 == dst) {
2296 FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB), dst, 0, dst, 0, src1, src1w));
2297 return emit_unary(compiler, NEG_rm, dst, 0, dst, 0);
2301 return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
2302 dst, dstw, src1, src1w, src2, src2w);
2303 case SLJIT_SUBC:
2304 return emit_non_cum_binary(compiler, BINARY_OPCODE(SBB),
2305 dst, dstw, src1, src1w, src2, src2w);
2306 case SLJIT_MUL:
2307 return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
2308 case SLJIT_AND:
2309 return emit_cum_binary(compiler, BINARY_OPCODE(AND),
2310 dst, dstw, src1, src1w, src2, src2w);
2311 case SLJIT_OR:
2312 return emit_cum_binary(compiler, BINARY_OPCODE(OR),
2313 dst, dstw, src1, src1w, src2, src2w);
2314 case SLJIT_XOR:
2315 return emit_cum_binary(compiler, BINARY_OPCODE(XOR),
2316 dst, dstw, src1, src1w, src2, src2w);
2317 case SLJIT_SHL:
2318 return emit_shift_with_flags(compiler, SHL, HAS_FLAGS(op),
2319 dst, dstw, src1, src1w, src2, src2w);
2320 case SLJIT_LSHR:
2321 return emit_shift_with_flags(compiler, SHR, HAS_FLAGS(op),
2322 dst, dstw, src1, src1w, src2, src2w);
2323 case SLJIT_ASHR:
2324 return emit_shift_with_flags(compiler, SAR, HAS_FLAGS(op),
2325 dst, dstw, src1, src1w, src2, src2w);
2328 return SLJIT_SUCCESS;
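/* Illustrative usage sketch (not part of the emitter; register and immediate
   choices are arbitrary): an addition that requests no flags, such as

     sljit_emit_op2(compiler, SLJIT_ADD, SLJIT_R0, 0, SLJIT_R1, 0, SLJIT_IMM, 42);

   takes the SLJIT_ADD fast path above and is lowered to a single LEA whenever
   emit_lea_binary accepts the operands. */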
2331 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
2332 sljit_s32 src1, sljit_sw src1w,
2333 sljit_s32 src2, sljit_sw src2w)
2335 sljit_s32 opcode = GET_OPCODE(op);
2337 CHECK_ERROR();
2338 CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
2340 if (opcode != SLJIT_SUB && opcode != SLJIT_AND) {
2341 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
2342 || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
2343 compiler->skip_checks = 1;
2344 #endif
2345 return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
2348 ADJUST_LOCAL_OFFSET(src1, src1w);
2349 ADJUST_LOCAL_OFFSET(src2, src2w);
2351 CHECK_EXTRA_REGS(src1, src1w, (void)0);
2352 CHECK_EXTRA_REGS(src2, src2w, (void)0);
2353 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2354 compiler->mode32 = op & SLJIT_32;
2355 #endif
2357 if (opcode == SLJIT_SUB) {
2358 return emit_cmp_binary(compiler, src1, src1w, src2, src2w);
2360 return emit_test_binary(compiler, src1, src1w, src2, src2w);
2363 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
2364 sljit_s32 src, sljit_sw srcw)
2366 CHECK_ERROR();
2367 CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
2368 ADJUST_LOCAL_OFFSET(src, srcw);
2370 CHECK_EXTRA_REGS(src, srcw, (void)0);
2372 switch (op) {
2373 case SLJIT_FAST_RETURN:
2374 return emit_fast_return(compiler, src, srcw);
2375 case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
2376 /* Don't adjust shadow stack if it isn't enabled. */
2377 if (!cpu_has_shadow_stack ())
2378 return SLJIT_SUCCESS;
2379 return adjust_shadow_stack(compiler, src, srcw);
2380 case SLJIT_PREFETCH_L1:
2381 case SLJIT_PREFETCH_L2:
2382 case SLJIT_PREFETCH_L3:
2383 case SLJIT_PREFETCH_ONCE:
2384 return emit_prefetch(compiler, op, src, srcw);
2387 return SLJIT_SUCCESS;
2390 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
2392 CHECK_REG_INDEX(check_sljit_get_register_index(reg));
2393 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2394 if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
2395 return -1;
2396 #endif
2397 return reg_map[reg];
2400 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
2402 CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
2403 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2404 return reg;
2405 #else
2406 return freg_map[reg];
2407 #endif
2410 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
2411 void *instruction, sljit_u32 size)
2413 sljit_u8 *inst;
2415 CHECK_ERROR();
2416 CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
2418 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
2419 FAIL_IF(!inst);
2420 INC_SIZE(size);
2421 SLJIT_MEMCPY(inst, instruction, size);
2422 return SLJIT_SUCCESS;
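/* Usage sketch (illustrative only): the buffer is copied verbatim into the
   generated code, so a caller could emit a raw single byte NOP like this:

     sljit_u8 nop = 0x90;
     sljit_emit_op_custom(compiler, &nop, 1);
*/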
2425 /* --------------------------------------------------------------------- */
2426 /* Floating point operators */
2427 /* --------------------------------------------------------------------- */
2429 /* Alignment padding (3 words) + 4 constants of 16 bytes each. */
2430 static sljit_u32 sse2_data[3 + (4 * 4)];
2431 static sljit_u32 *sse2_buffer;
2433 static void init_compiler(void)
2435 /* Align to 16 bytes. */
2436 sse2_buffer = (sljit_u32*)(((sljit_uw)sse2_data + 15) & ~(sljit_uw)0xf);
2438 /* Single precision constants (each constant is 16 bytes long). */
2439 sse2_buffer[0] = 0x80000000;
2440 sse2_buffer[4] = 0x7fffffff;
2441 /* Double precision constants (each constant is 16 bytes long). */
2442 sse2_buffer[8] = 0;
2443 sse2_buffer[9] = 0x80000000;
2444 sse2_buffer[12] = 0xffffffff;
2445 sse2_buffer[13] = 0x7fffffff;
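/* Resulting layout (offsets are in sljit_u32 units; only the low lane of
   each 16 byte constant is meaningful for the scalar result, and the
   XORPD/ANDPD in sljit_emit_fop1 below read them as aligned memory operands):
     sse2_buffer + 0  : 0x80000000          single precision sign mask (negate)
     sse2_buffer + 4  : 0x7fffffff          single precision abs mask
     sse2_buffer + 8  : 0x8000000000000000  double precision sign mask (negate)
     sse2_buffer + 12 : 0x7fffffffffffffff  double precision abs mask */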
2448 static sljit_s32 emit_sse2(struct sljit_compiler *compiler, sljit_u8 opcode,
2449 sljit_s32 single, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
2451 sljit_u8 *inst;
2453 inst = emit_x86_instruction(compiler, 2 | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
2454 FAIL_IF(!inst);
2455 *inst++ = GROUP_0F;
2456 *inst = opcode;
2457 return SLJIT_SUCCESS;
2460 static sljit_s32 emit_sse2_logic(struct sljit_compiler *compiler, sljit_u8 opcode,
2461 sljit_s32 pref66, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
2463 sljit_u8 *inst;
2465 inst = emit_x86_instruction(compiler, 2 | (pref66 ? EX86_PREF_66 : 0) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
2466 FAIL_IF(!inst);
2467 *inst++ = GROUP_0F;
2468 *inst = opcode;
2469 return SLJIT_SUCCESS;
2472 static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
2473 sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
2475 return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw);
2478 static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
2479 sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
2481 return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw);
2484 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
2485 sljit_s32 dst, sljit_sw dstw,
2486 sljit_s32 src, sljit_sw srcw)
2488 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
2489 sljit_u8 *inst;
2491 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2492 if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
2493 compiler->mode32 = 0;
2494 #endif
2496 inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP2, dst_r, 0, src, srcw);
2497 FAIL_IF(!inst);
2498 *inst++ = GROUP_0F;
2499 *inst = CVTTSD2SI_r_xm;
2501 if (dst & SLJIT_MEM)
2502 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2503 return SLJIT_SUCCESS;
2506 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
2507 sljit_s32 dst, sljit_sw dstw,
2508 sljit_s32 src, sljit_sw srcw)
2510 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
2511 sljit_u8 *inst;
2513 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2514 if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
2515 compiler->mode32 = 0;
2516 #endif
2518 if (src & SLJIT_IMM) {
2519 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2520 if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
2521 srcw = (sljit_s32)srcw;
2522 #endif
2523 EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
2524 src = TMP_REG1;
2525 srcw = 0;
2528 inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP1, dst_r, 0, src, srcw);
2529 FAIL_IF(!inst);
2530 *inst++ = GROUP_0F;
2531 *inst = CVTSI2SD_x_rm;
2533 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2534 compiler->mode32 = 1;
2535 #endif
2536 if (dst_r == TMP_FREG)
2537 return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2538 return SLJIT_SUCCESS;
2541 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
2542 sljit_s32 src1, sljit_sw src1w,
2543 sljit_s32 src2, sljit_sw src2w)
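/* UCOMISD sets only ZF, PF and CF. For the four "less than" style types
   below the operands are swapped so that (a sketch of the reasoning) the
   condition can be tested with the carry based "above"/"below" codes, where
   an unordered result automatically falls on the correct side and no
   separate PF check is needed. */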
2545 switch (GET_FLAG_TYPE(op)) {
2546 case SLJIT_ORDERED_LESS:
2547 case SLJIT_UNORDERED_OR_GREATER_EQUAL:
2548 case SLJIT_UNORDERED_OR_GREATER:
2549 case SLJIT_ORDERED_LESS_EQUAL:
2550 if (!FAST_IS_REG(src2)) {
2551 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src2, src2w));
2552 src2 = TMP_FREG;
2555 return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_32), src2, src1, src1w);
2558 if (!FAST_IS_REG(src1)) {
2559 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
2560 src1 = TMP_FREG;
2563 return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_32), src1, src2, src2w);
2566 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
2567 sljit_s32 dst, sljit_sw dstw,
2568 sljit_s32 src, sljit_sw srcw)
2570 sljit_s32 dst_r;
2572 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2573 compiler->mode32 = 1;
2574 #endif
2576 CHECK_ERROR();
2577 SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
2579 if (GET_OPCODE(op) == SLJIT_MOV_F64) {
2580 if (FAST_IS_REG(dst))
2581 return emit_sse2_load(compiler, op & SLJIT_32, dst, src, srcw);
2582 if (FAST_IS_REG(src))
2583 return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, src);
2584 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));
2585 return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2588 if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) {
2589 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
2590 if (FAST_IS_REG(src)) {
2591 /* We overwrite the high bits of the source. From the SLJIT point of view,
2592 this is not an issue.
2593 Note: In SSE3, we could also use MOVDDUP and MOVSLDUP. */
2594 FAIL_IF(emit_sse2_logic(compiler, UNPCKLPD_x_xm, op & SLJIT_32, src, src, 0));
2596 else {
2597 FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_32), TMP_FREG, src, srcw));
2598 src = TMP_FREG;
2601 FAIL_IF(emit_sse2_logic(compiler, CVTPD2PS_x_xm, op & SLJIT_32, dst_r, src, 0));
2602 if (dst_r == TMP_FREG)
2603 return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2604 return SLJIT_SUCCESS;
2607 if (FAST_IS_REG(dst)) {
2608 dst_r = dst;
2609 if (dst != src)
2610 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src, srcw));
2612 else {
2613 dst_r = TMP_FREG;
2614 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src, srcw));
2617 switch (GET_OPCODE(op)) {
2618 case SLJIT_NEG_F64:
2619 FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_32 ? sse2_buffer : sse2_buffer + 8)));
2620 break;
2622 case SLJIT_ABS_F64:
2623 FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_32 ? sse2_buffer + 4 : sse2_buffer + 12)));
2624 break;
2627 if (dst_r == TMP_FREG)
2628 return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2629 return SLJIT_SUCCESS;
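/* Illustrative call (registers chosen arbitrarily): negating a double in FR1
   into FR0 goes through the SLJIT_NEG_F64 case above and XORs the sign mask:

     sljit_emit_fop1(compiler, SLJIT_NEG_F64, SLJIT_FR0, 0, SLJIT_FR1, 0);
*/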
2632 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
2633 sljit_s32 dst, sljit_sw dstw,
2634 sljit_s32 src1, sljit_sw src1w,
2635 sljit_s32 src2, sljit_sw src2w)
2637 sljit_s32 dst_r;
2639 CHECK_ERROR();
2640 CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2641 ADJUST_LOCAL_OFFSET(dst, dstw);
2642 ADJUST_LOCAL_OFFSET(src1, src1w);
2643 ADJUST_LOCAL_OFFSET(src2, src2w);
2645 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2646 compiler->mode32 = 1;
2647 #endif
2649 if (FAST_IS_REG(dst)) {
2650 dst_r = dst;
2651 if (dst == src1)
2652 ; /* Do nothing here. */
2653 else if (dst == src2 && (op == SLJIT_ADD_F64 || op == SLJIT_MUL_F64)) {
2654 /* Swap arguments. */
2655 src2 = src1;
2656 src2w = src1w;
2658 else if (dst != src2)
2659 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src1, src1w));
2660 else {
2661 dst_r = TMP_FREG;
2662 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
2665 else {
2666 dst_r = TMP_FREG;
2667 FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
2670 switch (GET_OPCODE(op)) {
2671 case SLJIT_ADD_F64:
2672 FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2673 break;
2675 case SLJIT_SUB_F64:
2676 FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2677 break;
2679 case SLJIT_MUL_F64:
2680 FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2681 break;
2683 case SLJIT_DIV_F64:
2684 FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2685 break;
2688 if (dst_r == TMP_FREG)
2689 return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2690 return SLJIT_SUCCESS;
2693 /* --------------------------------------------------------------------- */
2694 /* Conditional instructions */
2695 /* --------------------------------------------------------------------- */
2697 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
2699 sljit_u8 *inst;
2700 struct sljit_label *label;
2702 CHECK_ERROR_PTR();
2703 CHECK_PTR(check_sljit_emit_label(compiler));
2705 if (compiler->last_label && compiler->last_label->size == compiler->size)
2706 return compiler->last_label;
2708 label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
2709 PTR_FAIL_IF(!label);
2710 set_label(label, compiler);
2712 inst = (sljit_u8*)ensure_buf(compiler, 2);
2713 PTR_FAIL_IF(!inst);
2715 *inst++ = 0;
2716 *inst++ = 0;
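/* The two bytes above form a marker record in the instruction stream: a zero
   size byte followed by a type byte (0 label, 1 jump, 2 const, 3 put_label),
   resolved when the code is generated (a sketch of the buffer convention
   used by this backend). */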
2718 return label;
2721 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
2723 sljit_u8 *inst;
2724 struct sljit_jump *jump;
2726 CHECK_ERROR_PTR();
2727 CHECK_PTR(check_sljit_emit_jump(compiler, type));
2729 jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2730 PTR_FAIL_IF_NULL(jump);
2731 set_jump(jump, compiler, (sljit_u32)((type & SLJIT_REWRITABLE_JUMP) | ((type & 0xff) << TYPE_SHIFT)));
2732 type &= 0xff;
2734 /* Worst case size. */
2735 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2736 compiler->size += (type >= SLJIT_JUMP) ? 5 : 6;
2737 #else
2738 compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3);
2739 #endif
2741 inst = (sljit_u8*)ensure_buf(compiler, 2);
2742 PTR_FAIL_IF_NULL(inst);
2744 *inst++ = 0;
2745 *inst++ = 1;
2746 return jump;
2749 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
2751 sljit_u8 *inst;
2752 struct sljit_jump *jump;
2754 CHECK_ERROR();
2755 CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
2756 ADJUST_LOCAL_OFFSET(src, srcw);
2758 CHECK_EXTRA_REGS(src, srcw, (void)0);
2760 if (src == SLJIT_IMM) {
2761 jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2762 FAIL_IF_NULL(jump);
2763 set_jump(jump, compiler, (sljit_u32)(JUMP_ADDR | (type << TYPE_SHIFT)));
2764 jump->u.target = (sljit_uw)srcw;
2766 /* Worst case size. */
2767 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2768 compiler->size += 5;
2769 #else
2770 compiler->size += 10 + 3;
2771 #endif
2773 inst = (sljit_u8*)ensure_buf(compiler, 2);
2774 FAIL_IF_NULL(inst);
2776 *inst++ = 0;
2777 *inst++ = 1;
2779 else {
2780 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2781 /* REX_W is not necessary (src is not immediate). */
2782 compiler->mode32 = 1;
2783 #endif
2784 inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
2785 FAIL_IF(!inst);
2786 *inst++ = GROUP_FF;
2787 *inst = U8(*inst | ((type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm));
2789 return SLJIT_SUCCESS;
2792 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
2793 sljit_s32 dst, sljit_sw dstw,
2794 sljit_s32 type)
2796 sljit_u8 *inst;
2797 sljit_u8 cond_set = 0;
2798 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2799 sljit_s32 reg;
2800 #endif
2801 /* ADJUST_LOCAL_OFFSET and CHECK_EXTRA_REGS might overwrite these values. */
2802 sljit_s32 dst_save = dst;
2803 sljit_sw dstw_save = dstw;
2805 CHECK_ERROR();
2806 CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
2808 ADJUST_LOCAL_OFFSET(dst, dstw);
2809 CHECK_EXTRA_REGS(dst, dstw, (void)0);
2811 /* setcc = jcc + 0x10. */
2812 cond_set = U8(get_jump_code((sljit_uw)type) + 0x10);
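/* Concrete example of the offset (byte values given for illustration): the
   near form of JE is 0x0F 0x84, and adding 0x10 to the second byte yields
   SETE, 0x0F 0x94. */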
2814 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2815 if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst)) {
2816 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 3);
2817 FAIL_IF(!inst);
2818 INC_SIZE(4 + 3);
2819 /* Set low register to conditional flag. */
2820 *inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;
2821 *inst++ = GROUP_0F;
2822 *inst++ = cond_set;
2823 *inst++ = MOD_REG | reg_lmap[TMP_REG1];
2824 *inst++ = U8(REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B));
2825 *inst++ = OR_rm8_r8;
2826 *inst++ = U8(MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst]);
2827 return SLJIT_SUCCESS;
2830 reg = (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG1;
2832 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 4);
2833 FAIL_IF(!inst);
2834 INC_SIZE(4 + 4);
2835 /* Set low register to conditional flag. */
2836 *inst++ = (reg_map[reg] <= 7) ? REX : REX_B;
2837 *inst++ = GROUP_0F;
2838 *inst++ = cond_set;
2839 *inst++ = MOD_REG | reg_lmap[reg];
2840 *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
2841 /* The movzx instruction does not affect flags. */
2842 *inst++ = GROUP_0F;
2843 *inst++ = MOVZX_r_rm8;
2844 *inst = U8(MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg]);
2846 if (reg != TMP_REG1)
2847 return SLJIT_SUCCESS;
2849 if (GET_OPCODE(op) < SLJIT_ADD) {
2850 compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV;
2851 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2854 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
2855 || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
2856 compiler->skip_checks = 1;
2857 #endif
2858 return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
2860 #else
2861 /* The SLJIT_CONFIG_X86_32 code path starts here. */
2862 if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) {
2863 if (reg_map[dst] <= 4) {
2864 /* Low byte is accessible. */
2865 inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
2866 FAIL_IF(!inst);
2867 INC_SIZE(3 + 3);
2868 /* Set low byte to conditional flag. */
2869 *inst++ = GROUP_0F;
2870 *inst++ = cond_set;
2871 *inst++ = U8(MOD_REG | reg_map[dst]);
2873 *inst++ = GROUP_0F;
2874 *inst++ = MOVZX_r_rm8;
2875 *inst = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[dst]);
2876 return SLJIT_SUCCESS;
2879 /* Low byte is not accessible. */
2880 if (cpu_has_cmov == -1)
2881 get_cpu_features();
2883 if (cpu_has_cmov) {
2884 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
2885 /* An xor reg, reg operation would overwrite the flags. */
2886 EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0);
2888 inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
2889 FAIL_IF(!inst);
2890 INC_SIZE(3);
2892 *inst++ = GROUP_0F;
2893 /* cmovcc = setcc - 0x50. */
2894 *inst++ = U8(cond_set - 0x50);
2895 *inst++ = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REG1]);
2896 return SLJIT_SUCCESS;
2899 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
2900 FAIL_IF(!inst);
2901 INC_SIZE(1 + 3 + 3 + 1);
2902 *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2903 /* Set al to conditional flag. */
2904 *inst++ = GROUP_0F;
2905 *inst++ = cond_set;
2906 *inst++ = MOD_REG | 0 /* eax */;
2908 *inst++ = GROUP_0F;
2909 *inst++ = MOVZX_r_rm8;
2910 *inst++ = U8(MOD_REG | (reg_map[dst] << 3) | 0 /* eax */);
2911 *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2912 return SLJIT_SUCCESS;
2915 if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
2916 SLJIT_ASSERT(reg_map[SLJIT_R0] == 0);
2918 if (dst != SLJIT_R0) {
2919 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1);
2920 FAIL_IF(!inst);
2921 INC_SIZE(1 + 3 + 2 + 1);
2922 /* Set low register to conditional flag. */
2923 *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2924 *inst++ = GROUP_0F;
2925 *inst++ = cond_set;
2926 *inst++ = MOD_REG | 0 /* eax */;
2927 *inst++ = OR_rm8_r8;
2928 *inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst];
2929 *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2931 else {
2932 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2);
2933 FAIL_IF(!inst);
2934 INC_SIZE(2 + 3 + 2 + 2);
2935 /* Set low register to conditional flag. */
2936 *inst++ = XCHG_r_rm;
2937 *inst++ = U8(MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1]);
2938 *inst++ = GROUP_0F;
2939 *inst++ = cond_set;
2940 *inst++ = MOD_REG | 1 /* ecx */;
2941 *inst++ = OR_rm8_r8;
2942 *inst++ = MOD_REG | (1 /* ecx */ << 3) | 0 /* eax */;
2943 *inst++ = XCHG_r_rm;
2944 *inst++ = U8(MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1]);
2946 return SLJIT_SUCCESS;
2949 /* Set TMP_REG1 to the condition bit (0 or 1). */
2950 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
2951 FAIL_IF(!inst);
2952 INC_SIZE(1 + 3 + 3 + 1);
2953 *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2954 /* Set al to conditional flag. */
2955 *inst++ = GROUP_0F;
2956 *inst++ = cond_set;
2957 *inst++ = MOD_REG | 0 /* eax */;
2959 *inst++ = GROUP_0F;
2960 *inst++ = MOVZX_r_rm8;
2961 *inst++ = MOD_REG | (0 << 3) /* eax */ | 0 /* eax */;
2963 *inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2965 if (GET_OPCODE(op) < SLJIT_ADD)
2966 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2968 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
2969 || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
2970 compiler->skip_checks = 1;
2971 #endif
2972 return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
2973 #endif /* SLJIT_CONFIG_X86_64 */
2976 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
2977 sljit_s32 dst_reg,
2978 sljit_s32 src, sljit_sw srcw)
2980 sljit_u8* inst;
2982 CHECK_ERROR();
2983 CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
2985 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2986 dst_reg &= ~SLJIT_32;
2988 if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV) || (dst_reg >= SLJIT_R3 && dst_reg <= SLJIT_S3))
2989 return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
2990 #else
2991 if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV))
2992 return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
2993 #endif
2995 /* ADJUST_LOCAL_OFFSET is not needed. */
2996 CHECK_EXTRA_REGS(src, srcw, (void)0);
2998 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2999 compiler->mode32 = dst_reg & SLJIT_32;
3000 dst_reg &= ~SLJIT_32;
3001 #endif
3003 if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
3004 EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);
3005 src = TMP_REG1;
3006 srcw = 0;
3009 inst = emit_x86_instruction(compiler, 2, dst_reg, 0, src, srcw);
3010 FAIL_IF(!inst);
3011 *inst++ = GROUP_0F;
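/* cmovcc = jcc - 0x40 (e.g. near JE 0x0F 0x84 becomes CMOVE 0x0F 0x44; byte
   values given for illustration). */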
3012 *inst = U8(get_jump_code((sljit_uw)type) - 0x40);
3013 return SLJIT_SUCCESS;
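/* Illustrative call (operands chosen arbitrarily): copy R1 into R0 only when
   the preceding flag setting operation signalled equality:

     sljit_emit_cmov(compiler, SLJIT_EQUAL, SLJIT_R0, SLJIT_R1, 0);
*/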
3016 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
3018 CHECK_ERROR();
3019 CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
3020 ADJUST_LOCAL_OFFSET(dst, dstw);
3022 CHECK_EXTRA_REGS(dst, dstw, (void)0);
3024 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3025 compiler->mode32 = 0;
3026 #endif
3028 ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);
3030 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3031 if (NOT_HALFWORD(offset)) {
3032 FAIL_IF(emit_load_imm64(compiler, TMP_REG1, offset));
3033 #if (defined SLJIT_DEBUG && SLJIT_DEBUG)
3034 SLJIT_ASSERT(emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0) != SLJIT_ERR_UNSUPPORTED);
3035 return compiler->error;
3036 #else
3037 return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0);
3038 #endif
3040 #endif
3042 if (offset != 0)
3043 return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
3044 return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
3047 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
3049 sljit_u8 *inst;
3050 struct sljit_const *const_;
3051 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3052 sljit_s32 reg;
3053 #endif
3055 CHECK_ERROR_PTR();
3056 CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
3057 ADJUST_LOCAL_OFFSET(dst, dstw);
3059 CHECK_EXTRA_REGS(dst, dstw, (void)0);
3061 const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
3062 PTR_FAIL_IF(!const_);
3063 set_const(const_, compiler);
3065 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3066 compiler->mode32 = 0;
3067 reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
3069 if (emit_load_imm64(compiler, reg, init_value))
3070 return NULL;
3071 #else
3072 if (emit_mov(compiler, dst, dstw, SLJIT_IMM, init_value))
3073 return NULL;
3074 #endif
3076 inst = (sljit_u8*)ensure_buf(compiler, 2);
3077 PTR_FAIL_IF(!inst);
3079 *inst++ = 0;
3080 *inst++ = 2;
3082 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3083 if (dst & SLJIT_MEM)
3084 if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
3085 return NULL;
3086 #endif
3088 return const_;
3091 SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
3093 struct sljit_put_label *put_label;
3094 sljit_u8 *inst;
3095 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3096 sljit_s32 reg;
3097 sljit_uw start_size;
3098 #endif
3100 CHECK_ERROR_PTR();
3101 CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
3102 ADJUST_LOCAL_OFFSET(dst, dstw);
3104 CHECK_EXTRA_REGS(dst, dstw, (void)0);
3106 put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
3107 PTR_FAIL_IF(!put_label);
3108 set_put_label(put_label, compiler, 0);
3110 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3111 compiler->mode32 = 0;
3112 reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
3114 if (emit_load_imm64(compiler, reg, 0))
3115 return NULL;
3116 #else
3117 if (emit_mov(compiler, dst, dstw, SLJIT_IMM, 0))
3118 return NULL;
3119 #endif
3121 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3122 if (dst & SLJIT_MEM) {
3123 start_size = compiler->size;
3124 if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
3125 return NULL;
3126 put_label->flags = compiler->size - start_size;
3128 #endif
3130 inst = (sljit_u8*)ensure_buf(compiler, 2);
3131 PTR_FAIL_IF(!inst);
3133 *inst++ = 0;
3134 *inst++ = 3;
3136 return put_label;
3139 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
3141 SLJIT_UNUSED_ARG(executable_offset);
3143 SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 0);
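/* On x86-32 the patched field is the rel32 of a call/jmp, hence the
   subtraction of the end address of the 4 byte field; on x86-64 it is the
   absolute 64 bit immediate of a MOV (see the two stores below). */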
3144 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
3145 sljit_unaligned_store_sw((void*)addr, (sljit_sw)(new_target - (addr + 4) - (sljit_uw)executable_offset));
3146 #else
3147 sljit_unaligned_store_sw((void*)addr, (sljit_sw)new_target);
3148 #endif
3149 SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 1);
3152 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
3154 SLJIT_UNUSED_ARG(executable_offset);
3156 SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 0);
3157 sljit_unaligned_store_sw((void*)addr, new_constant);
3158 SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 1);