/*
 *    Stack-less Just-In-Time compiler
 *
 *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/* x86 32-bit arch dependent functions. */

/* --------------------------------------------------------------------- */
/* --------------------------------------------------------------------- */
static sljit_s32 emit_do_imm(struct sljit_compiler *compiler, sljit_u8 opcode, sljit_sw imm)
{
	sljit_u8 *inst;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + sizeof(sljit_sw));
	FAIL_IF(!inst);
	INC_SIZE(1 + sizeof(sljit_sw));
	*inst++ = opcode;
	sljit_unaligned_store_sw(inst, imm);
	return SLJIT_SUCCESS;
}
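/* emit_do_imm() writes one opcode byte followed by a machine-word immediate.
   Illustrative use only (the opcode macros live in sljitNativeX86_common.c):
   a call such as emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], imm) would
   produce the five byte sequence B8+r imm32, i.e. "mov r32, imm32". */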
/* Size contains the flags as well. */
static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw size,
	/* The register or immediate operand. */
	sljit_s32 a, sljit_sw imma,
	/* The general operand (not immediate). */
	sljit_s32 b, sljit_sw immb)
{
	sljit_u8 *inst;
	sljit_u8 *buf_ptr;
	sljit_u8 reg_map_b;
	sljit_uw flags = size;
	sljit_uw inst_size;

	/* Both cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
	/* Size flags not allowed for typed instructions. */
	SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
	/* Both size flags cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
	/* SSE2 and immediate is not possible. */
	SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
	SLJIT_ASSERT((flags & (EX86_PREF_F2 | EX86_PREF_F3)) != (EX86_PREF_F2 | EX86_PREF_F3)
		&& (flags & (EX86_PREF_F2 | EX86_PREF_66)) != (EX86_PREF_F2 | EX86_PREF_66)
		&& (flags & (EX86_PREF_F3 | EX86_PREF_66)) != (EX86_PREF_F3 | EX86_PREF_66));

	size &= 0xf;
	inst_size = size;

	if (flags & (EX86_PREF_F2 | EX86_PREF_F3))
		inst_size++;
	if (flags & EX86_PREF_66)
		inst_size++;

	/* Calculate size of b. */
	inst_size += 1; /* mod r/m byte. */
	if (b & SLJIT_MEM) {
		if (!(b & REG_MASK))
			inst_size += sizeof(sljit_sw);
		else if (immb != 0 && !(b & OFFS_REG_MASK)) {
			/* Immediate operand. */
			if (immb <= 127 && immb >= -128)
				inst_size += sizeof(sljit_s8);
			else
				inst_size += sizeof(sljit_sw);
		}
		else if (reg_map[b & REG_MASK] == 5)
			inst_size += sizeof(sljit_s8);

		if ((b & REG_MASK) == SLJIT_SP && !(b & OFFS_REG_MASK))
			b |= TO_OFFS_REG(SLJIT_SP);

		if (b & OFFS_REG_MASK)
			inst_size += 1; /* SIB byte. */
	}

	/* Calculate size of a. */
	if (a & SLJIT_IMM) {
		if (flags & EX86_BIN_INS) {
			if (imma <= 127 && imma >= -128) {
				inst_size += 1;
				flags |= EX86_BYTE_ARG;
			} else
				inst_size += 4;
		}
		else if (flags & EX86_SHIFT_INS) {
			imma &= 0x1f;
			if (imma != 1) {
				inst_size++;
				flags |= EX86_BYTE_ARG;
			}
		} else if (flags & EX86_BYTE_ARG)
			inst_size++;
		else if (flags & EX86_HALF_ARG)
			inst_size += sizeof(short);
		else
			inst_size += sizeof(sljit_sw);
	}
	else
		SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
	PTR_FAIL_IF(!inst);

	/* Encoding the byte. */
	INC_SIZE(inst_size);
	if (flags & EX86_PREF_F2)
		*inst++ = 0xf2;
	if (flags & EX86_PREF_F3)
		*inst++ = 0xf3;
	if (flags & EX86_PREF_66)
		*inst++ = 0x66;

	buf_ptr = inst + size;

	/* Encode mod/rm byte. */
	if (!(flags & EX86_SHIFT_INS)) {
		if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
			*inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;

		if (a & SLJIT_IMM)
			*buf_ptr = 0;
		else if (!(flags & EX86_SSE2_OP1))
			*buf_ptr = U8(reg_map[a] << 3);
		else
			*buf_ptr = U8(a << 3);
	}
	else {
		if (a & SLJIT_IMM) {
			if (imma == 1)
				*inst = GROUP_SHIFT_1;
			else
				*inst = GROUP_SHIFT_N;
		} else
			*inst = GROUP_SHIFT_CL;
		*buf_ptr = 0;
	}

	if (!(b & SLJIT_MEM)) {
		*buf_ptr = U8(*buf_ptr | MOD_REG | (!(flags & EX86_SSE2_OP2) ? reg_map[b] : b));
		buf_ptr++;
	} else if (b & REG_MASK) {
		reg_map_b = reg_map[b & REG_MASK];

		if (!(b & OFFS_REG_MASK) || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP) || reg_map_b == 5) {
			if (immb != 0 || reg_map_b == 5) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr |= 0x40;
				else
					*buf_ptr |= 0x80;
			}

			if (!(b & OFFS_REG_MASK))
				*buf_ptr++ |= reg_map_b;
			else {
				*buf_ptr++ |= 0x04;
				*buf_ptr++ = U8(reg_map_b | (reg_map[OFFS_REG(b)] << 3));
			}

			if (immb != 0 || reg_map_b == 5) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr++ = U8(immb); /* 8 bit displacement. */
				else {
					sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */
					buf_ptr += sizeof(sljit_sw);
				}
			}
		}
		else {
			*buf_ptr++ |= 0x04;
			*buf_ptr++ = U8(reg_map_b | (reg_map[OFFS_REG(b)] << 3) | (immb << 6));
		}
	} else {
		*buf_ptr++ |= 0x05;
		sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */
		buf_ptr += sizeof(sljit_sw);
	}

	if (a & SLJIT_IMM) {
		if (flags & EX86_BYTE_ARG)
			*buf_ptr = U8(imma);
		else if (flags & EX86_HALF_ARG)
			sljit_unaligned_store_s16(buf_ptr, (sljit_s16)imma);
		else if (!(flags & EX86_SHIFT_INS))
			sljit_unaligned_store_sw(buf_ptr, imma);
	}

	return !(flags & EX86_SHIFT_INS) ? inst : (inst + 1);
}
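/* Callers of emit_x86_instruction() reserve `size` opcode bytes and fill them
   through the returned pointer; the ModR/M, SIB, displacement and immediate
   bytes are already laid out around them.  A binary-op caller looks roughly
   like the sketch below (BINARY_IMM32 in sljitNativeX86_common.c is the real
   macro; the exact flag combination depends on the instruction being emitted):

	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, imm, dst, dstw);
	FAIL_IF(!inst);
	*(inst + 1) |= ADD;

   Here *inst already holds GROUP_BINARY_83 or GROUP_BINARY_81 and the OR into
   the following byte selects the operation in the ModR/M reg field. */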
/* --------------------------------------------------------------------- */
/* --------------------------------------------------------------------- */

static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset)
{
	sljit_uw type = jump->flags >> TYPE_SHIFT;

	if (type == SLJIT_JUMP) {
		*code_ptr++ = JMP_i32;
		jump->addr++;
	}
	else if (type >= SLJIT_FAST_CALL) {
		*code_ptr++ = CALL_i32;
		jump->addr++;
	}
	else {
		*code_ptr++ = GROUP_0F;
		*code_ptr++ = get_jump_code(type);
		jump->addr += 2;
	}

	if (jump->flags & JUMP_LABEL)
		jump->flags |= PATCH_MW;
	else
		sljit_unaligned_store_sw(code_ptr, (sljit_sw)(jump->u.target - (jump->addr + 4) - (sljit_uw)executable_offset));
	code_ptr += 4;

	return code_ptr;
}
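/* The generated branches are the plain x86 rel32 forms:
     E9 rel32       jmp   (JMP_i32)
     E8 rel32       call  (CALL_i32)
     0F 8x rel32    jcc   (GROUP_0F followed by get_jump_code(type))
   where rel32 is measured from the end of the displacement, which is what the
   jump->u.target - (jump->addr + 4) - executable_offset expression above
   computes when the target address is already known. */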
#define ENTER_R2_USED	0x00001
#define ENTER_R2_TO_S	0x00002
#define ENTER_R2_TO_R0	0x00004
#define ENTER_R1_TO_S	0x00008
#define ENTER_TMP_TO_R4	0x00010
#define ENTER_TMP_TO_S	0x00020
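/* Rough meaning of the status bits above, as used by sljit_emit_enter():
   ENTER_R2_USED    - ECX (SLJIT_R2) is needed as a target/scratch before the
                      incoming fastcall register arguments have been moved away;
   ENTER_R2_TO_S,
   ENTER_R2_TO_R0,
   ENTER_R1_TO_S    - defer copying the first/second fastcall register argument
                      into a saved register or into SLJIT_R0;
   ENTER_TMP_TO_R4,
   ENTER_TMP_TO_S   - the fourth word argument is loaded into TMP_REG1 first and
                      only stored to its final slot after the frame is built. */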
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	sljit_s32 word_arg_count, saved_arg_count, float_arg_count;
	sljit_s32 size, locals_offset, args_size, types, status;
	sljit_u8 *inst;
#ifdef _WIN32
	sljit_s32 r2_offset = -1;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	/* Emit ENDBR32 at function entry if needed. */
	FAIL_IF(emit_endbranch(compiler));

	SLJIT_COMPILE_ASSERT(SLJIT_FR0 == 1, float_register_index_start);

	arg_types >>= SLJIT_ARG_SHIFT;
	types = arg_types;
	word_arg_count = 0;
	saved_arg_count = 0;
	float_arg_count = 0;
	status = 0;
	args_size = SSIZE_OF(sw);

	while (types) {
		switch (types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			float_arg_count++;
			FAIL_IF(emit_sse2_load(compiler, 0, float_arg_count, SLJIT_MEM1(SLJIT_SP), args_size));
			args_size += SSIZE_OF(f64);
			break;
		case SLJIT_ARG_TYPE_F32:
			float_arg_count++;
			FAIL_IF(emit_sse2_load(compiler, 1, float_arg_count, SLJIT_MEM1(SLJIT_SP), args_size));
			args_size += SSIZE_OF(f32);
			break;
		default:
			word_arg_count++;

			if (!(types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
				saved_arg_count++;
				if (saved_arg_count == 4)
					status |= ENTER_TMP_TO_S;
			} else {
				if (word_arg_count == 4)
					status |= ENTER_TMP_TO_R4;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
				if (word_arg_count == 3)
					status |= ENTER_R2_USED;
#endif
			}

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
			if (word_arg_count <= 2 && !(options & SLJIT_ENTER_CDECL))
				break;
#endif

			args_size += SSIZE_OF(sw);
			break;
		}
		types >>= SLJIT_ARG_SHIFT;
	}

	args_size -= SSIZE_OF(sw);
	compiler->args_size = args_size;

	/* [esp+0] for saving temporaries and function calls. */
	locals_offset = 2 * SSIZE_OF(sw);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if ((options & SLJIT_ENTER_CDECL) && scratches >= 3)
		locals_offset = 4 * SSIZE_OF(sw);
#else
	if (scratches >= 3)
		locals_offset = 4 * SSIZE_OF(sw);
#endif

	compiler->scratches_offset = locals_offset;

	if (scratches > 3)
		locals_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * SSIZE_OF(sw);

	if (saveds > 3)
		locals_offset += (saveds - 3) * SSIZE_OF(sw);

	compiler->locals_offset = locals_offset;

	size = 1 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3);
	inst = (sljit_u8*)ensure_buf(compiler, (sljit_uw)(size + 1));
	FAIL_IF(!inst);

	INC_SIZE((sljit_uw)size);
	PUSH_REG(reg_map[TMP_REG1]);
	if (saveds > 2 || scratches > 9)
		PUSH_REG(reg_map[SLJIT_S2]);
	if (saveds > 1 || scratches > 10)
		PUSH_REG(reg_map[SLJIT_S1]);
	if (saveds > 0 || scratches > 11)
		PUSH_REG(reg_map[SLJIT_S0]);

	size *= SSIZE_OF(sw);

	if (status & (ENTER_TMP_TO_R4 | ENTER_TMP_TO_S))
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), args_size + size);

	size += SSIZE_OF(sw);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (!(options & SLJIT_ENTER_CDECL))
		size += args_size;
#endif

	local_size = ((locals_offset + local_size + size + 0xf) & ~0xf) - size;
	compiler->local_size = local_size;

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (!(options & SLJIT_ENTER_CDECL))
		size -= args_size;
#endif

	word_arg_count = 0;
	saved_arg_count = 0;
	args_size = size;
	while (arg_types) {
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			args_size += SSIZE_OF(f64);
			break;
		case SLJIT_ARG_TYPE_F32:
			args_size += SSIZE_OF(f32);
			break;
		default:
			word_arg_count++;

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
			if (!(options & SLJIT_ENTER_CDECL) && word_arg_count <= 2) {
				if (word_arg_count == 1) {
					if (status & ENTER_R2_USED) {
						EMIT_MOV(compiler, (arg_types & SLJIT_ARG_TYPE_SCRATCH_REG) ? SLJIT_R0 : SLJIT_S0, 0, SLJIT_R2, 0);
					} else if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
						status |= ENTER_R2_TO_S;
						saved_arg_count++;
					} else
						status |= ENTER_R2_TO_R0;
				} else if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
					status |= ENTER_R1_TO_S;
					saved_arg_count++;
				}
				break;
			}
#endif /* SLJIT_X86_32_FASTCALL */

			if (arg_types & SLJIT_ARG_TYPE_SCRATCH_REG) {
				SLJIT_ASSERT(word_arg_count <= 3 || (status & ENTER_TMP_TO_R4));

				if (word_arg_count <= 3) {
#ifdef _WIN32
					if (word_arg_count == 3 && local_size > 4 * 4096)
						r2_offset = local_size + args_size;
					else
#endif
						EMIT_MOV(compiler, word_arg_count, 0, SLJIT_MEM1(SLJIT_SP), args_size);
				}
			} else {
				SLJIT_ASSERT(saved_arg_count <= 3 || (status & ENTER_TMP_TO_S));

				if (saved_arg_count <= 3)
					EMIT_MOV(compiler, SLJIT_S0 - saved_arg_count, 0, SLJIT_MEM1(SLJIT_SP), args_size);
				saved_arg_count++;
			}

			args_size += SSIZE_OF(sw);
			break;
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (!(options & SLJIT_ENTER_CDECL)) {
		if (status & ENTER_R2_TO_R0)
			EMIT_MOV(compiler, SLJIT_R0, 0, SLJIT_R2, 0);

		saved_arg_count = 0;
		if (status & ENTER_R2_TO_S) {
			EMIT_MOV(compiler, SLJIT_S0, 0, SLJIT_R2, 0);
			saved_arg_count++;
		}

		if (status & ENTER_R1_TO_S)
			EMIT_MOV(compiler, SLJIT_S0 - saved_arg_count, 0, SLJIT_R1, 0);
	}
#endif /* SLJIT_X86_32_FASTCALL */

	SLJIT_ASSERT(SLJIT_LOCALS_OFFSET > 0);

#ifdef _WIN32
	SLJIT_ASSERT(r2_offset == -1 || local_size > 4 * 4096);

	if (local_size > 4096) {
		if (local_size <= 4 * 4096) {
			BINARY_IMM32(OR, 0, SLJIT_MEM1(SLJIT_SP), -4096);

			if (local_size > 2 * 4096)
				BINARY_IMM32(OR, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 2);
			if (local_size > 3 * 4096)
				BINARY_IMM32(OR, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 3);
		}
		else {
			EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_IMM, local_size >> 12);

			BINARY_IMM32(OR, 0, SLJIT_MEM1(SLJIT_SP), -4096);
			BINARY_IMM32(SUB, 4096, SLJIT_SP, 0);

			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
			FAIL_IF(!inst);

			INC_SIZE(2);
			inst[0] = LOOP_i8;
			inst[1] = (sljit_u8)-16;
			local_size &= 0xfff;
		}
	}

	if (local_size > 0) {
		BINARY_IMM32(OR, 0, SLJIT_MEM1(SLJIT_SP), -local_size);
		BINARY_IMM32(SUB, local_size, SLJIT_SP, 0);
	}

	if (r2_offset != -1)
		EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), r2_offset);

#else /* !_WIN32 */

	SLJIT_ASSERT(local_size > 0);

	BINARY_IMM32(SUB, local_size, SLJIT_SP, 0);

#endif /* _WIN32 */

	if (status & (ENTER_TMP_TO_R4 | ENTER_TMP_TO_S)) {
		size = (status & ENTER_TMP_TO_R4) ? compiler->scratches_offset : compiler->locals_offset - SSIZE_OF(sw);
		EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), size, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}
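/* Rough frame picture after the prologue above (offsets from the final ESP):

     [0 .. scratches_offset)   scratch slots for temporaries and function calls
     [scratches_offset ..)     spill area for the virtual (4th and later) registers
     [locals_offset ..)        user local data (local_size, keeping the frame 16-byte aligned)
     ... pushed TMP_REG1 / S2 / S1 / S0 ...
     return address
     incoming stack arguments

   On Windows, frames larger than a page are grown page by page and every page
   is touched (the OR [esp - n*4096], 0 instructions) so the stack guard page
   mechanism sees the growth in order. */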
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	sljit_s32 args_size, locals_offset;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	sljit_s32 word_arg_count = 0;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	arg_types >>= SLJIT_ARG_SHIFT;
	args_size = 0;
	while (arg_types) {
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			args_size += SSIZE_OF(f64);
			break;
		case SLJIT_ARG_TYPE_F32:
			args_size += SSIZE_OF(f32);
			break;
		default:
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
			if (word_arg_count >= 2)
				args_size += SSIZE_OF(sw);
			word_arg_count++;
#else
			args_size += SSIZE_OF(sw);
#endif
			break;
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	compiler->args_size = args_size;

	/* [esp+0] for saving temporaries and function calls. */
	locals_offset = 2 * SSIZE_OF(sw);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if ((options & SLJIT_ENTER_CDECL) && scratches >= 3)
		locals_offset = 4 * SSIZE_OF(sw);
#else
	if (scratches >= 3)
		locals_offset = 4 * SSIZE_OF(sw);
#endif

	compiler->scratches_offset = locals_offset;

	if (scratches > 3)
		locals_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * SSIZE_OF(sw);

	if (saveds > 3)
		locals_offset += (saveds - 3) * SSIZE_OF(sw);

	compiler->locals_offset = locals_offset;

	saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * SSIZE_OF(sw);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (!(options & SLJIT_ENTER_CDECL))
		saveds += args_size;
#endif

	compiler->local_size = ((locals_offset + local_size + saveds + 0xf) & ~0xf) - saveds;
	return SLJIT_SUCCESS;
}
static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler)
{
	sljit_uw size;
	sljit_u8 *inst;

	size = (sljit_uw)(1 + (compiler->scratches > 9 ? (compiler->scratches - 9) : 0) +
		(compiler->saveds <= 3 ? compiler->saveds : 3));
	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);

	INC_SIZE(size);

	if (compiler->saveds > 0 || compiler->scratches > 11)
		POP_REG(reg_map[SLJIT_S0]);
	if (compiler->saveds > 1 || compiler->scratches > 10)
		POP_REG(reg_map[SLJIT_S1]);
	if (compiler->saveds > 2 || compiler->scratches > 9)
		POP_REG(reg_map[SLJIT_S2]);
	POP_REG(reg_map[TMP_REG1]);

	return SLJIT_SUCCESS;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
{
	sljit_uw size;
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_return_void(compiler));

	SLJIT_ASSERT(compiler->args_size >= 0);
	SLJIT_ASSERT(compiler->local_size > 0);

	BINARY_IMM32(ADD, compiler->local_size, SLJIT_SP, 0);

	FAIL_IF(emit_stack_frame_release(compiler));

	size = 1;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args_size > 0 && !(compiler->options & SLJIT_ENTER_CDECL))
		size = 3;
#endif
	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);

	INC_SIZE(size);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args_size > 0 && !(compiler->options & SLJIT_ENTER_CDECL)) {
		RET_I16(U8(compiler->args_size));
		return SLJIT_SUCCESS;
	}
#endif

	RET();
	return SLJIT_SUCCESS;
}
/* --------------------------------------------------------------------- */
/*  Call / return instructions                                            */
/* --------------------------------------------------------------------- */
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)

static sljit_sw c_fast_call_get_stack_size(sljit_s32 arg_types, sljit_s32 *word_arg_count_ptr)
{
	sljit_sw stack_size = 0;
	sljit_s32 word_arg_count = 0;

	arg_types >>= SLJIT_ARG_SHIFT;
	while (arg_types) {
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			stack_size += SSIZE_OF(f64);
			break;
		case SLJIT_ARG_TYPE_F32:
			stack_size += SSIZE_OF(f32);
			break;
		default:
			word_arg_count++;
			if (word_arg_count > 2)
				stack_size += SSIZE_OF(sw);
			break;
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	if (word_arg_count_ptr)
		*word_arg_count_ptr = word_arg_count;

	return stack_size;
}
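/* Under this fastcall variant the first two word arguments travel in registers
   (ECX/EDX); every further word argument and all floating point arguments go
   on the stack.  For example, an (sw, sw, sw, f64) signature yields
   4 + 8 = 12 bytes of stack arguments. */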
static sljit_s32 c_fast_call_with_args(struct sljit_compiler *compiler,
	sljit_s32 arg_types, sljit_sw stack_size, sljit_s32 word_arg_count, sljit_s32 swap_args)
{
	sljit_u8 *inst;
	sljit_s32 float_arg_count;

	if (stack_size == SSIZE_OF(sw) && word_arg_count == 3) {
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
		PUSH_REG(reg_map[SLJIT_R2]);
	}
	else if (stack_size > 0) {
		if (word_arg_count >= 4)
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), compiler->scratches_offset);

		BINARY_IMM32(SUB, stack_size, SLJIT_SP, 0);

		stack_size = 0;
		arg_types >>= SLJIT_ARG_SHIFT;
		word_arg_count = 0;
		float_arg_count = 0;
		while (arg_types) {
			switch (arg_types & SLJIT_ARG_MASK) {
			case SLJIT_ARG_TYPE_F64:
				float_arg_count++;
				FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
				stack_size += SSIZE_OF(f64);
				break;
			case SLJIT_ARG_TYPE_F32:
				float_arg_count++;
				FAIL_IF(emit_sse2_store(compiler, 1, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
				stack_size += SSIZE_OF(f32);
				break;
			default:
				word_arg_count++;
				if (word_arg_count == 3) {
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, SLJIT_R2, 0);
					stack_size += SSIZE_OF(sw);
				}
				else if (word_arg_count == 4) {
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, TMP_REG1, 0);
					stack_size += SSIZE_OF(sw);
				}
				break;
			}
			arg_types >>= SLJIT_ARG_SHIFT;
		}
	}

	if (word_arg_count > 0) {
		if (swap_args) {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
			FAIL_IF(!inst);
			INC_SIZE(1);

			*inst++ = U8(XCHG_EAX_r | reg_map[SLJIT_R2]);
		}
		else {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
			FAIL_IF(!inst);
			INC_SIZE(2);

			*inst++ = MOV_r_rm;
			*inst++ = U8(MOD_REG | (reg_map[SLJIT_R2] << 3) | reg_map[SLJIT_R0]);
		}
	}

	return SLJIT_SUCCESS;
}
static sljit_s32 cdecl_call_get_stack_size(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *word_arg_count_ptr)
{
	sljit_sw stack_size = 0;
	sljit_s32 word_arg_count = 0;

	arg_types >>= SLJIT_ARG_SHIFT;
	while (arg_types) {
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			stack_size += SSIZE_OF(f64);
			break;
		case SLJIT_ARG_TYPE_F32:
			stack_size += SSIZE_OF(f32);
			break;
		default:
			word_arg_count++;
			stack_size += SSIZE_OF(sw);
			break;
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	if (word_arg_count_ptr)
		*word_arg_count_ptr = word_arg_count;

	if (stack_size <= compiler->scratches_offset)
		return 0;

	return ((stack_size - compiler->scratches_offset + 0xf) & ~0xf);
}
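/* cdecl passes every argument on the stack.  The area below scratches_offset
   already belongs to the current frame and can be reused for outgoing
   arguments, so only the excess is allocated, rounded up to keep the stack
   16-byte aligned: e.g. with scratches_offset == 8 and four word arguments
   (16 bytes), ((16 - 8 + 0xf) & ~0xf) == 16 extra bytes are reserved. */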
static sljit_s32 cdecl_call_with_args(struct sljit_compiler *compiler,
	sljit_s32 arg_types, sljit_sw stack_size, sljit_s32 word_arg_count)
{
	sljit_s32 float_arg_count = 0;
	sljit_u8 *inst;

	if (word_arg_count >= 4)
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), compiler->scratches_offset);

	if (stack_size > 0)
		BINARY_IMM32(SUB, stack_size, SLJIT_SP, 0);

	stack_size = 0;
	word_arg_count = 0;
	arg_types >>= SLJIT_ARG_SHIFT;

	while (arg_types) {
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			float_arg_count++;
			FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
			stack_size += SSIZE_OF(f64);
			break;
		case SLJIT_ARG_TYPE_F32:
			float_arg_count++;
			FAIL_IF(emit_sse2_store(compiler, 1, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
			stack_size += SSIZE_OF(f32);
			break;
		default:
			word_arg_count++;
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, (word_arg_count >= 4) ? TMP_REG1 : word_arg_count, 0);
			stack_size += SSIZE_OF(sw);
			break;
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	return SLJIT_SUCCESS;
}
static sljit_s32 post_call_with_args(struct sljit_compiler *compiler,
	sljit_s32 arg_types, sljit_s32 stack_size)
{
	sljit_u8 *inst;
	sljit_s32 single;

	if (stack_size > 0)
		BINARY_IMM32(ADD, stack_size, SLJIT_SP, 0);

	if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64)
		return SLJIT_SUCCESS;

	single = ((arg_types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F32);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
	FAIL_IF(!inst);
	INC_SIZE(3);
	inst[0] = single ? FSTPS : FSTPD;
	inst[1] = (0x03 << 3) | 0x04;
	inst[2] = (0x04 << 3) | reg_map[SLJIT_SP];

	return emit_sse2_load(compiler, single, SLJIT_FR0, SLJIT_MEM1(SLJIT_SP), 0);
}
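/* A cdecl/fastcall callee returns floating point values in x87 ST(0).  The
   three bytes emitted above encode "fstp dword/qword ptr [esp]" (opcode,
   ModR/M with /3 and an SIB byte, SIB base = esp), which spills ST(0) so that
   emit_sse2_load can pick the value up into the XMM register backing
   SLJIT_FR0. */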
static sljit_s32 tail_call_with_args(struct sljit_compiler *compiler,
	sljit_s32 *extra_space, sljit_s32 arg_types,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_sw args_size, prev_args_size, saved_regs_size;
	sljit_sw types, word_arg_count, float_arg_count;
	sljit_sw stack_size, prev_stack_size, min_size, offset;
	sljit_sw word_arg4_offset;
	sljit_u8 r2_offset = 0;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	sljit_u8 fast_call = (*extra_space & 0xff) == SLJIT_CALL;
#endif /* SLJIT_X86_32_FASTCALL */
	sljit_u8 *inst;

	ADJUST_LOCAL_OFFSET(src, srcw);
	CHECK_EXTRA_REGS(src, srcw, (void)0);

	saved_regs_size = (1 + (compiler->scratches > 9 ? (compiler->scratches - 9) : 0)
		+ (compiler->saveds <= 3 ? compiler->saveds : 3)) * SSIZE_OF(sw);

	word_arg_count = 0;
	float_arg_count = 0;
	types = 0;
	args_size = 0;
	arg_types >>= SLJIT_ARG_SHIFT;

	while (arg_types != 0) {
		types = (types << SLJIT_ARG_SHIFT) | (arg_types & SLJIT_ARG_MASK);

		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			args_size += SSIZE_OF(f64);
			float_arg_count++;
			break;
		case SLJIT_ARG_TYPE_F32:
			args_size += SSIZE_OF(f32);
			float_arg_count++;
			break;
		default:
			word_arg_count++;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
			if (!fast_call || word_arg_count > 2)
				args_size += SSIZE_OF(sw);
#else
			args_size += SSIZE_OF(sw);
#endif
			break;
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	if (args_size <= compiler->args_size
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
			&& (!(compiler->options & SLJIT_ENTER_CDECL) || args_size == 0 || !fast_call)
#endif /* SLJIT_X86_32_FASTCALL */
			) {
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
		*extra_space = fast_call ? 0 : args_size;
		prev_args_size = compiler->args_size;
		stack_size = prev_args_size + SSIZE_OF(sw) + saved_regs_size;
#else /* !SLJIT_X86_32_FASTCALL */
		*extra_space = 0;
		stack_size = args_size + SSIZE_OF(sw) + saved_regs_size;
#endif /* SLJIT_X86_32_FASTCALL */

		offset = stack_size + compiler->local_size;

		if (!(src & SLJIT_IMM) && src != SLJIT_R0) {
			if (word_arg_count >= 1) {
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R0, 0);
				r2_offset = sizeof(sljit_sw);
			}
			EMIT_MOV(compiler, SLJIT_R0, 0, src, srcw);
		}

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
		if (!(compiler->options & SLJIT_ENTER_CDECL)) {
			if (!fast_call)
				offset -= SSIZE_OF(sw);

			if (word_arg_count >= 3) {
				word_arg4_offset = SSIZE_OF(sw);

				if (word_arg_count + float_arg_count >= 4) {
					word_arg4_offset = SSIZE_OF(sw) + SSIZE_OF(sw);
					if ((types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F64)
						word_arg4_offset = SSIZE_OF(sw) + SSIZE_OF(f64);
				}

				/* In cdecl mode, at least one more word value must
				 * be present on the stack before the return address. */
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset - word_arg4_offset, SLJIT_R2, 0);
			}

			if (fast_call) {
				if (args_size < prev_args_size) {
					EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), offset - prev_args_size - SSIZE_OF(sw));
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset - args_size - SSIZE_OF(sw), SLJIT_R2, 0);
				}
			} else if (prev_args_size > 0) {
				EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), offset - prev_args_size);
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R2, 0);
			}
		}
#endif /* SLJIT_X86_32_FASTCALL */

		while (types != 0) {
			switch (types & SLJIT_ARG_MASK) {
			case SLJIT_ARG_TYPE_F64:
				offset -= SSIZE_OF(f64);
				FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), offset, float_arg_count));
				float_arg_count--;
				break;
			case SLJIT_ARG_TYPE_F32:
				offset -= SSIZE_OF(f32);
				FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), offset, float_arg_count));
				float_arg_count--;
				break;
			default:
				switch (word_arg_count) {
				case 1:
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
					if (fast_call) {
						EMIT_MOV(compiler, SLJIT_R2, 0, r2_offset != 0 ? SLJIT_MEM1(SLJIT_SP) : SLJIT_R0, 0);
						break;
					}
#endif
					offset -= SSIZE_OF(sw);
					if (r2_offset != 0) {
						EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), 0);
						EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R2, 0);
					} else
						EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R0, 0);
					break;
				case 2:
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
					if (fast_call)
						break;
#endif
					offset -= SSIZE_OF(sw);
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R1, 0);
					break;
				case 3:
					offset -= SSIZE_OF(sw);
					break;
				case 4:
					offset -= SSIZE_OF(sw);
					EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), compiler->scratches_offset);
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R2, 0);
					break;
				}
				word_arg_count--;
				break;
			}
			types >>= SLJIT_ARG_SHIFT;
		}

		BINARY_IMM32(ADD, compiler->local_size, SLJIT_SP, 0);
		FAIL_IF(emit_stack_frame_release(compiler));

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
		if (args_size < prev_args_size)
			BINARY_IMM32(ADD, prev_args_size - args_size, SLJIT_SP, 0);
#endif

		return SLJIT_SUCCESS;
	}

	stack_size = args_size + SSIZE_OF(sw);

	if (word_arg_count >= 1 && !(src & SLJIT_IMM) && src != SLJIT_R0) {
		r2_offset = SSIZE_OF(sw);
		stack_size += SSIZE_OF(sw);
	}

	if (word_arg_count >= 3)
		stack_size += SSIZE_OF(sw);

	prev_args_size = 0;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (!(compiler->options & SLJIT_ENTER_CDECL))
		prev_args_size = compiler->args_size;
#endif

	prev_stack_size = prev_args_size + SSIZE_OF(sw) + saved_regs_size;
	min_size = prev_stack_size + compiler->local_size;

	word_arg4_offset = compiler->scratches_offset;

	if (stack_size > min_size) {
		BINARY_IMM32(SUB, stack_size - min_size, SLJIT_SP, 0);
		if (src == SLJIT_MEM1(SLJIT_SP))
			srcw += stack_size - min_size;
		word_arg4_offset += stack_size - min_size;
	}
	else
		stack_size = min_size;

	if (word_arg_count >= 3) {
		EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), r2_offset, SLJIT_R2, 0);

		if (word_arg_count >= 4)
			EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), word_arg4_offset);
	}

	if (!(src & SLJIT_IMM) && src != SLJIT_R0) {
		if (word_arg_count >= 1) {
			SLJIT_ASSERT(r2_offset == sizeof(sljit_sw));
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_R0, 0);
		}
		EMIT_MOV(compiler, SLJIT_R0, 0, src, srcw);
	}

	/* Restore saved registers. */
	offset = stack_size - prev_args_size - 2 * SSIZE_OF(sw);
	EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), offset);

	if (compiler->saveds > 2 || compiler->scratches > 9) {
		offset -= SSIZE_OF(sw);
		EMIT_MOV(compiler, SLJIT_S2, 0, SLJIT_MEM1(SLJIT_SP), offset);
	}
	if (compiler->saveds > 1 || compiler->scratches > 10) {
		offset -= SSIZE_OF(sw);
		EMIT_MOV(compiler, SLJIT_S1, 0, SLJIT_MEM1(SLJIT_SP), offset);
	}
	if (compiler->saveds > 0 || compiler->scratches > 11) {
		offset -= SSIZE_OF(sw);
		EMIT_MOV(compiler, SLJIT_S0, 0, SLJIT_MEM1(SLJIT_SP), offset);
	}

	/* Copy fourth argument and return address. */
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (fast_call) {
		offset = stack_size;
		*extra_space = 0;

		if (word_arg_count >= 4 && prev_args_size == 0) {
			offset -= SSIZE_OF(sw);
			inst = emit_x86_instruction(compiler, 1, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), offset);
			FAIL_IF(!inst);
			*inst = XCHG_r_rm;

			SLJIT_ASSERT(args_size != prev_args_size);
		} else {
			if (word_arg_count >= 4) {
				offset -= SSIZE_OF(sw);
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R2, 0);
			}

			if (args_size != prev_args_size)
				EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), stack_size - prev_args_size - SSIZE_OF(sw));
		}

		if (args_size != prev_args_size)
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size - args_size - SSIZE_OF(sw), SLJIT_R2, 0);
	} else {
#endif /* SLJIT_X86_32_FASTCALL */
		offset = stack_size - SSIZE_OF(sw);
		*extra_space = args_size;

		if (word_arg_count >= 4 && prev_args_size == SSIZE_OF(sw)) {
			offset -= SSIZE_OF(sw);
			inst = emit_x86_instruction(compiler, 1, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), offset);
			FAIL_IF(!inst);
			*inst = XCHG_r_rm;

			SLJIT_ASSERT(prev_args_size > 0);
		} else {
			if (word_arg_count >= 4) {
				offset -= SSIZE_OF(sw);
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R2, 0);
			}

			if (prev_args_size > 0)
				EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), stack_size - prev_args_size - SSIZE_OF(sw));
		}

		/* Copy return address. */
		if (prev_args_size > 0)
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size - SSIZE_OF(sw), SLJIT_R2, 0);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	}
#endif /* SLJIT_X86_32_FASTCALL */

	while (types != 0) {
		switch (types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			offset -= SSIZE_OF(f64);
			FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), offset, float_arg_count));
			float_arg_count--;
			break;
		case SLJIT_ARG_TYPE_F32:
			offset -= SSIZE_OF(f32);
			FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), offset, float_arg_count));
			float_arg_count--;
			break;
		default:
			switch (word_arg_count) {
			case 1:
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
				if (fast_call) {
					EMIT_MOV(compiler, SLJIT_R2, 0, r2_offset != 0 ? SLJIT_MEM1(SLJIT_SP) : SLJIT_R0, 0);
					break;
				}
#endif
				offset -= SSIZE_OF(sw);
				if (r2_offset != 0) {
					EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), 0);
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R2, 0);
				} else
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R0, 0);
				break;
			case 2:
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
				if (fast_call)
					break;
#endif
				offset -= SSIZE_OF(sw);
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R1, 0);
				break;
			case 3:
				offset -= SSIZE_OF(sw);
				EMIT_MOV(compiler, SLJIT_R2, 0, SLJIT_MEM1(SLJIT_SP), r2_offset);
				EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), offset, SLJIT_R2, 0);
				break;
			}
			word_arg_count--;
			break;
		}
		types >>= SLJIT_ARG_SHIFT;
	}

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	/* Skip return address. */
	if (fast_call)
		offset -= SSIZE_OF(sw);
#endif

	SLJIT_ASSERT(offset >= 0);

	if (offset == 0)
		return SLJIT_SUCCESS;

	BINARY_IMM32(ADD, offset, SLJIT_SP, 0);
	return SLJIT_SUCCESS;
}
static sljit_s32 emit_tail_call_end(struct sljit_compiler *compiler, sljit_s32 extra_space)
{
	/* Called when stack consumption cannot be reduced to 0. */
	sljit_u8 *inst;

	BINARY_IMM32(ADD, extra_space, SLJIT_SP, 0);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
	FAIL_IF(!inst);
	INC_SIZE(1);
	RET();

	return SLJIT_SUCCESS;
}
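/* Used by the tail call paths below: when the outgoing argument area cannot be
   folded into the current frame (extra_space != 0), the "tail" call is emitted
   as an ordinary call, and this epilogue releases the remaining extra_space
   bytes before returning to the original caller. */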
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types)
{
	struct sljit_jump *jump;
	sljit_sw stack_size = 0;
	sljit_s32 word_arg_count;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));

	if (type & SLJIT_CALL_RETURN) {
		stack_size = type;
		PTR_FAIL_IF(tail_call_with_args(compiler, &stack_size, arg_types, SLJIT_IMM, 0));

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
		compiler->skip_checks = 1;
#endif

		if (stack_size == 0) {
			type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
			return sljit_emit_jump(compiler, type);
		}

		jump = sljit_emit_jump(compiler, type);
		PTR_FAIL_IF(jump == NULL);

		PTR_FAIL_IF(emit_tail_call_end(compiler, stack_size));
		return jump;
	}

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if ((type & 0xff) == SLJIT_CALL) {
		stack_size = c_fast_call_get_stack_size(arg_types, &word_arg_count);
		PTR_FAIL_IF(c_fast_call_with_args(compiler, arg_types, stack_size, word_arg_count, 0));

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
		compiler->skip_checks = 1;
#endif

		jump = sljit_emit_jump(compiler, type);
		PTR_FAIL_IF(jump == NULL);

		PTR_FAIL_IF(post_call_with_args(compiler, arg_types, 0));
		return jump;
	}
#endif

	stack_size = cdecl_call_get_stack_size(compiler, arg_types, &word_arg_count);
	PTR_FAIL_IF(cdecl_call_with_args(compiler, arg_types, stack_size, word_arg_count));

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
	compiler->skip_checks = 1;
#endif

	jump = sljit_emit_jump(compiler, type);
	PTR_FAIL_IF(jump == NULL);

	PTR_FAIL_IF(post_call_with_args(compiler, arg_types, stack_size));
	return jump;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_sw stack_size = 0;
	sljit_s32 word_arg_count;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	sljit_s32 swap_args;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));

	if (type & SLJIT_CALL_RETURN) {
		stack_size = type;
		FAIL_IF(tail_call_with_args(compiler, &stack_size, arg_types, src, srcw));

		if (!(src & SLJIT_IMM)) {
			src = SLJIT_R0;
			srcw = 0;
		}

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
		compiler->skip_checks = 1;
#endif

		if (stack_size == 0)
			return sljit_emit_ijump(compiler, SLJIT_JUMP, src, srcw);

		FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));
		return emit_tail_call_end(compiler, stack_size);
	}

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	SLJIT_ASSERT(reg_map[SLJIT_R0] == 0 && reg_map[SLJIT_R2] == 1 && SLJIT_R0 == 1 && SLJIT_R2 == 3);

	if ((type & 0xff) == SLJIT_CALL) {
		stack_size = c_fast_call_get_stack_size(arg_types, &word_arg_count);
		swap_args = 0;

		if (word_arg_count > 0) {
			if ((src & REG_MASK) == SLJIT_R2 || OFFS_REG(src) == SLJIT_R2) {
				swap_args = 1;
				if (((src & REG_MASK) | 0x2) == SLJIT_R2)
					src ^= 0x2;
				if ((OFFS_REG(src) | 0x2) == SLJIT_R2)
					src ^= TO_OFFS_REG(0x2);
			}
		}

		FAIL_IF(c_fast_call_with_args(compiler, arg_types, stack_size, word_arg_count, swap_args));

		compiler->scratches_offset += stack_size;
		compiler->locals_offset += stack_size;

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
		compiler->skip_checks = 1;
#endif
		FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));

		compiler->scratches_offset -= stack_size;
		compiler->locals_offset -= stack_size;

		return post_call_with_args(compiler, arg_types, 0);
	}
#endif

	stack_size = cdecl_call_get_stack_size(compiler, arg_types, &word_arg_count);
	FAIL_IF(cdecl_call_with_args(compiler, arg_types, stack_size, word_arg_count));

	compiler->scratches_offset += stack_size;
	compiler->locals_offset += stack_size;

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
	compiler->skip_checks = 1;
#endif
	FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));

	compiler->scratches_offset -= stack_size;
	compiler->locals_offset -= stack_size;

	return post_call_with_args(compiler, arg_types, stack_size);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	if (FAST_IS_REG(dst)) {
		/* Unused dest is possible here. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);

		INC_SIZE(1);
		POP_REG(reg_map[dst]);
		return SLJIT_SUCCESS;
	}

	/* Memory. */
	inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
	FAIL_IF(!inst);
	*inst++ = POP_rm;
	return SLJIT_SUCCESS;
}
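/* sljit "fast calls" hand the return address to the callee explicitly: the
   matching call pushes it, and this entry pops it straight into dst (a single
   58+r pop for a register destination, or a "pop r/m32" form for memory), so
   the callee can later return through it via emit_fast_return() below. */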
static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	if (FAST_IS_REG(src)) {
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 1);
		FAIL_IF(!inst);

		INC_SIZE(1 + 1);
		PUSH_REG(reg_map[src]);
	}
	else {
		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!inst);
		*inst++ = GROUP_FF;
		*inst |= PUSH_rm;

		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
	}

	RET();
	return SLJIT_SUCCESS;
}
static sljit_s32 skip_frames_before_return(struct sljit_compiler *compiler)
{
	sljit_sw size;

	/* Don't adjust shadow stack if it isn't enabled. */
	if (!cpu_has_shadow_stack())
		return SLJIT_SUCCESS;

	SLJIT_ASSERT(compiler->args_size >= 0);
	SLJIT_ASSERT(compiler->local_size > 0);

	size = compiler->local_size;
	size += (1 + (compiler->scratches > 9 ? (compiler->scratches - 9) : 0)
		+ (compiler->saveds <= 3 ? compiler->saveds : 3)) * SSIZE_OF(sw);

	return adjust_shadow_stack(compiler, SLJIT_MEM1(SLJIT_SP), size);
}