/*
 * Stack-less Just-In-Time compiler
 *
 *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/* Latest MIPS architecture. */

#ifndef __mips_hard_float
/* Disable automatic detection, covers both -msoft-float and -mno-float */
#undef SLJIT_IS_FPU_AVAILABLE
#define SLJIT_IS_FPU_AVAILABLE 0
#endif
SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	return "MIPS32-R6" SLJIT_CPUINFO;
#else /* !SLJIT_CONFIG_MIPS_32 */
	return "MIPS64-R6" SLJIT_CPUINFO;
#endif /* SLJIT_CONFIG_MIPS_32 */

#elif (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	return "MIPS32-R1" SLJIT_CPUINFO;
#else /* !SLJIT_CONFIG_MIPS_32 */
	return "MIPS64-R1" SLJIT_CPUINFO;
#endif /* SLJIT_CONFIG_MIPS_32 */

#else /* SLJIT_MIPS_REV < 1 */
	return "MIPS III" SLJIT_CPUINFO;
#endif /* SLJIT_MIPS_REV >= 6 */
}

/* Length of an instruction word
   Both for mips-32 and mips-64 */
typedef sljit_u32 sljit_ins;
#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
#define TMP_REG3 (SLJIT_NUMBER_OF_REGISTERS + 4)

/* For position independent code, t9 must contain the function address. */
#define PIC_ADDR_REG TMP_REG2

/* Floating point status register. */
/* Return address register. */
#define RETURN_ADDR_REG 31

/* Flags are kept in volatile registers. */

#define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
#define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
#define TMP_FREG3 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3)
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
	0, 2, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 4, 25, 31
};

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
	0, 0, 14, 2, 4, 6, 8, 18, 30, 28, 26, 24, 22, 20, 12, 10, 16
};

#else /* !SLJIT_CONFIG_MIPS_32 */

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
	0, 0, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 1, 2, 3, 4, 5, 6, 7, 8, 9, 31, 30, 29, 28, 27, 26, 25, 24, 12, 11, 10
};

#endif /* SLJIT_CONFIG_MIPS_32 */
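/* Illustrative note (not part of the original sources): reg_map and freg_map translate
   SLJIT's virtual register indices into machine register numbers. Assuming the first
   scratch register uses virtual index 1, reg_map[1] == 2 places it in MIPS $2 ($v0),
   and TMP_REG2 (PIC_ADDR_REG) maps to 25 ($t9), which the PIC calling convention
   expects to hold the callee address. The DR()/FR() macros below simply perform this
   table lookup. */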
/* --------------------------------------------------------------------- */
/*  Instruction forms                                                     */
/* --------------------------------------------------------------------- */

#define S(s) ((sljit_ins)reg_map[s] << 21)
#define T(t) ((sljit_ins)reg_map[t] << 16)
#define D(d) ((sljit_ins)reg_map[d] << 11)
#define FT(t) ((sljit_ins)freg_map[t] << 16)
#define FS(s) ((sljit_ins)freg_map[s] << 11)
#define FD(d) ((sljit_ins)freg_map[d] << 6)
/* Absolute registers. */
#define SA(s) ((sljit_ins)(s) << 21)
#define TA(t) ((sljit_ins)(t) << 16)
#define DA(d) ((sljit_ins)(d) << 11)
#define IMM(imm) ((sljit_ins)(imm) & 0xffff)
#define SH_IMM(imm) ((sljit_ins)(imm) << 6)

#define DR(dr) (reg_map[dr])
#define FR(dr) (freg_map[dr])
#define HI(opcode) ((sljit_ins)(opcode) << 26)
#define LO(opcode) ((sljit_ins)(opcode))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
/* S = (20 << 21) D = (21 << 21) */
#define CMP_FMT_S (20 << 21)
#endif /* SLJIT_MIPS_REV >= 6 */
/* S = (16 << 21) D = (17 << 21) */
#define FMT_S (16 << 21)
#define FMT_D (17 << 21)
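/* Worked example (illustrative only): an instruction word is assembled by OR-ing the
   major opcode (HI), the function field (LO) and the register fields together.
   Assuming SLJIT_R0..SLJIT_R2 are virtual indices 1..3, which reg_map sends to
   $2, $5 and $6:

     ADDU | S(SLJIT_R0) | T(SLJIT_R1) | D(SLJIT_R2)
       == (0 << 26) | (2 << 21) | (5 << 16) | (6 << 11) | 33
       == 0x00453021   ->  addu $6, $2, $5

   Floating point opcodes add FMT_S or FMT_D in the same way to select single or
   double precision. */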
#define ABS_S (HI(17) | FMT_S | LO(5))
#define ADD_S (HI(17) | FMT_S | LO(0))
#define ADDIU (HI(9))
#define ADDU (HI(0) | LO(33))
#define AND (HI(0) | LO(36))
#define ANDI (HI(12))
#define BAL (HI(1) | (17 << 16))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define BC1EQZ (HI(17) | (9 << 21) | FT(TMP_FREG3))
#define BC1NEZ (HI(17) | (13 << 21) | FT(TMP_FREG3))
#else /* SLJIT_MIPS_REV < 6 */
#define BC1F (HI(17) | (8 << 21))
#define BC1T (HI(17) | (8 << 21) | (1 << 16))
#endif /* SLJIT_MIPS_REV >= 6 */
#define BGEZ (HI(1) | (1 << 16))
#define BLTZ (HI(1) | (0 << 16))
#define BREAK (HI(0) | LO(13))
#define CFC1 (HI(17) | (2 << 21))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define C_EQ_S (HI(17) | CMP_FMT_S | LO(2))
#define C_OLE_S (HI(17) | CMP_FMT_S | LO(6))
#define C_OLT_S (HI(17) | CMP_FMT_S | LO(4))
#define C_UEQ_S (HI(17) | CMP_FMT_S | LO(3))
#define C_ULE_S (HI(17) | CMP_FMT_S | LO(7))
#define C_ULT_S (HI(17) | CMP_FMT_S | LO(5))
#define C_UN_S (HI(17) | CMP_FMT_S | LO(1))
#define C_FD (FD(TMP_FREG3))
#else /* SLJIT_MIPS_REV < 6 */
#define C_EQ_S (HI(17) | FMT_S | LO(50))
#define C_OLE_S (HI(17) | FMT_S | LO(54))
#define C_OLT_S (HI(17) | FMT_S | LO(52))
#define C_UEQ_S (HI(17) | FMT_S | LO(51))
#define C_ULE_S (HI(17) | FMT_S | LO(55))
#define C_ULT_S (HI(17) | FMT_S | LO(53))
#define C_UN_S (HI(17) | FMT_S | LO(49))
#endif /* SLJIT_MIPS_REV >= 6 */
#define CVT_S_S (HI(17) | FMT_S | LO(32))
#define DADDIU (HI(25))
#define DADDU (HI(0) | LO(45))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define DDIV (HI(0) | (2 << 6) | LO(30))
#define DDIVU (HI(0) | (2 << 6) | LO(31))
#define DMOD (HI(0) | (3 << 6) | LO(30))
#define DMODU (HI(0) | (3 << 6) | LO(31))
#define DIV (HI(0) | (2 << 6) | LO(26))
#define DIVU (HI(0) | (2 << 6) | LO(27))
#define DMUH (HI(0) | (3 << 6) | LO(28))
#define DMUHU (HI(0) | (3 << 6) | LO(29))
#define DMUL (HI(0) | (2 << 6) | LO(28))
#define DMULU (HI(0) | (2 << 6) | LO(29))
#else /* SLJIT_MIPS_REV < 6 */
#define DDIV (HI(0) | LO(30))
#define DDIVU (HI(0) | LO(31))
#define DIV (HI(0) | LO(26))
#define DIVU (HI(0) | LO(27))
#define DMULT (HI(0) | LO(28))
#define DMULTU (HI(0) | LO(29))
#endif /* SLJIT_MIPS_REV >= 6 */
#define DIV_S (HI(17) | FMT_S | LO(3))
#define DINSU (HI(31) | LO(6))
#define DSLL (HI(0) | LO(56))
#define DSLL32 (HI(0) | LO(60))
#define DSLLV (HI(0) | LO(20))
#define DSRA (HI(0) | LO(59))
#define DSRA32 (HI(0) | LO(63))
#define DSRAV (HI(0) | LO(23))
#define DSRL (HI(0) | LO(58))
#define DSRL32 (HI(0) | LO(62))
#define DSRLV (HI(0) | LO(22))
#define DSUBU (HI(0) | LO(47))
#define JALR (HI(0) | LO(9))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define JR (HI(0) | LO(9))
#else /* SLJIT_MIPS_REV < 6 */
#define JR (HI(0) | LO(8))
#endif /* SLJIT_MIPS_REV >= 6 */
#define LDC1 (HI(53))
#define LWC1 (HI(49))
#define MFC1 (HI(17))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define MOD (HI(0) | (3 << 6) | LO(26))
#define MODU (HI(0) | (3 << 6) | LO(27))
#else /* SLJIT_MIPS_REV < 6 */
#define MFHI (HI(0) | LO(16))
#define MFLO (HI(0) | LO(18))
#endif /* SLJIT_MIPS_REV >= 6 */
#define MOV_S (HI(17) | FMT_S | LO(6))
#define MTC1 (HI(17) | (4 << 21))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define MUH (HI(0) | (3 << 6) | LO(24))
#define MUHU (HI(0) | (3 << 6) | LO(25))
#define MUL (HI(0) | (2 << 6) | LO(24))
#define MULU (HI(0) | (2 << 6) | LO(25))
#else /* SLJIT_MIPS_REV < 6 */
#define MULT (HI(0) | LO(24))
#define MULTU (HI(0) | LO(25))
#endif /* SLJIT_MIPS_REV >= 6 */
#define MUL_S (HI(17) | FMT_S | LO(2))
#define NEG_S (HI(17) | FMT_S | LO(7))
#define NOP (HI(0) | LO(0))
#define NOR (HI(0) | LO(39))
#define OR (HI(0) | LO(37))
#define SDC1 (HI(61))
#define SLT (HI(0) | LO(42))
#define SLTI (HI(10))
#define SLTIU (HI(11))
#define SLTU (HI(0) | LO(43))
#define SLL (HI(0) | LO(0))
#define SLLV (HI(0) | LO(4))
#define SRL (HI(0) | LO(2))
#define SRLV (HI(0) | LO(6))
#define SRA (HI(0) | LO(3))
#define SRAV (HI(0) | LO(7))
#define SUB_S (HI(17) | FMT_S | LO(1))
#define SUBU (HI(0) | LO(35))
#define SWC1 (HI(57))
#define TRUNC_W_S (HI(17) | FMT_S | LO(13))
#define XOR (HI(0) | LO(38))
#define XORI (HI(14))

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
#define CLZ (HI(28) | LO(32))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define DCLZ (LO(18))
#else /* SLJIT_MIPS_REV < 6 */
#define DCLZ (HI(28) | LO(36))
#define MOVF (HI(0) | (0 << 16) | LO(1))
#define MOVN (HI(0) | LO(11))
#define MOVT (HI(0) | (1 << 16) | LO(1))
#define MOVZ (HI(0) | LO(10))
#define MUL (HI(28) | LO(2))
#endif /* SLJIT_MIPS_REV >= 6 */
#define PREF (HI(51))
#define PREFX (HI(19) | LO(15))
#define SEB (HI(31) | (16 << 6) | LO(32))
#define SEH (HI(31) | (24 << 6) | LO(32))
#endif /* SLJIT_MIPS_REV >= 1 */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define ADDIU_W ADDIU
#else /* !SLJIT_CONFIG_MIPS_32 */
#define ADDIU_W DADDIU
#endif /* SLJIT_CONFIG_MIPS_32 */

#define SIMM_MAX (0x7fff)
#define SIMM_MIN (-0x8000)
#define UIMM_MAX (0xffff)
/* delay_slot is the absolute number of the destination register (or MOVABLE_INS /
   UNMOVABLE_INS); it is used for reordering instructions into the delay slot. */
static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot)
{
	sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
	SLJIT_ASSERT(delay_slot == MOVABLE_INS || delay_slot >= UNMOVABLE_INS
		|| (sljit_ins)delay_slot == ((ins >> 11) & 0x1f)
		|| (sljit_ins)delay_slot == ((ins >> 16) & 0x1f));
	compiler->delay_slot = delay_slot;
	return SLJIT_SUCCESS;
}
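/* Illustrative note (not part of the original sources): the delay_slot argument tells
   the branch emitter which register the freshly pushed instruction defines, so the
   instruction is only moved into a branch delay slot when that is safe. A typical call
   looks like

     push_inst(compiler, ADDU | S(src) | TA(0) | D(dst), DR(dst));

   while UNMOVABLE_INS pins the instruction in place and MOVABLE_INS marks it as freely
   movable. */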
static SLJIT_INLINE sljit_ins invert_branch(sljit_uw flags)
{
	if (flags & IS_BIT26_COND)
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
	if (flags & IS_BIT23_COND)
#endif /* SLJIT_MIPS_REV >= 6 */
static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code, sljit_sw executable_offset)
{
	sljit_uw target_addr;
	sljit_ins saved_inst;

	inst = (sljit_ins *)jump->addr;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	if (jump->flags & (SLJIT_REWRITABLE_JUMP | IS_CALL))
	if (jump->flags & SLJIT_REWRITABLE_JUMP)

	if (jump->flags & JUMP_ADDR)
		target_addr = jump->u.target;
		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
		target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;

	if (jump->flags & IS_COND)
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (jump->flags & IS_CALL)

	/* B instructions. */
	if (jump->flags & IS_MOVABLE) {
		diff = ((sljit_sw)target_addr - (sljit_sw)inst - executable_offset) >> 2;
		if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
			jump->flags |= PATCH_B;

			if (!(jump->flags & IS_COND)) {
				inst[-1] = (jump->flags & IS_JAL) ? BAL : B;
				jump->addr -= sizeof(sljit_ins);
			saved_inst = inst[0];
			inst[-1] = saved_inst ^ invert_branch(jump->flags);
			jump->addr -= 2 * sizeof(sljit_ins);

	diff = ((sljit_sw)target_addr - (sljit_sw)(inst + 1) - executable_offset) >> 2;
	if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
		jump->flags |= PATCH_B;

		if (!(jump->flags & IS_COND)) {
			inst[0] = (jump->flags & IS_JAL) ? BAL : B;
		inst[0] ^= invert_branch(jump->flags);
		jump->addr -= sizeof(sljit_ins);

	if (jump->flags & IS_COND) {
		if ((jump->flags & IS_MOVABLE) && (target_addr & ~(sljit_uw)0xfffffff) == ((jump->addr + 2 * sizeof(sljit_ins)) & ~(sljit_uw)0xfffffff)) {
			jump->flags |= PATCH_J;
			saved_inst = inst[0];
			inst[-1] = (saved_inst & 0xffff0000) | 3;
		else if ((target_addr & ~(sljit_uw)0xfffffff) == ((jump->addr + 3 * sizeof(sljit_ins)) & ~(sljit_uw)0xfffffff)) {
			jump->flags |= PATCH_J;
			inst[0] = (inst[0] & 0xffff0000) | 3;
			jump->addr += sizeof(sljit_ins);

	if ((jump->flags & IS_MOVABLE) && (target_addr & ~(sljit_uw)0xfffffff) == (jump->addr & ~(sljit_uw)0xfffffff)) {
		jump->flags |= PATCH_J;
		inst[-1] = (jump->flags & IS_JAL) ? JAL : J;
		jump->addr -= sizeof(sljit_ins);

	if ((target_addr & ~(sljit_uw)0xfffffff) == ((jump->addr + sizeof(sljit_ins)) & ~(sljit_uw)0xfffffff)) {
		jump->flags |= PATCH_J;
		inst[0] = (jump->flags & IS_JAL) ? JAL : J;

	if (jump->flags & IS_COND)

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (target_addr <= 0x7fffffff) {
		jump->flags |= PATCH_ABS32;
		if (jump->flags & IS_COND)

	if (target_addr <= 0x7fffffffffffl) {
		jump->flags |= PATCH_ABS48;
		if (jump->flags & IS_COND)
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)

static __attribute__ ((noinline)) void sljit_cache_flush(void* code, void* code_ptr)
{
	SLJIT_CACHE_FLUSH(code, code_ptr);
}
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)

static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
{
	if (max_label < 0x80000000l) {
		put_label->flags = PATCH_ABS32;

	if (max_label < 0x800000000000l) {
		put_label->flags = PATCH_ABS48;

	put_label->flags = 0;

#endif /* SLJIT_CONFIG_MIPS_64 */
static SLJIT_INLINE void load_addr_to_reg(void *dst, sljit_u32 reg)
{
	struct sljit_jump *jump;
	struct sljit_put_label *put_label;

		jump = (struct sljit_jump *)dst;
		inst = (sljit_ins *)jump->addr;
		addr = (flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
		put_label = (struct sljit_put_label *)dst;
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		flags = put_label->flags;
		inst = (sljit_ins *)put_label->addr;
		addr = put_label->label->addr;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	inst[0] = LUI | T(reg) | IMM(addr >> 16);
#else /* !SLJIT_CONFIG_MIPS_32 */
	if (flags & PATCH_ABS32) {
		SLJIT_ASSERT(addr < 0x80000000l);
		inst[0] = LUI | T(reg) | IMM(addr >> 16);
	else if (flags & PATCH_ABS48) {
		SLJIT_ASSERT(addr < 0x800000000000l);
		inst[0] = LUI | T(reg) | IMM(addr >> 32);
		inst[1] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
		inst[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);

		inst[0] = LUI | T(reg) | IMM(addr >> 48);
		inst[1] = ORI | S(reg) | T(reg) | IMM((addr >> 32) & 0xffff);
		inst[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);
		inst[3] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
		inst[4] = DSLL | T(reg) | D(reg) | SH_IMM(16);
#endif /* SLJIT_CONFIG_MIPS_32 */

	inst[1] = ORI | S(reg) | T(reg) | IMM(addr & 0xffff);
}
SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
{
	struct sljit_memory_fragment *buf;
	sljit_sw executable_offset;

	struct sljit_label *label;
	struct sljit_jump *jump;
	struct sljit_const *const_;
	struct sljit_put_label *put_label;

	CHECK_PTR(check_sljit_generate_code(compiler));
	reverse_buf(compiler);

	code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
	PTR_FAIL_WITH_EXEC_IF(code);

	executable_offset = SLJIT_EXEC_OFFSET(code);

	label = compiler->labels;
	jump = compiler->jumps;
	const_ = compiler->consts;
	put_label = compiler->put_labels;

		buf_ptr = (sljit_ins*)buf->memory;
		buf_end = buf_ptr + (buf->used_size >> 2);
			*code_ptr = *buf_ptr++;
			if (next_addr == word_count) {
				SLJIT_ASSERT(!label || label->size >= word_count);
				SLJIT_ASSERT(!jump || jump->addr >= word_count);
				SLJIT_ASSERT(!const_ || const_->addr >= word_count);
				SLJIT_ASSERT(!put_label || put_label->addr >= word_count);

				/* These structures are ordered by their address. */
				if (label && label->size == word_count) {
					label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
					label->size = (sljit_uw)(code_ptr - code);
				if (jump && jump->addr == word_count) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
					jump->addr = (sljit_uw)(code_ptr - 1);
					code_ptr = detect_jump_type(jump, code, executable_offset);
				if (const_ && const_->addr == word_count) {
					const_->addr = (sljit_uw)code_ptr;
					const_ = const_->next;
				if (put_label && put_label->addr == word_count) {
					SLJIT_ASSERT(put_label->label);
					put_label->addr = (sljit_uw)code_ptr;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
					code_ptr += put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
					put_label = put_label->next;
				next_addr = compute_next_addr(label, jump, const_, put_label);
	} while (buf_ptr < buf_end);

	if (label && label->size == word_count) {
		label->addr = (sljit_uw)code_ptr;
		label->size = (sljit_uw)(code_ptr - code);

	SLJIT_ASSERT(!label);
	SLJIT_ASSERT(!const_);
	SLJIT_ASSERT(!put_label);
	SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);

	jump = compiler->jumps;
		addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
		buf_ptr = (sljit_ins *)jump->addr;

		if (jump->flags & PATCH_B) {
			addr = (sljit_uw)((sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) - sizeof(sljit_ins)) >> 2);
			SLJIT_ASSERT((sljit_sw)addr <= SIMM_MAX && (sljit_sw)addr >= SIMM_MIN);
			buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((sljit_ins)addr & 0xffff);
		if (jump->flags & PATCH_J) {
			SLJIT_ASSERT((addr & ~(sljit_uw)0xfffffff)
				== (((sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) + sizeof(sljit_ins)) & ~(sljit_uw)0xfffffff));
			buf_ptr[0] |= (sljit_ins)(addr >> 2) & 0x03ffffff;
		load_addr_to_reg(jump, PIC_ADDR_REG);

	put_label = compiler->put_labels;
		load_addr_to_reg(put_label, 0);
		put_label = put_label->next;

	compiler->error = SLJIT_ERR_COMPILED;
	compiler->executable_offset = executable_offset;
	compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);

	code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
	code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);

	SLJIT_CACHE_FLUSH(code, code_ptr);
	/* GCC workaround for invalid code generation with -O2. */
	sljit_cache_flush(code, code_ptr);
	SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
#if defined(__GNUC__) && !defined(SLJIT_IS_FPU_AVAILABLE)
#endif /* __GNUC__ && !SLJIT_IS_FPU_AVAILABLE */

	switch (feature_type) {
#ifdef SLJIT_IS_FPU_AVAILABLE
		return SLJIT_IS_FPU_AVAILABLE;
#elif defined(__GNUC__)
		__asm__ ("cfc1 %0, $0" : "=r"(fir));
		return (fir >> 22) & 0x1;
#error "FIR check is not implemented for this architecture"
	case SLJIT_HAS_ZERO_REGISTER:

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
	case SLJIT_HAS_PREFETCH:
#endif /* SLJIT_MIPS_REV >= 1 */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
{
	return (type >= SLJIT_ORDERED_EQUAL && type <= SLJIT_ORDERED_LESS_EQUAL);
}
/* --------------------------------------------------------------------- */
/* --------------------------------------------------------------------- */

/* Creates an index in data_transfer_insts array. */
#define LOAD_DATA 0x01
#define WORD_DATA 0x00
#define BYTE_DATA 0x02
#define HALF_DATA 0x04
#define INT_DATA 0x06
#define SIGNED_DATA 0x08
/* Separates integer and floating point registers */
#define DOUBLE_DATA 0x10
#define SINGLE_DATA 0x12

#define MEM_MASK 0x1f

#define ARG_TEST 0x00020
#define ALT_KEEP_CACHE 0x00040
#define CUMULATIVE_OP 0x00080
#define LOGICAL_OP 0x00100
#define IMM_OP 0x00200
#define MOVE_OP 0x00400
#define SRC2_IMM 0x00800

#define UNUSED_DEST 0x01000
#define REG_DEST 0x02000
#define REG1_SOURCE 0x04000
#define REG2_SOURCE 0x08000
#define SLOW_SRC1 0x10000
#define SLOW_SRC2 0x20000
#define SLOW_DEST 0x40000
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define STACK_STORE SW
#define STACK_LOAD LW
#else /* !SLJIT_CONFIG_MIPS_32 */
#define STACK_STORE SD
#define STACK_LOAD LD
#endif /* SLJIT_CONFIG_MIPS_32 */
static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw);
static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 frame_size, sljit_ins *ins_ptr);

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#include "sljitNativeMIPS_32.c"
#else /* !SLJIT_CONFIG_MIPS_32 */
#include "sljitNativeMIPS_64.c"
#endif /* SLJIT_CONFIG_MIPS_32 */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	sljit_s32 i, tmp, offset;
	sljit_s32 arg_count, word_arg_count, float_arg_count;
	sljit_s32 saved_arg_count = SLJIT_KEPT_SAVEDS_COUNT(options);

	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 1);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
		if ((local_size & SSIZE_OF(sw)) != 0)
			local_size += SSIZE_OF(sw);
		local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));

	local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
	local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
	local_size = (local_size + SLJIT_LOCALS_OFFSET + 31) & ~0x1f;
	compiler->local_size = local_size;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	if (!(options & SLJIT_ENTER_REG_ARG)) {
		tmp = arg_types >> SLJIT_ARG_SHIFT;
			if ((tmp & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F64) {
				if ((arg_count & 0x1) != 0)
			tmp >>= SLJIT_ARG_SHIFT;

		compiler->args_size = (sljit_uw)arg_count << 2;
	offset = (offset >= 4) ? (offset << 2) : 0;
#endif /* SLJIT_CONFIG_MIPS_32 */

	if (local_size + offset <= -SIMM_MIN) {
		FAIL_IF(push_inst(compiler, ADDIU_W | S(SLJIT_SP) | T(SLJIT_SP) | IMM(-local_size), DR(SLJIT_SP)));
		offset = local_size - SSIZE_OF(sw);
		FAIL_IF(load_immediate(compiler, OTHER_FLAG, local_size));
		FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
		FAIL_IF(push_inst(compiler, SUBU_W | S(SLJIT_SP) | TA(OTHER_FLAG) | D(SLJIT_SP), DR(SLJIT_SP)));
		offset = -SSIZE_OF(sw);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)

	FAIL_IF(push_inst(compiler, STACK_STORE | base | TA(RETURN_ADDR_REG) | IMM(offset), MOVABLE_INS));

	tmp = SLJIT_S0 - saveds;
	for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--) {
		offset -= SSIZE_OF(sw);
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offset), MOVABLE_INS));

	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
		offset -= SSIZE_OF(sw);
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offset), MOVABLE_INS));

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	/* This alignment is valid because offset is not used after storing FPU regs. */
	if ((offset & SSIZE_OF(sw)) != 0)
		offset -= SSIZE_OF(sw);

	tmp = SLJIT_FS0 - fsaveds;
	for (i = SLJIT_FS0; i > tmp; i--) {
		offset -= SSIZE_OF(f64);
		FAIL_IF(push_inst(compiler, SDC1 | base | FT(i) | IMM(offset), MOVABLE_INS));

	for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
		offset -= SSIZE_OF(f64);
		FAIL_IF(push_inst(compiler, SDC1 | base | FT(i) | IMM(offset), MOVABLE_INS));

	if (options & SLJIT_ENTER_REG_ARG)
		return SLJIT_SUCCESS;

	arg_types >>= SLJIT_ARG_SHIFT;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	/* At most the first two floating point arguments are passed in floating point
	   registers if no integer argument precedes them. The first 16 bytes of arguments
	   are passed in four integer registers, the rest is placed onto the stack.
	   The floating point arguments are also part of the first 16 bytes, so their
	   corresponding integer registers are not used when they are present. */
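	/* Illustrative example (an assumption about the o32 convention, not taken from this
	   file): for a (f64, w) signature the double travels in $f12 and, because it
	   occupies the first two 4-byte argument slots, the following word argument arrives
	   in $a2; with a (w, f64) signature an integer argument precedes the double, so the
	   double is passed in $a2/$a3 instead and the MTC1 pair below moves it into an FPU
	   register. */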
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			if ((arg_count & 0x1) != 0)
			if (word_arg_count == 0 && float_arg_count <= 2) {
				if (float_arg_count == 1)
					FAIL_IF(push_inst(compiler, MOV_S | FMT_D | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
			} else if (arg_count < 4) {
				FAIL_IF(push_inst(compiler, MTC1 | TA(4 + arg_count) | FS(float_arg_count), MOVABLE_INS));
				FAIL_IF(push_inst(compiler, MTC1 | TA(5 + arg_count) | FS(float_arg_count) | (1 << 11), MOVABLE_INS));
				FAIL_IF(push_inst(compiler, LDC1 | base | FT(float_arg_count) | IMM(local_size + (arg_count << 2)), MOVABLE_INS));
		case SLJIT_ARG_TYPE_F32:
			if (word_arg_count == 0 && float_arg_count <= 2) {
				if (float_arg_count == 1)
					FAIL_IF(push_inst(compiler, MOV_S | FMT_S | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
			} else if (arg_count < 4)
				FAIL_IF(push_inst(compiler, MTC1 | TA(4 + arg_count) | FS(float_arg_count), MOVABLE_INS));
				FAIL_IF(push_inst(compiler, LWC1 | base | FT(float_arg_count) | IMM(local_size + (arg_count << 2)), MOVABLE_INS));
			if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
				tmp = SLJIT_S0 - saved_arg_count;
			} else if (word_arg_count != arg_count + 1 || arg_count == 0)
				tmp = word_arg_count;
				FAIL_IF(push_inst(compiler, ADDU_W | SA(4 + arg_count) | TA(0) | D(tmp), DR(tmp)));
				FAIL_IF(push_inst(compiler, LW | base | T(tmp) | IMM(local_size + (arg_count << 2)), DR(tmp)));
		arg_types >>= SLJIT_ARG_SHIFT;

	SLJIT_ASSERT(compiler->args_size == (sljit_uw)arg_count << 2);
#else /* !SLJIT_CONFIG_MIPS_32 */
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			if (arg_count != float_arg_count)
				FAIL_IF(push_inst(compiler, MOV_S | FMT_D | FS(arg_count) | FD(float_arg_count), MOVABLE_INS));
			else if (arg_count == 1)
				FAIL_IF(push_inst(compiler, MOV_S | FMT_D | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
		case SLJIT_ARG_TYPE_F32:
			if (arg_count != float_arg_count)
				FAIL_IF(push_inst(compiler, MOV_S | FMT_S | FS(arg_count) | FD(float_arg_count), MOVABLE_INS));
			else if (arg_count == 1)
				FAIL_IF(push_inst(compiler, MOV_S | FMT_S | FS(TMP_FREG1) | FD(SLJIT_FR0), MOVABLE_INS));
			if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
				tmp = SLJIT_S0 - saved_arg_count;
			} else if (word_arg_count != arg_count || word_arg_count <= 1)
				tmp = word_arg_count;
				FAIL_IF(push_inst(compiler, ADDU_W | SA(3 + arg_count) | TA(0) | D(tmp), DR(tmp)));
		arg_types >>= SLJIT_ARG_SHIFT;
#endif /* SLJIT_CONFIG_MIPS_32 */

	return SLJIT_SUCCESS;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
		if ((local_size & SSIZE_OF(sw)) != 0)
			local_size += SSIZE_OF(sw);
		local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));

	compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
	local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
	compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 31) & ~0x1f;

	return SLJIT_SUCCESS;
}
static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 frame_size, sljit_ins *ins_ptr)
{
	sljit_s32 local_size, i, tmp, offset;
	sljit_s32 scratches = compiler->scratches;
	sljit_s32 saveds = compiler->saveds;
	sljit_s32 fsaveds = compiler->fsaveds;
	sljit_s32 fscratches = compiler->fscratches;
	sljit_s32 kept_saveds_count = SLJIT_KEPT_SAVEDS_COUNT(compiler->options);

	local_size = compiler->local_size;

	tmp = GET_SAVED_REGISTERS_SIZE(scratches, saveds - kept_saveds_count, 1);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
		if ((tmp & SSIZE_OF(sw)) != 0)
			tmp += SSIZE_OF(sw);
		tmp += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));

	tmp += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));

	if (local_size <= SIMM_MAX) {
		if (local_size < frame_size) {
			FAIL_IF(push_inst(compiler, ADDIU_W | S(SLJIT_SP) | T(SLJIT_SP) | IMM(local_size - frame_size), DR(SLJIT_SP)));
			local_size = frame_size;
		if (tmp < frame_size)
		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), local_size - tmp));
		FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | T(TMP_REG1) | D(SLJIT_SP), DR(SLJIT_SP)));

	SLJIT_ASSERT(local_size >= frame_size);

	offset = local_size - SSIZE_OF(sw);
	if (frame_size == 0)
		FAIL_IF(push_inst(compiler, STACK_LOAD | S(SLJIT_SP) | TA(RETURN_ADDR_REG) | IMM(offset), RETURN_ADDR_REG));

	tmp = SLJIT_S0 - saveds;
	for (i = SLJIT_S0 - kept_saveds_count; i > tmp; i--) {
		offset -= SSIZE_OF(sw);
		FAIL_IF(push_inst(compiler, STACK_LOAD | S(SLJIT_SP) | T(i) | IMM(offset), MOVABLE_INS));

	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
		offset -= SSIZE_OF(sw);
		FAIL_IF(push_inst(compiler, STACK_LOAD | S(SLJIT_SP) | T(i) | IMM(offset), MOVABLE_INS));

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	/* This alignment is valid because offset is not used after restoring FPU regs. */
	if ((offset & SSIZE_OF(sw)) != 0)
		offset -= SSIZE_OF(sw);

	tmp = SLJIT_FS0 - fsaveds;
	for (i = SLJIT_FS0; i > tmp; i--) {
		offset -= SSIZE_OF(f64);
		FAIL_IF(push_inst(compiler, LDC1 | S(SLJIT_SP) | FT(i) | IMM(offset), MOVABLE_INS));

	for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
		offset -= SSIZE_OF(f64);
		FAIL_IF(push_inst(compiler, LDC1 | S(SLJIT_SP) | FT(i) | IMM(offset), MOVABLE_INS));

	if (local_size > frame_size)
		*ins_ptr = ADDIU_W | S(SLJIT_SP) | T(SLJIT_SP) | IMM(local_size - frame_size);

	return SLJIT_SUCCESS;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
{
	CHECK(check_sljit_emit_return_void(compiler));

	emit_stack_frame_release(compiler, 0, &ins);

	FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS));
	return push_inst(compiler, ins, UNMOVABLE_INS);
}
/* --------------------------------------------------------------------- */
/* --------------------------------------------------------------------- */

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define ARCH_32_64(a, b) a
#else /* !SLJIT_CONFIG_MIPS_32 */
#define ARCH_32_64(a, b) b
#endif /* SLJIT_CONFIG_MIPS_32 */

static const sljit_ins data_transfer_insts[16 + 4] = {
/* u w s */ ARCH_32_64(HI(43) /* sw */, HI(63) /* sd */),
/* u w l */ ARCH_32_64(HI(35) /* lw */, HI(55) /* ld */),
/* u b s */ HI(40) /* sb */,
/* u b l */ HI(36) /* lbu */,
/* u h s */ HI(41) /* sh */,
/* u h l */ HI(37) /* lhu */,
/* u i s */ HI(43) /* sw */,
/* u i l */ ARCH_32_64(HI(35) /* lw */, HI(39) /* lwu */),

/* s w s */ ARCH_32_64(HI(43) /* sw */, HI(63) /* sd */),
/* s w l */ ARCH_32_64(HI(35) /* lw */, HI(55) /* ld */),
/* s b s */ HI(40) /* sb */,
/* s b l */ HI(32) /* lb */,
/* s h s */ HI(41) /* sh */,
/* s h l */ HI(33) /* lh */,
/* s i s */ HI(43) /* sw */,
/* s i l */ HI(35) /* lw */,

/* d   s */ HI(61) /* sdc1 */,
/* d   l */ HI(53) /* ldc1 */,
/* s   s */ HI(57) /* swc1 */,
/* s   l */ HI(49) /* lwc1 */,
};
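/* Illustrative note (not part of the original sources): the load/store emitters below
   index this table with the data-type flags defined earlier. For example
   BYTE_DATA | SIGNED_DATA | LOAD_DATA == 0x0b selects HI(32), i.e. a signed "lb",
   while DOUBLE_DATA | LOAD_DATA == 0x11 selects HI(53), i.e. "ldc1". */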
/* reg_ar is an absolute register! */

/* Can perform an operation using at most 1 instruction. */
static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
{
	SLJIT_ASSERT(arg & SLJIT_MEM);

	if (!(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN) {
		/* Works for both absolute and relative addresses. */
		if (SLJIT_UNLIKELY(flags & ARG_TEST))
		FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(arg & REG_MASK)
			| TA(reg_ar) | IMM(argw), ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? reg_ar : MOVABLE_INS));

#define TO_ARGW_HI(argw) (((argw) & ~0xffff) + (((argw) & 0x8000) ? 0x10000 : 0))
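/* Worked example (illustrative only): for argw == 0x12348765 the macro yields
   0x12350000, because the low half 0x8765 is negative when sign extended through the
   16-bit immediate field. The emitters therefore load 0x12350000 into a register and
   address the operand with IMM(-0x789b), which adds back up to the original offset. */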
/* See getput_arg below.
   Note: can_cache is called only for binary operators. */
static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
	SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));

	/* Simple operation except for updates. */
	if (arg & OFFS_REG_MASK) {
		if (argw && argw == next_argw && (arg == next_arg || (arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK)))

	if (arg == next_arg) {
		if (((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN)
			|| TO_ARGW_HI(argw) == TO_ARGW_HI(next_argw))
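/* Illustrative note (not part of the original sources): caching pays off when two
   consecutive memory operands share the same upper 16 bits. Accessing [reg + 0x12340]
   and then [reg + 0x12348] needs the upper part of the offset only once: the first
   getput_arg call leaves it in TMP_REG3 and the second one reuses it with a short
   immediate offset. */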
/* Emit the necessary instructions. See can_cache above. */
static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
{
	sljit_s32 tmp_ar, base, delay_slot;
	sljit_sw offset, argw_hi;

	SLJIT_ASSERT(arg & SLJIT_MEM);
	if (!(next_arg & SLJIT_MEM)) {

	/* Since tmp can be the same as base or offset registers,
	 * these might be unavailable after modifying tmp. */
	if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) {
		delay_slot = reg_ar;
		tmp_ar = DR(TMP_REG1);
		delay_slot = MOVABLE_INS;
	base = arg & REG_MASK;

	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
		/* Using the cache. */
		if (argw == compiler->cache_argw) {
			if (arg == compiler->cache_arg)
				return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);

			if ((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) {
				if (arg == next_arg && argw == (next_argw & 0x3)) {
					compiler->cache_arg = arg;
					compiler->cache_argw = argw;
					FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(TMP_REG3), DR(TMP_REG3)));
					return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);
				FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | DA(tmp_ar), tmp_ar));
				return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

		if (SLJIT_UNLIKELY(argw)) {
			compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK);
			compiler->cache_argw = argw;
			FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(arg)) | D(TMP_REG3) | SH_IMM(argw), DR(TMP_REG3)));

		if (arg == next_arg && argw == (next_argw & 0x3)) {
			compiler->cache_arg = arg;
			compiler->cache_argw = argw;
			FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | D(TMP_REG3), DR(TMP_REG3)));
			tmp_ar = DR(TMP_REG3);
		FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | DA(tmp_ar), tmp_ar));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

	if (compiler->cache_arg == arg && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN)
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar) | IMM(argw - compiler->cache_argw), delay_slot);

	if (compiler->cache_arg == SLJIT_MEM && (argw - compiler->cache_argw) <= SIMM_MAX && (argw - compiler->cache_argw) >= SIMM_MIN) {
		offset = argw - compiler->cache_argw;

	compiler->cache_arg = SLJIT_MEM;

	argw_hi = TO_ARGW_HI(argw);

	if (next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN && argw_hi != TO_ARGW_HI(next_argw)) {
		FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw));
		compiler->cache_argw = argw;
		FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw_hi));
		compiler->cache_argw = argw_hi;
		offset = argw & 0xffff;

		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar) | IMM(offset), delay_slot);

	if (arg == next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN) {
		compiler->cache_arg = arg;
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | D(TMP_REG3), DR(TMP_REG3)));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar) | IMM(offset), delay_slot);

	FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | DA(tmp_ar), tmp_ar));
	return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar) | IMM(offset), delay_slot);
}
static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
{
	sljit_s32 tmp_ar, base, delay_slot;

	if (getput_arg_fast(compiler, flags, reg_ar, arg, argw))
		return compiler->error;

	if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) {
		delay_slot = reg_ar;
		tmp_ar = DR(TMP_REG1);
		delay_slot = MOVABLE_INS;
	base = arg & REG_MASK;

	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
		if (SLJIT_UNLIKELY(argw)) {
			FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(arg)) | DA(tmp_ar) | SH_IMM(argw), tmp_ar));
			FAIL_IF(push_inst(compiler, ADDU_W | S(base) | TA(tmp_ar) | DA(tmp_ar), tmp_ar));
			FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(OFFS_REG(arg)) | DA(tmp_ar), tmp_ar));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

	FAIL_IF(load_immediate(compiler, tmp_ar, TO_ARGW_HI(argw)));
	FAIL_IF(push_inst(compiler, ADDU_W | S(base) | TA(tmp_ar) | DA(tmp_ar), tmp_ar));
	return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar) | IMM(argw), delay_slot);
}
static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
{
	if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
		return compiler->error;
	return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
}
#define EMIT_LOGICAL(op_imm, op_reg) \
	if (flags & SRC2_IMM) { \
		if (op & SLJIT_SET_Z) \
			FAIL_IF(push_inst(compiler, op_imm | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG)); \
		if (!(flags & UNUSED_DEST)) \
			FAIL_IF(push_inst(compiler, op_imm | S(src1) | T(dst) | IMM(src2), DR(dst))); \
		if (op & SLJIT_SET_Z) \
			FAIL_IF(push_inst(compiler, op_reg | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
		if (!(flags & UNUSED_DEST)) \
			FAIL_IF(push_inst(compiler, op_reg | S(src1) | T(src2) | D(dst), DR(dst))); \

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)

#define SELECT_OP(a, b) (b)

#define EMIT_SHIFT(op_dimm, op_dimm32, op_imm, op_dv, op_v) \
	if (flags & SRC2_IMM) { \
		if (op & SLJIT_SET_Z) \
			FAIL_IF(push_inst(compiler, op_imm | T(src1) | DA(EQUAL_FLAG) | SH_IMM(src2), EQUAL_FLAG)); \
		if (!(flags & UNUSED_DEST)) \
			FAIL_IF(push_inst(compiler, op_imm | T(src1) | D(dst) | SH_IMM(src2), DR(dst))); \
		if (op & SLJIT_SET_Z) \
			FAIL_IF(push_inst(compiler, op_v | S(src2) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
		if (!(flags & UNUSED_DEST)) \
			FAIL_IF(push_inst(compiler, op_v | S(src2) | T(src1) | D(dst), DR(dst))); \

#else /* !SLJIT_CONFIG_MIPS_32 */

#define SELECT_OP(a, b) \
	(!(op & SLJIT_32) ? a : b)

#define EMIT_SHIFT(op_dimm, op_dimm32, op_imm, op_dv, op_v) \
	if (flags & SRC2_IMM) { \
			SLJIT_ASSERT(!(op & SLJIT_32)); \
		ins = (op & SLJIT_32) ? op_imm : op_dimm; \
		if (op & SLJIT_SET_Z) \
			FAIL_IF(push_inst(compiler, ins | T(src1) | DA(EQUAL_FLAG) | SH_IMM(src2), EQUAL_FLAG)); \
		if (!(flags & UNUSED_DEST)) \
			FAIL_IF(push_inst(compiler, ins | T(src1) | D(dst) | SH_IMM(src2), DR(dst))); \
		ins = (op & SLJIT_32) ? op_v : op_dv; \
		if (op & SLJIT_SET_Z) \
			FAIL_IF(push_inst(compiler, ins | S(src2) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
		if (!(flags & UNUSED_DEST)) \
			FAIL_IF(push_inst(compiler, ins | S(src2) | T(src1) | D(dst), DR(dst))); \

#endif /* SLJIT_CONFIG_MIPS_32 */
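/* Illustrative note (not part of the original sources): on MIPS64 SELECT_OP picks the
   64-bit opcode unless the operation is tagged with SLJIT_32. An immediate left shift
   by 3 therefore expands to SLL | T(src1) | D(dst) | SH_IMM(3) for a 32-bit operation
   and to DSLL | T(src1) | D(dst) | SH_IMM(3) for the 64-bit form (the ISA encodes
   64-bit shift amounts of 32..63 with the separate DSLL32/DSRL32/DSRA32 opcodes). */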
static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
	sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
	sljit_s32 is_overflow, is_carry, carry_src_ar, is_handled;
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)

	switch (GET_OPCODE(op)) {
		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
			return push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src2) | TA(0) | D(dst), DR(dst));
		return SLJIT_SUCCESS;

		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
			return push_inst(compiler, ANDI | S(src2) | T(dst) | IMM(0xff), DR(dst));
		SLJIT_ASSERT(dst == src2);
		return SLJIT_SUCCESS;

		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
			return push_inst(compiler, SEB | T(src2) | D(dst), DR(dst));
#else /* SLJIT_MIPS_REV < 1 */
			FAIL_IF(push_inst(compiler, SLL | T(src2) | D(dst) | SH_IMM(24), DR(dst)));
			return push_inst(compiler, SRA | T(dst) | D(dst) | SH_IMM(24), DR(dst));
#endif /* SLJIT_MIPS_REV >= 1 */
#else /* !SLJIT_CONFIG_MIPS_32 */
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
				return push_inst(compiler, SEB | T(src2) | D(dst), DR(dst));
#endif /* SLJIT_MIPS_REV >= 1 */
			FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(24), DR(dst)));
			return push_inst(compiler, DSRA32 | T(dst) | D(dst) | SH_IMM(24), DR(dst));
#endif /* SLJIT_CONFIG_MIPS_32 */
		SLJIT_ASSERT(dst == src2);
		return SLJIT_SUCCESS;

		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
			return push_inst(compiler, ANDI | S(src2) | T(dst) | IMM(0xffff), DR(dst));
		SLJIT_ASSERT(dst == src2);
		return SLJIT_SUCCESS;

		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
			return push_inst(compiler, SEH | T(src2) | D(dst), DR(dst));
#else /* SLJIT_MIPS_REV < 1 */
			FAIL_IF(push_inst(compiler, SLL | T(src2) | D(dst) | SH_IMM(16), DR(dst)));
			return push_inst(compiler, SRA | T(dst) | D(dst) | SH_IMM(16), DR(dst));
#endif /* SLJIT_MIPS_REV >= 1 */
#else /* !SLJIT_CONFIG_MIPS_32 */
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
				return push_inst(compiler, SEH | T(src2) | D(dst), DR(dst));
#endif /* SLJIT_MIPS_REV >= 1 */
			FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(16), DR(dst)));
			return push_inst(compiler, DSRA32 | T(dst) | D(dst) | SH_IMM(16), DR(dst));
#endif /* SLJIT_CONFIG_MIPS_32 */
		SLJIT_ASSERT(dst == src2);
		return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM) && !(op & SLJIT_32));
		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
			return push_inst(compiler, DINSU | T(src2) | SA(0) | (31 << 11) | (0 << 11), DR(dst));
#endif /* SLJIT_MIPS_REV >= 2 */
			FAIL_IF(push_inst(compiler, DSLL32 | T(src2) | D(dst) | SH_IMM(0), DR(dst)));
			return push_inst(compiler, DSRL32 | T(dst) | D(dst) | SH_IMM(0), DR(dst));
		SLJIT_ASSERT(dst == src2);
		return SLJIT_SUCCESS;

		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM) && !(op & SLJIT_32));
		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
			return push_inst(compiler, SLL | T(src2) | D(dst) | SH_IMM(0), DR(dst));
		SLJIT_ASSERT(dst == src2);
		return SLJIT_SUCCESS;
#endif /* SLJIT_CONFIG_MIPS_64 */

		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
		if (op & SLJIT_SET_Z)
			FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
		if (!(flags & UNUSED_DEST))
			FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | D(dst), DR(dst)));
		return SLJIT_SUCCESS;

		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
		if (op & SLJIT_SET_Z)
			FAIL_IF(push_inst(compiler, SELECT_OP(DCLZ, CLZ) | S(src2) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
		if (!(flags & UNUSED_DEST))
			FAIL_IF(push_inst(compiler, SELECT_OP(DCLZ, CLZ) | S(src2) | T(dst) | D(dst), DR(dst)));
#else /* SLJIT_MIPS_REV < 1 */
		/* Nearly all instructions are unmovable in the following sequence. */
		FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src2) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
		FAIL_IF(push_inst(compiler, BEQ | S(TMP_REG1) | TA(0) | IMM(5), UNMOVABLE_INS));
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		FAIL_IF(push_inst(compiler, ORI | SA(0) | T(dst) | IMM(32), UNMOVABLE_INS));
#else /* !SLJIT_CONFIG_MIPS_32 */
		FAIL_IF(push_inst(compiler, ORI | SA(0) | T(dst) | IMM((op & SLJIT_32) ? 32 : 64), UNMOVABLE_INS));
#endif /* SLJIT_CONFIG_MIPS_32 */
		FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | T(dst) | IMM(-1), DR(dst)));
		/* Loop for searching the highest bit. */
		FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(dst) | T(dst) | IMM(1), DR(dst)));
		FAIL_IF(push_inst(compiler, BGEZ | S(TMP_REG1) | IMM(-2), UNMOVABLE_INS));
		FAIL_IF(push_inst(compiler, SELECT_OP(DSLL, SLL) | T(TMP_REG1) | D(TMP_REG1) | SH_IMM(1), UNMOVABLE_INS));
#endif /* SLJIT_MIPS_REV >= 1 */
		return SLJIT_SUCCESS;

		/* Overflow computation (both add and sub): overflow = src1_sign ^ src2_sign ^ result_sign ^ carry_flag */
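		/* Worked example (illustrative only): for the 32-bit addition 0x7fffffff + 1 the
		   source signs are 0, the result sign is 1 and no unsigned carry is produced, so
		   overflow = 0 ^ 0 ^ 1 ^ 0 = 1; for 0xffffffff + 1 a carry is produced and the
		   result sign flips back to 0, so the formula yields 0 (no signed overflow). */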
		is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
		carry_src_ar = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);

		if (flags & SRC2_IMM) {
				FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
				FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
			else if (op & SLJIT_SET_Z)
				FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));

			/* Only the zero flag is needed. */
			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
				FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(src2), DR(dst)));

				FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
			else if (op & SLJIT_SET_Z)
				FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));

			if (is_overflow || carry_src_ar != 0) {
					carry_src_ar = DR(src1);
				else if (src2 != dst)
					carry_src_ar = DR(src2);
					FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | TA(0) | DA(OTHER_FLAG), OTHER_FLAG));
					carry_src_ar = OTHER_FLAG;

			/* Only the zero flag is needed. */
			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
				FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | T(src2) | D(dst), DR(dst)));

		/* Carry is zero if a + b >= a or a + b >= b, otherwise it is 1. */
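		/* Worked example (illustrative only): with 32-bit operands 0xfffffffe + 3 wraps
		   to 1, and since the unsigned result 1 is smaller than the source operand 3 the
		   SLTU below reports carry = 1; for 5 + 3 = 8 the result is not smaller than
		   either source, so the carry flag stays 0. */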
		if (is_overflow || carry_src_ar != 0) {
			if (flags & SRC2_IMM)
				FAIL_IF(push_inst(compiler, SLTIU | S(dst) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
				FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(carry_src_ar) | DA(OTHER_FLAG), OTHER_FLAG));
			return SLJIT_SUCCESS;

		FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(EQUAL_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
		if (op & SLJIT_SET_Z)
			FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
		FAIL_IF(push_inst(compiler, SELECT_OP(DSRL32, SRL) | T(TMP_REG1) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
		return push_inst(compiler, XOR | S(TMP_REG1) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);

		carry_src_ar = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);

		if (flags & SRC2_IMM) {
			FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(src2), DR(dst)));
			if (carry_src_ar != 0) {
					carry_src_ar = DR(src1);
				else if (src2 != dst)
					carry_src_ar = DR(src2);
					FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
					carry_src_ar = EQUAL_FLAG;
			FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | T(src2) | D(dst), DR(dst)));

		/* Carry is zero if a + b >= a or a + b >= b, otherwise it is 1. */
		if (carry_src_ar != 0) {
			if (flags & SRC2_IMM)
				FAIL_IF(push_inst(compiler, SLTIU | S(dst) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
				FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(carry_src_ar) | DA(EQUAL_FLAG), EQUAL_FLAG));

		FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));

		if (carry_src_ar == 0)
			return SLJIT_SUCCESS;

		/* Set ULESS_FLAG (dst == 0) && (OTHER_FLAG == 1). */
		FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
		/* Set carry flag. */
		return push_inst(compiler, OR | SA(OTHER_FLAG) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);

		if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
			FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));

		if (flags & SRC2_IMM) {
			if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
				FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
			else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
				FAIL_IF(push_inst(compiler, SLTI | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));

		if (!is_handled && GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
			if (flags & SRC2_IMM) {
				FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));

			switch (GET_FLAG_TYPE(op)) {
			case SLJIT_GREATER_EQUAL:
				FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
			case SLJIT_LESS_EQUAL:
				FAIL_IF(push_inst(compiler, SLTU | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));
			case SLJIT_SIG_LESS:
			case SLJIT_SIG_GREATER_EQUAL:
				FAIL_IF(push_inst(compiler, SLT | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
			case SLJIT_SIG_GREATER:
			case SLJIT_SIG_LESS_EQUAL:
				FAIL_IF(push_inst(compiler, SLT | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));

		if (flags & SRC2_IMM) {
			if (op & SLJIT_SET_Z)
				FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | TA(EQUAL_FLAG) | IMM(-src2), EQUAL_FLAG));
			if (!(flags & UNUSED_DEST))
				return push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(-src2), DR(dst));
			if (op & SLJIT_SET_Z)
				FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
			if (!(flags & UNUSED_DEST))
				return push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | D(dst), DR(dst));
		return SLJIT_SUCCESS;

		is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
		is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);

		if (flags & SRC2_IMM) {
				FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
				FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
			else if (op & SLJIT_SET_Z)
				FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | TA(EQUAL_FLAG) | IMM(-src2), EQUAL_FLAG));

			if (is_overflow || is_carry)
				FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));

			/* Only the zero flag is needed. */
			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
				FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(-src2), DR(dst)));

				FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
			else if (op & SLJIT_SET_Z)
				FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));

			if (is_overflow || is_carry)
				FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));

			/* Only the zero flag is needed. */
			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
				FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | D(dst), DR(dst)));

			return SLJIT_SUCCESS;

		FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(EQUAL_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
		if (op & SLJIT_SET_Z)
			FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
		FAIL_IF(push_inst(compiler, SELECT_OP(DSRL32, SRL) | T(TMP_REG1) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
		return push_inst(compiler, XOR | S(TMP_REG1) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);

		if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
			FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));

		is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);

		if (flags & SRC2_IMM) {
				FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
			FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(-src2), DR(dst)));
				FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
			FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | D(dst), DR(dst)));

		FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
		FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));

			return SLJIT_SUCCESS;

		return push_inst(compiler, OR | SA(EQUAL_FLAG) | T(TMP_REG1) | DA(OTHER_FLAG), OTHER_FLAG);

		SLJIT_ASSERT(!(flags & SRC2_IMM));

		if (GET_FLAG_TYPE(op) != SLJIT_OVERFLOW) {
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
			return push_inst(compiler, SELECT_OP(DMUL, MUL) | S(src1) | T(src2) | D(dst), DR(dst));
#elif (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
			return push_inst(compiler, MUL | S(src1) | T(src2) | D(dst), DR(dst));
#else /* !SLJIT_CONFIG_MIPS_32 */
				return push_inst(compiler, MUL | S(src1) | T(src2) | D(dst), DR(dst));
			FAIL_IF(push_inst(compiler, DMULT | S(src1) | T(src2), MOVABLE_INS));
			return push_inst(compiler, MFLO | D(dst), DR(dst));
#endif /* SLJIT_CONFIG_MIPS_32 */
#else /* SLJIT_MIPS_REV < 1 */
			FAIL_IF(push_inst(compiler, SELECT_OP(DMULT, MULT) | S(src1) | T(src2), MOVABLE_INS));
			return push_inst(compiler, MFLO | D(dst), DR(dst));
#endif /* SLJIT_MIPS_REV >= 6 */

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
		FAIL_IF(push_inst(compiler, SELECT_OP(DMUL, MUL) | S(src1) | T(src2) | D(dst), DR(dst)));
		FAIL_IF(push_inst(compiler, SELECT_OP(DMUH, MUH) | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
#else /* SLJIT_MIPS_REV < 6 */
		FAIL_IF(push_inst(compiler, SELECT_OP(DMULT, MULT) | S(src1) | T(src2), MOVABLE_INS));
		FAIL_IF(push_inst(compiler, MFHI | DA(EQUAL_FLAG), EQUAL_FLAG));
		FAIL_IF(push_inst(compiler, MFLO | D(dst), DR(dst)));
#endif /* SLJIT_MIPS_REV >= 6 */
		FAIL_IF(push_inst(compiler, SELECT_OP(DSRA32, SRA) | T(dst) | DA(OTHER_FLAG) | SH_IMM(31), OTHER_FLAG));
		return push_inst(compiler, SELECT_OP(DSUBU, SUBU) | SA(EQUAL_FLAG) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);

		EMIT_LOGICAL(ANDI, AND);
		return SLJIT_SUCCESS;

		EMIT_LOGICAL(ORI, OR);
		return SLJIT_SUCCESS;

		EMIT_LOGICAL(XORI, XOR);
		return SLJIT_SUCCESS;

		EMIT_SHIFT(DSLL, DSLL32, SLL, DSLLV, SLLV);
		return SLJIT_SUCCESS;

		EMIT_SHIFT(DSRL, DSRL32, SRL, DSRLV, SRLV);
		return SLJIT_SUCCESS;

		EMIT_SHIFT(DSRA, DSRA32, SRA, DSRAV, SRAV);
		return SLJIT_SUCCESS;

	SLJIT_UNREACHABLE();
	return SLJIT_SUCCESS;
}
#define CHECK_IMM(flags, srcw) \
	((!((flags) & LOGICAL_OP) && ((srcw) <= SIMM_MAX && (srcw) >= SIMM_MIN)) \
		|| (((flags) & LOGICAL_OP) && !((srcw) & ~UIMM_MAX)))
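/* Added note (not from the original source): CHECK_IMM mirrors the two MIPS
   immediate encodings.  Arithmetic forms (ADDIU/SLTI/...) sign-extend a 16-bit
   field, so a non-LOGICAL_OP immediate must lie in [SIMM_MIN, SIMM_MAX]; the
   logical forms (ANDI/ORI/XORI) zero-extend, so a LOGICAL_OP immediate must fit
   in [0, UIMM_MAX].  Assuming the usual 16-bit values of these constants:
       CHECK_IMM(0, -1)              -> nonzero (ADDIU can encode -1)
       CHECK_IMM(LOGICAL_OP, -1)     -> 0       (ORI cannot; -1 needs a register)
       CHECK_IMM(LOGICAL_OP, 0xffff) -> nonzero (fits the zero-extended field) */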
static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	/* arg1 goes to TMP_REG1 or src reg
	   arg2 goes to TMP_REG2, imm or src reg
	   TMP_REG3 can be used for caching
	   result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
	sljit_s32 dst_r = TMP_REG2;
	sljit_sw src2_r = 0;
	sljit_s32 sugg_src2_r = TMP_REG2;

	if (!(flags & ALT_KEEP_CACHE)) {
		compiler->cache_arg = 0;
		compiler->cache_argw = 0;

	if (dst == TMP_REG2) {
		SLJIT_ASSERT(HAS_FLAGS(op));
		flags |= UNUSED_DEST;
	else if (FAST_IS_REG(dst)) {
		if (flags & MOVE_OP)
			sugg_src2_r = dst_r;
	else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, DR(TMP_REG1), dst, dstw))

	if (flags & IMM_OP) {
		if ((src2 & SLJIT_IMM) && src2w != 0 && CHECK_IMM(flags, src2w)) {
		} else if ((flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w != 0 && CHECK_IMM(flags, src1w)) {
			/* And swap arguments. */
			/* src2w = src2_r unneeded. */

	if (FAST_IS_REG(src1)) {
		flags |= REG1_SOURCE;
	else if (src1 & SLJIT_IMM) {
			FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w));
		if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w))
			FAIL_IF(compiler->error);

	if (FAST_IS_REG(src2)) {
		flags |= REG2_SOURCE;
		if ((flags & (REG_DEST | MOVE_OP)) == MOVE_OP)
			dst_r = (sljit_s32)src2_r;
	else if (src2 & SLJIT_IMM) {
		if (!(flags & SRC2_IMM)) {
				FAIL_IF(load_immediate(compiler, DR(sugg_src2_r), src2w));
				src2_r = sugg_src2_r;
			if (flags & MOVE_OP) {
				if (dst & SLJIT_MEM)
		if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w))
			FAIL_IF(compiler->error);
			src2_r = sugg_src2_r;

	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
		SLJIT_ASSERT(src2_r == TMP_REG2);
		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, src1, src1w));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, src2, src2w));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, dst, dstw));
	else if (flags & SLOW_SRC1)
		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
	else if (flags & SLOW_SRC2)
		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w, dst, dstw));

	FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));

	if (dst & SLJIT_MEM) {
		if (!(flags & SLOW_DEST)) {
			getput_arg_fast(compiler, flags, DR(dst_r), dst, dstw);
			return compiler->error;
		return getput_arg(compiler, flags, DR(dst_r), dst, dstw, 0, 0);

	return SLJIT_SUCCESS;
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	sljit_s32 int_op = op & SLJIT_32;

	CHECK(check_sljit_emit_op0(compiler, op));

	op = GET_OPCODE(op);
	case SLJIT_BREAKPOINT:
		return push_inst(compiler, BREAK, UNMOVABLE_INS);
		return push_inst(compiler, NOP, UNMOVABLE_INS);
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? DMULU : DMUL) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? DMUHU : DMUH) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? MULU : MUL) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? MUHU : MUH) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | TA(0) | D(SLJIT_R0), DR(SLJIT_R0)));
		return push_inst(compiler, ADDU_W | S(TMP_REG1) | TA(0) | D(SLJIT_R1), DR(SLJIT_R1));
#else /* SLJIT_MIPS_REV < 6 */
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? DMULTU : DMULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? MULTU : MULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0)));
		return push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1));
#endif /* SLJIT_MIPS_REV >= 6 */
	case SLJIT_DIVMOD_UW:
	case SLJIT_DIVMOD_SW:
		SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? MODU : MOD) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DDIVU : DDIV) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DMODU : DMOD) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
		FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? MODU : MOD) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | TA(0) | D(SLJIT_R0), DR(SLJIT_R0)));
		return (op >= SLJIT_DIV_UW) ? SLJIT_SUCCESS : push_inst(compiler, ADDU_W | S(TMP_REG1) | TA(0) | D(SLJIT_R1), DR(SLJIT_R1));
#else /* SLJIT_MIPS_REV < 6 */
#if !(defined SLJIT_MIPS_REV)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
#endif /* !SLJIT_MIPS_REV */
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DDIVU : DDIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0)));
		return (op >= SLJIT_DIV_UW) ? SLJIT_SUCCESS : push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1));
#endif /* SLJIT_MIPS_REV >= 6 */
	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
		return SLJIT_SUCCESS;

	return SLJIT_SUCCESS;
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
	sljit_s32 src, sljit_sw srcw)
{
	if (!(src & OFFS_REG_MASK)) {
		if (srcw <= SIMM_MAX && srcw >= SIMM_MIN)
			return push_inst(compiler, PREF | S(src & REG_MASK) | IMM(srcw), MOVABLE_INS);

		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
		return push_inst(compiler, PREFX | S(src & REG_MASK) | T(TMP_REG1), MOVABLE_INS);

	if (SLJIT_UNLIKELY(srcw != 0)) {
		FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(src)) | D(TMP_REG1) | SH_IMM(srcw), DR(TMP_REG1)));
		return push_inst(compiler, PREFX | S(src & REG_MASK) | T(TMP_REG1), MOVABLE_INS);

	return push_inst(compiler, PREFX | S(src & REG_MASK) | T(OFFS_REG(src)), MOVABLE_INS);

#endif /* SLJIT_MIPS_REV >= 1 */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 flags = 0;

	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src, srcw);

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		flags = INT_DATA | SIGNED_DATA;

	switch (GET_OPCODE(op)) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		return emit_op(compiler, SLJIT_MOV, WORD_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, srcw);

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u32)srcw : srcw);

		return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s32)srcw : srcw);

		return emit_op(compiler, op, BYTE_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);

		return emit_op(compiler, op, BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);

		return emit_op(compiler, op, HALF_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);

		return emit_op(compiler, op, HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);

		return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);

		return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);

	SLJIT_UNREACHABLE();
	return SLJIT_SUCCESS;
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 flags = 0;

	CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (op & SLJIT_32) {
		flags |= INT_DATA | SIGNED_DATA;
		if (src1 & SLJIT_IMM)
			src1w = (sljit_s32)src1w;
		if (src2 & SLJIT_IMM)
			src2w = (sljit_s32)src2w;

	switch (GET_OPCODE(op)) {
		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
		return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

		compiler->status_flags_state = 0;
		return emit_op(compiler, op, flags | CUMULATIVE_OP, dst, dstw, src1, src1w, src2, src2w);

		return emit_op(compiler, op, flags | CUMULATIVE_OP | LOGICAL_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		if (src2 & SLJIT_IMM)

		if (src2 & SLJIT_IMM) {

		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

	SLJIT_UNREACHABLE();
	return SLJIT_SUCCESS;
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));

	SLJIT_SKIP_CHECKS(compiler);
	return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	case SLJIT_FAST_RETURN:
		if (FAST_IS_REG(src))
			FAIL_IF(push_inst(compiler, ADDU_W | S(src) | TA(0) | DA(RETURN_ADDR_REG), RETURN_ADDR_REG));

			FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, RETURN_ADDR_REG, src, srcw));

		FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS));
		return push_inst(compiler, NOP, UNMOVABLE_INS);
	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
		return SLJIT_SUCCESS;
	case SLJIT_PREFETCH_L1:
	case SLJIT_PREFETCH_L2:
	case SLJIT_PREFETCH_L3:
	case SLJIT_PREFETCH_ONCE:
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
		return emit_prefetch(compiler, src, srcw);
#else /* SLJIT_MIPS_REV < 1 */
		return SLJIT_SUCCESS;
#endif /* SLJIT_MIPS_REV >= 1 */

	return SLJIT_SUCCESS;
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
	return reg_map[reg];

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
	void *instruction, sljit_u32 size)
{
	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));

	return push_inst(compiler, *(sljit_ins *)instruction, UNMOVABLE_INS);
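/* Illustrative note (not part of the original source): sljit_emit_op_custom
   copies one raw instruction word into the stream.  On MIPS every instruction
   is a 32-bit sljit_ins, so a caller could emit a memory barrier directly; the
   opcode value below is an assumption used only for this sketch (SYNC is the
   SPECIAL opcode with function code 0x0f):

       sljit_ins sync = 0x0000000f;
       sljit_emit_op_custom(compiler, &sync, sizeof(sync));
*/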
/* --------------------------------------------------------------------- */
/*  Floating point operators                                              */
/* --------------------------------------------------------------------- */

#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_32) >> 7))
#define FMT(op) ((((sljit_ins)op & SLJIT_32) ^ SLJIT_32) << (21 - 8))
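/* Added note (not from the original source): both macros key off the SLJIT_32
   bit (0x100, see the compile-time assert in sljit_emit_fop1).  FMT(op) shifts
   (op & SLJIT_32) ^ SLJIT_32 left by 13, so it adds 1 << 21 exactly when the
   operation is double precision; since the single-precision COP1 opcodes
   already carry fmt = S (16) in bits 25..21, that addition turns them into
   their fmt = D (17) forms.  FLOAT_DATA(op) maps the same bit onto the
   DOUBLE_DATA/SINGLE_DATA transfer kinds used by the load/store helpers. */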
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#	define flags (sljit_u32)0
	sljit_u32 flags = ((sljit_u32)(GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)) << 21;

	if (src & SLJIT_MEM) {
		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src, srcw, dst, dstw));

	FAIL_IF(push_inst(compiler, (TRUNC_W_S ^ (flags >> 19)) | FMT(op) | FS(src) | FD(TMP_FREG1), MOVABLE_INS));

	if (FAST_IS_REG(dst)) {
		FAIL_IF(push_inst(compiler, MFC1 | flags | T(dst) | FS(TMP_FREG1), MOVABLE_INS));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

		return SLJIT_SUCCESS;

	/* Store the integer value from a VFP register. */
	return emit_op_mem2(compiler, flags ? DOUBLE_DATA : SINGLE_DATA, FR(TMP_FREG1), dst, dstw, 0, 0);

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#	define flags (sljit_u32)0
	sljit_u32 flags = ((sljit_u32)(GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)) << 21;

	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;

	if (FAST_IS_REG(src)) {
		FAIL_IF(push_inst(compiler, MTC1 | flags | T(src) | FS(TMP_FREG1), MOVABLE_INS));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	} else if (src & SLJIT_MEM) {
		/* Load the integer value into a VFP register. */
		FAIL_IF(emit_op_mem2(compiler, (flags ? DOUBLE_DATA : SINGLE_DATA) | LOAD_DATA, FR(TMP_FREG1), src, srcw, dst, dstw));

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
			srcw = (sljit_s32)srcw;

		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
		FAIL_IF(push_inst(compiler, MTC1 | flags | T(TMP_REG1) | FS(TMP_FREG1), MOVABLE_INS));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

	FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | ((((sljit_ins)op & SLJIT_32) ^ SLJIT_32) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS));

	if (dst & SLJIT_MEM)
		return emit_op_mem2(compiler, FLOAT_DATA(op), FR(TMP_FREG1), dst, dstw, 0, 0);
	return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	if (src1 & SLJIT_MEM) {
		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src1, src1w, src2, src2w));

	if (src2 & SLJIT_MEM) {
		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG2), src2, src2w, 0, 0));

	switch (GET_FLAG_TYPE(op)) {
	case SLJIT_ORDERED_EQUAL:
	case SLJIT_UNORDERED_OR_NOT_EQUAL:

	case SLJIT_F_NOT_EQUAL:
	case SLJIT_UNORDERED_OR_EQUAL:
	case SLJIT_ORDERED_NOT_EQUAL:

	case SLJIT_ORDERED_LESS:
	case SLJIT_UNORDERED_OR_GREATER_EQUAL:

	case SLJIT_F_GREATER_EQUAL:
	case SLJIT_UNORDERED_OR_LESS:
	case SLJIT_ORDERED_GREATER_EQUAL:

	case SLJIT_F_GREATER:
	case SLJIT_ORDERED_GREATER:
	case SLJIT_UNORDERED_OR_LESS_EQUAL:

	case SLJIT_F_LESS_EQUAL:
	case SLJIT_UNORDERED_OR_GREATER:
	case SLJIT_ORDERED_LESS_EQUAL:

		SLJIT_ASSERT(GET_FLAG_TYPE(op) == SLJIT_UNORDERED || GET_FLAG_TYPE(op) == SLJIT_ORDERED);

	return push_inst(compiler, inst | FMT(op) | FT(src2) | FS(src1) | C_FD, UNMOVABLE_INS);
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	SLJIT_COMPILE_ASSERT((SLJIT_32 == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);

	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)

	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;

	if (src & SLJIT_MEM) {
		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(dst_r), src, srcw, dst, dstw));

	switch (GET_OPCODE(op)) {
		if (dst_r != TMP_FREG1)
			FAIL_IF(push_inst(compiler, MOV_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS));

		FAIL_IF(push_inst(compiler, NEG_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS));

		FAIL_IF(push_inst(compiler, ABS_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS));

	case SLJIT_CONV_F64_FROM_F32:
		/* The SLJIT_32 bit is inverted because sljit_f32 needs to be loaded from the memory. */
		FAIL_IF(push_inst(compiler, CVT_S_S | (sljit_ins)((op & SLJIT_32) ? 1 : (1 << 21)) | FS(src) | FD(dst_r), MOVABLE_INS));

	if (dst & SLJIT_MEM)
		return emit_op_mem2(compiler, FLOAT_DATA(op), FR(dst_r), dst, dstw, 0, 0);
	return SLJIT_SUCCESS;
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_s32 dst_r, flags = 0;

	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG2;

	if (src1 & SLJIT_MEM) {
		if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src1, src1w)) {
			FAIL_IF(compiler->error);

	if (src2 & SLJIT_MEM) {
		if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG2), src2, src2w)) {
			FAIL_IF(compiler->error);

	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG2), src2, src2w, src1, src1w));
			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src1, src1w, dst, dstw));

			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src1, src1w, src2, src2w));
			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG2), src2, src2w, dst, dstw));

	else if (flags & SLOW_SRC1)
		FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG1), src1, src1w, dst, dstw));
	else if (flags & SLOW_SRC2)
		FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, FR(TMP_FREG2), src2, src2w, dst, dstw));

	if (flags & SLOW_SRC1)

	if (flags & SLOW_SRC2)

	switch (GET_OPCODE(op)) {
		FAIL_IF(push_inst(compiler, ADD_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));

		FAIL_IF(push_inst(compiler, SUB_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));

		FAIL_IF(push_inst(compiler, MUL_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));

		FAIL_IF(push_inst(compiler, DIV_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS));

	if (dst_r == TMP_FREG2)
		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), FR(TMP_FREG2), dst, dstw, 0, 0));

	return SLJIT_SUCCESS;
/* --------------------------------------------------------------------- */
/*  Other instructions                                                    */
/* --------------------------------------------------------------------- */

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	if (FAST_IS_REG(dst))
		return push_inst(compiler, ADDU_W | SA(RETURN_ADDR_REG) | TA(0) | D(dst), UNMOVABLE_INS);

	FAIL_IF(emit_op_mem(compiler, WORD_DATA, RETURN_ADDR_REG, dst, dstw));
	compiler->delay_slot = UNMOVABLE_INS;
	return SLJIT_SUCCESS;
/* --------------------------------------------------------------------- */
/*  Conditional instructions                                              */
/* --------------------------------------------------------------------- */

SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
{
	struct sljit_label *label;

	CHECK_PTR(check_sljit_emit_label(compiler));

	if (compiler->last_label && compiler->last_label->size == compiler->size)
		return compiler->last_label;

	label = (struct sljit_label *)ensure_abuf(compiler, sizeof(struct sljit_label));
	PTR_FAIL_IF(!label);
	set_label(label, compiler);
	compiler->delay_slot = UNMOVABLE_INS;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define BRANCH_LENGTH 4
#else /* !SLJIT_CONFIG_MIPS_32 */
#define BRANCH_LENGTH 8
#endif /* SLJIT_CONFIG_MIPS_32 */

	inst = BEQ | SA(src) | TA(0) | BRANCH_LENGTH; \
	flags = IS_BIT26_COND; \

#define BR_NZ(src) \
	inst = BNE | SA(src) | TA(0) | BRANCH_LENGTH; \
	flags = IS_BIT26_COND; \

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
	flags = IS_BIT23_COND; \
	delay_check = FCSR_FCC;
	flags = IS_BIT23_COND; \
	delay_check = FCSR_FCC;
#else /* SLJIT_MIPS_REV < 6 */
	inst = BC1T | BRANCH_LENGTH; \
	flags = IS_BIT16_COND; \
	delay_check = FCSR_FCC;
	inst = BC1F | BRANCH_LENGTH; \
	flags = IS_BIT16_COND; \
	delay_check = FCSR_FCC;
#endif /* SLJIT_MIPS_REV >= 6 */
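/* Added note (an assumption drawn from the surrounding code, not a comment from
   the original source): BRANCH_LENGTH is the 16-bit offset OR-ed into the
   conditional branches built by the BR_* macros above.  It is sized so a taken
   branch steps over the indirect jump, its delay slot and the words reserved
   for loading the target constant; more words are reserved on 64-bit targets,
   which is why the value is 8 there instead of 4. */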
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
	struct sljit_jump *jump;
	sljit_u32 flags = 0;
	sljit_s32 delay_check = UNMOVABLE_INS;

	CHECK_PTR(check_sljit_emit_jump(compiler, type));

	jump = (struct sljit_jump *)ensure_abuf(compiler, sizeof(struct sljit_jump));
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);

	case SLJIT_NOT_EQUAL:

	case SLJIT_SIG_LESS:
	case SLJIT_SIG_GREATER:
	case SLJIT_OVERFLOW:

	case SLJIT_GREATER_EQUAL:
	case SLJIT_LESS_EQUAL:
	case SLJIT_SIG_GREATER_EQUAL:
	case SLJIT_SIG_LESS_EQUAL:
	case SLJIT_NOT_OVERFLOW:
	case SLJIT_NOT_CARRY:

	case SLJIT_F_NOT_EQUAL:
	case SLJIT_F_GREATER_EQUAL:
	case SLJIT_F_GREATER:
	case SLJIT_UNORDERED_OR_NOT_EQUAL:
	case SLJIT_ORDERED_NOT_EQUAL:
	case SLJIT_UNORDERED_OR_GREATER_EQUAL:
	case SLJIT_ORDERED_GREATER_EQUAL:
	case SLJIT_ORDERED_GREATER:
	case SLJIT_UNORDERED_OR_GREATER:

	case SLJIT_F_LESS_EQUAL:
	case SLJIT_ORDERED_EQUAL:
	case SLJIT_UNORDERED_OR_EQUAL:
	case SLJIT_ORDERED_LESS:
	case SLJIT_UNORDERED_OR_LESS:
	case SLJIT_UNORDERED_OR_LESS_EQUAL:
	case SLJIT_ORDERED_LESS_EQUAL:
	case SLJIT_UNORDERED:

		/* Not conditional branch. */

	jump->flags |= flags;
	if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != delay_check))
		jump->flags |= IS_MOVABLE;

	PTR_FAIL_IF(push_inst(compiler, inst, UNMOVABLE_INS));

	if (type <= SLJIT_JUMP)
		PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
		jump->flags |= IS_JAL;
		PTR_FAIL_IF(push_inst(compiler, JALR | S(TMP_REG2) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));

	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

	/* Maximum number of instructions required for generating a constant. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	compiler->size += 2;
	compiler->size += 6;
#define RESOLVE_IMM1() \
	if (src1 & SLJIT_IMM) { \
		PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w)); \

#define RESOLVE_IMM2() \
	if (src2 & SLJIT_IMM) { \
		PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG2), src2w)); \
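/* Added note (not from the original source): the RESOLVE_IMM* helpers are used
   by sljit_emit_cmp below.  When a compare operand is an immediate that the
   branch form cannot encode, they materialize it into TMP_REG1 or TMP_REG2
   first, and the comparison then proceeds register to register. */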
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	struct sljit_jump *jump;

	CHECK_PTR(check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	flags = WORD_DATA | LOAD_DATA;
#else /* !SLJIT_CONFIG_MIPS_32 */
	flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
#endif /* SLJIT_CONFIG_MIPS_32 */

	if (src1 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG1), src1, src1w, src2, src2w));

	if (src2 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG2), src2, src2w, 0, 0));

	jump = (struct sljit_jump *)ensure_abuf(compiler, sizeof(struct sljit_jump));
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);

	if (type <= SLJIT_NOT_EQUAL) {
		jump->flags |= IS_BIT26_COND;
		if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != DR(src1) && compiler->delay_slot != DR(src2)))
			jump->flags |= IS_MOVABLE;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(src1) | T(src2) | BRANCH_LENGTH, UNMOVABLE_INS));
	else if (type >= SLJIT_SIG_LESS && (((src1 & SLJIT_IMM) && (src1w == 0)) || ((src2 & SLJIT_IMM) && (src2w == 0)))) {
		if ((src1 & SLJIT_IMM) && (src1w == 0)) {
			case SLJIT_SIG_LESS:
				jump->flags |= IS_BIT26_COND;
			case SLJIT_SIG_GREATER_EQUAL:
				jump->flags |= IS_BIT26_COND;
			case SLJIT_SIG_GREATER:
				jump->flags |= IS_BIT16_COND;
			case SLJIT_SIG_LESS_EQUAL:
				jump->flags |= IS_BIT16_COND;

			case SLJIT_SIG_LESS:
				jump->flags |= IS_BIT16_COND;
			case SLJIT_SIG_GREATER_EQUAL:
				jump->flags |= IS_BIT16_COND;
			case SLJIT_SIG_GREATER:
				jump->flags |= IS_BIT26_COND;
			case SLJIT_SIG_LESS_EQUAL:
				jump->flags |= IS_BIT26_COND;

		PTR_FAIL_IF(push_inst(compiler, inst | S(src1) | BRANCH_LENGTH, UNMOVABLE_INS));

		if (type == SLJIT_LESS || type == SLJIT_GREATER_EQUAL || type == SLJIT_SIG_LESS || type == SLJIT_SIG_GREATER_EQUAL) {
			if ((src2 & SLJIT_IMM) && src2w <= SIMM_MAX && src2w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src1) | T(TMP_REG1) | IMM(src2w), DR(TMP_REG1)));
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTU : SLT) | S(src1) | T(src2) | D(TMP_REG1), DR(TMP_REG1)));
			type = (type == SLJIT_LESS || type == SLJIT_SIG_LESS) ? SLJIT_NOT_EQUAL : SLJIT_EQUAL;

			if ((src1 & SLJIT_IMM) && src1w <= SIMM_MAX && src1w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src2) | T(TMP_REG1) | IMM(src1w), DR(TMP_REG1)));
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTU : SLT) | S(src2) | T(src1) | D(TMP_REG1), DR(TMP_REG1)));
			type = (type == SLJIT_GREATER || type == SLJIT_SIG_GREATER) ? SLJIT_NOT_EQUAL : SLJIT_EQUAL;

		jump->flags |= IS_BIT26_COND;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(TMP_REG1) | TA(0) | BRANCH_LENGTH, UNMOVABLE_INS));

	PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

	/* Maximum number of instructions required for generating a constant. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	compiler->size += 2;
	compiler->size += 6;

#undef BRANCH_LENGTH
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
	struct sljit_jump *jump = NULL;

	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	if (src & SLJIT_IMM) {
		jump = (struct sljit_jump *)ensure_abuf(compiler, sizeof(struct sljit_jump));
		set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_JAL : 0));
		jump->u.target = (sljit_uw)srcw;

		if (compiler->delay_slot != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;

	else if (src & SLJIT_MEM) {
		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, DR(TMP_REG2), src, srcw));

	if (type <= SLJIT_JUMP)
		FAIL_IF(push_inst(compiler, JR | S(src), UNMOVABLE_INS));
		FAIL_IF(push_inst(compiler, JALR | S(src) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));

		jump->addr = compiler->size;

		/* Maximum number of instructions required for generating a constant. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		compiler->size += 2;
		compiler->size += 6;

	FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return SLJIT_SUCCESS;
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 type)
{
	sljit_s32 src_ar, dst_ar, invert;
	sljit_s32 saved_op = op;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	sljit_s32 mem_type = WORD_DATA;
	sljit_s32 mem_type = ((op & SLJIT_32) || op == SLJIT_MOV32) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;

	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	op = GET_OPCODE(op);
	dst_ar = DR((op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
		FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, DR(TMP_REG1), dst, dstw, dst, dstw));

	if (type < SLJIT_F_EQUAL) {
		src_ar = OTHER_FLAG;
		invert = type & 0x1;

		case SLJIT_NOT_EQUAL:
			FAIL_IF(push_inst(compiler, SLTIU | SA(EQUAL_FLAG) | TA(dst_ar) | IMM(1), dst_ar));
		case SLJIT_OVERFLOW:
		case SLJIT_NOT_OVERFLOW:
			if (compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)) {
				src_ar = OTHER_FLAG;
			FAIL_IF(push_inst(compiler, SLTIU | SA(OTHER_FLAG) | TA(dst_ar) | IMM(1), dst_ar));

		case SLJIT_F_NOT_EQUAL:
		case SLJIT_F_GREATER_EQUAL:
		case SLJIT_F_GREATER:
		case SLJIT_UNORDERED_OR_NOT_EQUAL:
		case SLJIT_ORDERED_NOT_EQUAL:
		case SLJIT_UNORDERED_OR_GREATER_EQUAL:
		case SLJIT_ORDERED_GREATER_EQUAL:
		case SLJIT_ORDERED_GREATER:
		case SLJIT_UNORDERED_OR_GREATER:

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
		FAIL_IF(push_inst(compiler, MFC1 | TA(dst_ar) | FS(TMP_FREG3), dst_ar));
#else /* SLJIT_MIPS_REV < 6 */
		FAIL_IF(push_inst(compiler, CFC1 | TA(dst_ar) | DA(FCSR_REG), dst_ar));
#endif /* SLJIT_MIPS_REV >= 6 */
		FAIL_IF(push_inst(compiler, SRL | TA(dst_ar) | DA(dst_ar) | SH_IMM(23), dst_ar));
		FAIL_IF(push_inst(compiler, ANDI | SA(dst_ar) | TA(dst_ar) | IMM(1), dst_ar));

		FAIL_IF(push_inst(compiler, XORI | SA(src_ar) | TA(dst_ar) | IMM(1), dst_ar));

	if (op < SLJIT_ADD) {
		if (dst & SLJIT_MEM)
			return emit_op_mem(compiler, mem_type, src_ar, dst, dstw);

		if (src_ar != dst_ar)
			return push_inst(compiler, ADDU_W | SA(src_ar) | TA(0) | DA(dst_ar), dst_ar);
		return SLJIT_SUCCESS;

	/* OTHER_FLAG cannot be specified as src2 argument at the moment. */
	if (DR(TMP_REG2) != src_ar)
		FAIL_IF(push_inst(compiler, ADDU_W | SA(src_ar) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));

	mem_type |= CUMULATIVE_OP | LOGICAL_OP | IMM_OP | ALT_KEEP_CACHE;

	if (dst & SLJIT_MEM)
		return emit_op(compiler, saved_op, mem_type, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
	return emit_op(compiler, saved_op, mem_type, dst, dstw, dst, dstw, TMP_REG2, 0);
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_reg,
	sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
#endif /* SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6 */

	CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
	if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		if (dst_reg & SLJIT_32)
			srcw = (sljit_s32)srcw;
		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));

	dst_reg &= ~SLJIT_32;

		ins = MOVZ | TA(EQUAL_FLAG);
	case SLJIT_NOT_EQUAL:
		ins = MOVN | TA(EQUAL_FLAG);
	case SLJIT_SIG_LESS:
	case SLJIT_SIG_GREATER:
	case SLJIT_OVERFLOW:
		ins = MOVN | TA(OTHER_FLAG);
	case SLJIT_GREATER_EQUAL:
	case SLJIT_LESS_EQUAL:
	case SLJIT_SIG_GREATER_EQUAL:
	case SLJIT_SIG_LESS_EQUAL:
	case SLJIT_NOT_OVERFLOW:
		ins = MOVZ | TA(OTHER_FLAG);

	case SLJIT_F_LESS_EQUAL:
	case SLJIT_ORDERED_EQUAL:
	case SLJIT_UNORDERED_OR_EQUAL:
	case SLJIT_ORDERED_LESS:
	case SLJIT_UNORDERED_OR_LESS:
	case SLJIT_UNORDERED_OR_LESS_EQUAL:
	case SLJIT_ORDERED_LESS_EQUAL:
	case SLJIT_UNORDERED:

	case SLJIT_F_NOT_EQUAL:
	case SLJIT_F_GREATER_EQUAL:
	case SLJIT_F_GREATER:
	case SLJIT_UNORDERED_OR_NOT_EQUAL:
	case SLJIT_ORDERED_NOT_EQUAL:
	case SLJIT_UNORDERED_OR_GREATER_EQUAL:
	case SLJIT_ORDERED_GREATER_EQUAL:
	case SLJIT_ORDERED_GREATER:
	case SLJIT_UNORDERED_OR_GREATER:

		ins = MOVZ | TA(OTHER_FLAG);
		SLJIT_UNREACHABLE();

	return push_inst(compiler, ins | S(src) | D(dst_reg), DR(dst_reg));

#else /* SLJIT_MIPS_REV < 1 || SLJIT_MIPS_REV >= 6 */
	return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
#endif /* SLJIT_MIPS_REV >= 1 */
#if !(defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)

static sljit_s32 update_mem_addr(struct sljit_compiler *compiler, sljit_s32 *mem, sljit_sw *memw, sljit_s16 max_offset)
{
	sljit_s32 arg = *mem;
	sljit_sw argw = *memw;

	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
		if (SLJIT_UNLIKELY(argw)) {
			FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(arg)) | D(TMP_REG1) | SH_IMM(argw), DR(TMP_REG1)));
			FAIL_IF(push_inst(compiler, ADDU_W | S(arg & REG_MASK) | T(TMP_REG1) | D(TMP_REG1), DR(TMP_REG1)));

			FAIL_IF(push_inst(compiler, ADDU_W | S(arg & REG_MASK) | T(OFFS_REG(arg)) | D(TMP_REG1), DR(TMP_REG1)));

		return SLJIT_SUCCESS;

	if (argw <= max_offset && argw >= SIMM_MIN) {
		*mem = arg & REG_MASK;
		return SLJIT_SUCCESS;

	if ((sljit_s16)argw > max_offset) {
		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), argw));

		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), TO_ARGW_HI(argw)));
		*memw = (sljit_s16)argw;

	if ((arg & REG_MASK) == 0)
		return SLJIT_SUCCESS;

	return push_inst(compiler, ADDU_W | S(arg & REG_MASK) | T(TMP_REG1) | D(TMP_REG1), DR(TMP_REG1));

#if (defined SLJIT_LITTLE_ENDIAN && SLJIT_LITTLE_ENDIAN)
#define MEM16_IMM_FIRST(memw) IMM((memw) + 1)
#define MEM16_IMM_SECOND(memw) IMM(memw)
#define MEMF64_FS_FIRST(freg) FS(freg)
#define MEMF64_FS_SECOND(freg) (FS(freg) | ((sljit_ins)1 << 11))
#else /* !SLJIT_LITTLE_ENDIAN */
#define MEM16_IMM_FIRST(memw) IMM(memw)
#define MEM16_IMM_SECOND(memw) IMM((memw) + 1)
#define MEMF64_FS_FIRST(freg) (FS(freg) | ((sljit_ins)1 << 11))
#define MEMF64_FS_SECOND(freg) FS(freg)
#endif /* SLJIT_LITTLE_ENDIAN */
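/* Added note (not from the original source): the MEM16_* and MEMF64_* selectors
   only decide which byte offset or which half of an FPU register pair is used
   by the first and second access of the unaligned sequences below.  The first
   access always handles the more significant byte/word, and the macros pick
   the address or register half that the target's byte order expects, so the
   same SWL/SWR and LWL/LWR pairs serve both endiannesses. */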
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 reg,
	sljit_s32 mem, sljit_sw memw)
{
	sljit_s32 op = type & 0xff;
	sljit_s32 flags = 0;

	CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));

	if (type & (SLJIT_MEM_PRE | SLJIT_MEM_POST))
		return SLJIT_ERR_UNSUPPORTED;

		if (!(type & SLJIT_MEM_STORE))

		if (op == SLJIT_MOV_S8)
			flags |= SIGNED_DATA;

		return emit_op_mem(compiler, flags, DR(reg), mem, memw);

		FAIL_IF(update_mem_addr(compiler, &mem, &memw, SIMM_MAX - 1));
		SLJIT_ASSERT(FAST_IS_REG(mem) && mem != TMP_REG2);

		if (type & SLJIT_MEM_STORE) {
			FAIL_IF(push_inst(compiler, SRA_W | T(reg) | D(TMP_REG2) | SH_IMM(8), DR(TMP_REG2)));
			FAIL_IF(push_inst(compiler, data_transfer_insts[BYTE_DATA] | S(mem) | T(TMP_REG2) | MEM16_IMM_FIRST(memw), MOVABLE_INS));
			return push_inst(compiler, data_transfer_insts[BYTE_DATA] | S(mem) | T(reg) | MEM16_IMM_SECOND(memw), MOVABLE_INS);

		flags = BYTE_DATA | LOAD_DATA;

		if (op == SLJIT_MOV_S16)
			flags |= SIGNED_DATA;

		FAIL_IF(push_inst(compiler, data_transfer_insts[flags] | S(mem) | T(TMP_REG2) | MEM16_IMM_FIRST(memw), DR(TMP_REG2)));
		FAIL_IF(push_inst(compiler, data_transfer_insts[BYTE_DATA | LOAD_DATA] | S(mem) | T(reg) | MEM16_IMM_SECOND(memw), DR(reg)));
		FAIL_IF(push_inst(compiler, SLL_W | T(TMP_REG2) | D(TMP_REG2) | SH_IMM(8), DR(TMP_REG2)));
		return push_inst(compiler, OR | S(reg) | T(TMP_REG2) | D(reg), DR(reg));

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		if (type & SLJIT_MEM_ALIGNED_32) {
			if (!(type & SLJIT_MEM_STORE))

			return emit_op_mem(compiler, flags, DR(reg), mem, memw);

#else /* !SLJIT_CONFIG_MIPS_32 */
		FAIL_IF(update_mem_addr(compiler, &mem, &memw, SIMM_MAX - 7));
		SLJIT_ASSERT(FAST_IS_REG(mem) && mem != TMP_REG2);

		if (type & SLJIT_MEM_STORE) {
			FAIL_IF(push_inst(compiler, SDL | S(mem) | T(reg) | IMM(memw), MOVABLE_INS));
			return push_inst(compiler, SDR | S(mem) | T(reg) | IMM(memw + 7), MOVABLE_INS);

			FAIL_IF(push_inst(compiler, DADDU | S(mem) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));

		FAIL_IF(push_inst(compiler, LDL | S(mem) | T(reg) | IMM(memw), DR(reg)));
		return push_inst(compiler, LDR | S(mem) | T(reg) | IMM(memw + 7), DR(reg));
#endif /* SLJIT_CONFIG_MIPS_32 */

	FAIL_IF(update_mem_addr(compiler, &mem, &memw, SIMM_MAX - 3));
	SLJIT_ASSERT(FAST_IS_REG(mem) && mem != TMP_REG2);

	if (type & SLJIT_MEM_STORE) {
		FAIL_IF(push_inst(compiler, SWL | S(mem) | T(reg) | IMM(memw), MOVABLE_INS));
		return push_inst(compiler, SWR | S(mem) | T(reg) | IMM(memw + 3), MOVABLE_INS);

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		FAIL_IF(push_inst(compiler, ADDU | S(mem) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));

	FAIL_IF(push_inst(compiler, LWL | S(mem) | T(reg) | IMM(memw), DR(reg)));
	return push_inst(compiler, LWR | S(mem) | T(reg) | IMM(memw + 3), DR(reg));

#else /* !SLJIT_CONFIG_MIPS_32 */
		FAIL_IF(push_inst(compiler, DADDU | S(mem) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));

	FAIL_IF(push_inst(compiler, LWL | S(mem) | T(reg) | IMM(memw), DR(reg)));
	FAIL_IF(push_inst(compiler, LWR | S(mem) | T(reg) | IMM(memw + 3), DR(reg)));

	if (op == SLJIT_MOV_U32) {
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 2)
		return push_inst(compiler, DINSU | T(reg) | SA(0) | (31 << 11) | (0 << 11), DR(reg));
#else /* SLJIT_MIPS_REV < 2 */
		FAIL_IF(push_inst(compiler, DSLL32 | T(reg) | D(reg) | SH_IMM(0), DR(reg)));
		return push_inst(compiler, DSRL32 | T(reg) | D(reg) | SH_IMM(0), DR(reg));
#endif /* SLJIT_MIPS_REV >= 2 */

	return SLJIT_SUCCESS;
#endif /* SLJIT_CONFIG_MIPS_32 */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 freg,
	sljit_s32 mem, sljit_sw memw)
{
	CHECK(check_sljit_emit_fmem(compiler, type, freg, mem, memw));

	if (type & (SLJIT_MEM_PRE | SLJIT_MEM_POST))
		return SLJIT_ERR_UNSUPPORTED;

	FAIL_IF(update_mem_addr(compiler, &mem, &memw, SIMM_MAX - ((type & SLJIT_32) ? 3 : 7)));
	SLJIT_ASSERT(FAST_IS_REG(mem) && mem != TMP_REG2);

	if (type & SLJIT_MEM_STORE) {
		if (type & SLJIT_32) {
			FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | FS(freg), DR(TMP_REG2)));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
			FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

			FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM(memw), MOVABLE_INS));
			return push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), MOVABLE_INS);

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | MEMF64_FS_FIRST(freg), DR(TMP_REG2)));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

		FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM(memw), MOVABLE_INS));
		FAIL_IF(push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), MOVABLE_INS));

		FAIL_IF(push_inst(compiler, MFC1 | T(TMP_REG2) | MEMF64_FS_SECOND(freg), DR(TMP_REG2)));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

		FAIL_IF(push_inst(compiler, SWL | S(mem) | T(TMP_REG2) | IMM(memw + 4), MOVABLE_INS));
		return push_inst(compiler, SWR | S(mem) | T(TMP_REG2) | IMM(memw + 7), MOVABLE_INS);
#else /* !SLJIT_CONFIG_MIPS_32 */
		FAIL_IF(push_inst(compiler, MFC1 | (1 << 21) | T(TMP_REG2) | FS(freg), DR(TMP_REG2)));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

		FAIL_IF(push_inst(compiler, SDL | S(mem) | T(TMP_REG2) | IMM(memw), MOVABLE_INS));
		return push_inst(compiler, SDR | S(mem) | T(TMP_REG2) | IMM(memw + 7), MOVABLE_INS);
#endif /* SLJIT_CONFIG_MIPS_32 */

	if (type & SLJIT_32) {
		FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM(memw), DR(TMP_REG2)));
		FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), DR(TMP_REG2)));

		FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | FS(freg), MOVABLE_INS));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

		return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM(memw), DR(TMP_REG2)));
	FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM(memw + 3), DR(TMP_REG2)));
	FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | MEMF64_FS_FIRST(freg), MOVABLE_INS));

	FAIL_IF(push_inst(compiler, LWL | S(mem) | T(TMP_REG2) | IMM(memw + 4), DR(TMP_REG2)));
	FAIL_IF(push_inst(compiler, LWR | S(mem) | T(TMP_REG2) | IMM(memw + 7), DR(TMP_REG2)));
	FAIL_IF(push_inst(compiler, MTC1 | T(TMP_REG2) | MEMF64_FS_SECOND(freg), MOVABLE_INS));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
	FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

#else /* !SLJIT_CONFIG_MIPS_32 */
	FAIL_IF(push_inst(compiler, LDL | S(mem) | T(TMP_REG2) | IMM(memw), DR(TMP_REG2)));
	FAIL_IF(push_inst(compiler, LDR | S(mem) | T(TMP_REG2) | IMM(memw + 7), DR(TMP_REG2)));

	FAIL_IF(push_inst(compiler, MTC1 | (1 << 21) | T(TMP_REG2) | FS(freg), MOVABLE_INS));
#if (!defined SLJIT_MIPS_REV || SLJIT_MIPS_REV <= 3)
	FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

#endif /* SLJIT_CONFIG_MIPS_32 */
	return SLJIT_SUCCESS;

#undef MEM16_IMM_FIRST
#undef MEM16_IMM_SECOND
#undef MEMF64_FS_FIRST
#undef MEMF64_FS_SECOND

#endif /* !SLJIT_MIPS_REV || SLJIT_MIPS_REV < 6 */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
	struct sljit_const *const_;

	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	const_ = (struct sljit_const *)ensure_abuf(compiler, sizeof(struct sljit_const));
	PTR_FAIL_IF(!const_);
	set_const(const_, compiler);

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
	PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, DR(TMP_REG2), dst, dstw));

SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	struct sljit_put_label *put_label;

	CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	put_label = (struct sljit_put_label *)ensure_abuf(compiler, sizeof(struct sljit_put_label));
	PTR_FAIL_IF(!put_label);
	set_put_label(put_label, compiler, 0);

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
	PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r, UNMOVABLE_INS));
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	compiler->size += 1;
	compiler->size += 5;

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, DR(TMP_REG2), dst, dstw));