 * Stack-less Just-In-Time compiler
 *
 * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* Latest MIPS architecture. */

#ifndef __mips_hard_float
/* Disable automatic detection, covers both -msoft-float and -mno-float */
#undef SLJIT_IS_FPU_AVAILABLE
#define SLJIT_IS_FPU_AVAILABLE 0

SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	return "MIPS32-R6" SLJIT_CPUINFO;
#else /* !SLJIT_CONFIG_MIPS_32 */
	return "MIPS64-R6" SLJIT_CPUINFO;
#endif /* SLJIT_CONFIG_MIPS_32 */

#elif (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	return "MIPS32-R1" SLJIT_CPUINFO;
#else /* !SLJIT_CONFIG_MIPS_32 */
	return "MIPS64-R1" SLJIT_CPUINFO;
#endif /* SLJIT_CONFIG_MIPS_32 */

#else /* SLJIT_MIPS_REV < 1 */
	return "MIPS III" SLJIT_CPUINFO;
#endif /* SLJIT_MIPS_REV >= 6 */

/* Length of an instruction word
   Both for mips-32 and mips-64 */
typedef sljit_u32 sljit_ins;

#define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)
#define TMP_REG3	(SLJIT_NUMBER_OF_REGISTERS + 4)

/* For position independent code, t9 must contain the function address. */
#define PIC_ADDR_REG	TMP_REG2

/* Floating point status register. */
/* Return address register. */
#define RETURN_ADDR_REG	31

/* Flags are kept in volatile registers. */

#define TMP_FREG1	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
#define TMP_FREG2	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
#define TMP_FREG3	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3)
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
	0, 2, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 4, 25, 31
};

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
	0, 0, 14, 2, 4, 6, 8, 12, 10, 16
};

#else

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 4] = {
	0, 0, 13, 14, 15, 16, 17, 12, 18, 10
};

#endif
/* --------------------------------------------------------------------- */
/*  Instruction forms                                                     */
/* --------------------------------------------------------------------- */

#define S(s)		(reg_map[s] << 21)
#define T(t)		(reg_map[t] << 16)
#define D(d)		(reg_map[d] << 11)
#define FT(t)		(freg_map[t] << 16)
#define FS(s)		(freg_map[s] << 11)
#define FD(d)		(freg_map[d] << 6)
/* Absolute registers. */
#define SA(s)		((s) << 21)
#define TA(t)		((t) << 16)
#define DA(d)		((d) << 11)
#define IMM(imm)	((imm) & 0xffff)
#define SH_IMM(imm)	((imm) << 6)

#define DR(dr)		(reg_map[dr])
#define FR(dr)		(freg_map[dr])
#define HI(opcode)	((opcode) << 26)
#define LO(opcode)	(opcode)
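
/* Illustrative note (not part of the original source): an instruction word is
   built by OR-ing an opcode pattern with these field macros. A hypothetical
   "addu rd, rs, rt" encoding could be composed as

       (HI(0) | LO(33)) | S(rs) | T(rt) | D(rd)

   placing the opcode in bits 31-26, the function code in bits 5-0, and the
   reg_map translated register numbers in bits 25-21, 20-16 and 15-11. The SA /
   TA / DA variants take already-absolute register numbers instead of going
   through reg_map. */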
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
/* S = (20 << 21) D = (21 << 21) */
#define CMP_FMT_S	(20 << 21)
#endif /* SLJIT_MIPS_REV >= 6 */
/* S = (16 << 21) D = (17 << 21) */
#define FMT_S		(16 << 21)
#define FMT_D		(17 << 21)

#define ABS_S		(HI(17) | FMT_S | LO(5))
#define ADD_S		(HI(17) | FMT_S | LO(0))
#define ADDIU		(HI(9))
#define ADDU		(HI(0) | LO(33))
#define AND		(HI(0) | LO(36))
#define ANDI		(HI(12))
#define BAL		(HI(1) | (17 << 16))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define BC1EQZ		(HI(17) | (9 << 21) | FT(TMP_FREG3))
#define BC1NEZ		(HI(17) | (13 << 21) | FT(TMP_FREG3))
#else /* SLJIT_MIPS_REV < 6 */
#define BC1F		(HI(17) | (8 << 21))
#define BC1T		(HI(17) | (8 << 21) | (1 << 16))
#endif /* SLJIT_MIPS_REV >= 6 */
#define BGEZ		(HI(1) | (1 << 16))
#define BLTZ		(HI(1) | (0 << 16))
#define BREAK		(HI(0) | LO(13))
#define CFC1		(HI(17) | (2 << 21))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define C_UEQ_S		(HI(17) | CMP_FMT_S | LO(3))
#define C_ULE_S		(HI(17) | CMP_FMT_S | LO(7))
#define C_ULT_S		(HI(17) | CMP_FMT_S | LO(5))
#define C_UN_S		(HI(17) | CMP_FMT_S | LO(1))
#define C_FD		(FD(TMP_FREG3))
#else /* SLJIT_MIPS_REV < 6 */
#define C_UEQ_S		(HI(17) | FMT_S | LO(51))
#define C_ULE_S		(HI(17) | FMT_S | LO(55))
#define C_ULT_S		(HI(17) | FMT_S | LO(53))
#define C_UN_S		(HI(17) | FMT_S | LO(49))
#endif /* SLJIT_MIPS_REV >= 6 */
#define CVT_S_S		(HI(17) | FMT_S | LO(32))
#define DADDIU		(HI(25))
#define DADDU		(HI(0) | LO(45))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define DDIV		(HI(0) | (2 << 6) | LO(30))
#define DDIVU		(HI(0) | (2 << 6) | LO(31))
#define DMOD		(HI(0) | (3 << 6) | LO(30))
#define DMODU		(HI(0) | (3 << 6) | LO(31))
#define DIV		(HI(0) | (2 << 6) | LO(26))
#define DIVU		(HI(0) | (2 << 6) | LO(27))
#define DMUH		(HI(0) | (3 << 6) | LO(28))
#define DMUHU		(HI(0) | (3 << 6) | LO(29))
#define DMUL		(HI(0) | (2 << 6) | LO(28))
#define DMULU		(HI(0) | (2 << 6) | LO(29))
#else /* SLJIT_MIPS_REV < 6 */
#define DDIV		(HI(0) | LO(30))
#define DDIVU		(HI(0) | LO(31))
#define DIV		(HI(0) | LO(26))
#define DIVU		(HI(0) | LO(27))
#define DMULT		(HI(0) | LO(28))
#define DMULTU		(HI(0) | LO(29))
#endif /* SLJIT_MIPS_REV >= 6 */
#define DIV_S		(HI(17) | FMT_S | LO(3))
#define DSLL		(HI(0) | LO(56))
#define DSLL32		(HI(0) | LO(60))
#define DSLLV		(HI(0) | LO(20))
#define DSRA		(HI(0) | LO(59))
#define DSRA32		(HI(0) | LO(63))
#define DSRAV		(HI(0) | LO(23))
#define DSRL		(HI(0) | LO(58))
#define DSRL32		(HI(0) | LO(62))
#define DSRLV		(HI(0) | LO(22))
#define DSUBU		(HI(0) | LO(47))
#define JALR		(HI(0) | LO(9))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define JR		(HI(0) | LO(9))
#else /* SLJIT_MIPS_REV < 6 */
#define JR		(HI(0) | LO(8))
#endif /* SLJIT_MIPS_REV >= 6 */
#define MFC1		(HI(17))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define MOD		(HI(0) | (3 << 6) | LO(26))
#define MODU		(HI(0) | (3 << 6) | LO(27))
#else /* SLJIT_MIPS_REV < 6 */
#define MFHI		(HI(0) | LO(16))
#define MFLO		(HI(0) | LO(18))
#endif /* SLJIT_MIPS_REV >= 6 */
#define MOV_S		(HI(17) | FMT_S | LO(6))
#define MTC1		(HI(17) | (4 << 21))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define MUH		(HI(0) | (3 << 6) | LO(24))
#define MUHU		(HI(0) | (3 << 6) | LO(25))
#define MUL		(HI(0) | (2 << 6) | LO(24))
#define MULU		(HI(0) | (2 << 6) | LO(25))
#else /* SLJIT_MIPS_REV < 6 */
#define MULT		(HI(0) | LO(24))
#define MULTU		(HI(0) | LO(25))
#endif /* SLJIT_MIPS_REV >= 6 */
#define MUL_S		(HI(17) | FMT_S | LO(2))
#define NEG_S		(HI(17) | FMT_S | LO(7))
#define NOP		(HI(0) | LO(0))
#define NOR		(HI(0) | LO(39))
#define OR		(HI(0) | LO(37))
#define SDC1		(HI(61))
#define SLT		(HI(0) | LO(42))
#define SLTI		(HI(10))
#define SLTIU		(HI(11))
#define SLTU		(HI(0) | LO(43))
#define SLL		(HI(0) | LO(0))
#define SLLV		(HI(0) | LO(4))
#define SRL		(HI(0) | LO(2))
#define SRLV		(HI(0) | LO(6))
#define SRA		(HI(0) | LO(3))
#define SRAV		(HI(0) | LO(7))
#define SUB_S		(HI(17) | FMT_S | LO(1))
#define SUBU		(HI(0) | LO(35))
#define SWC1		(HI(57))
#define TRUNC_W_S	(HI(17) | FMT_S | LO(13))
#define XOR		(HI(0) | LO(38))
#define XORI		(HI(14))

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
#define CLZ		(HI(28) | LO(32))
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#define DCLZ		(LO(18))
#else /* SLJIT_MIPS_REV < 6 */
#define DCLZ		(HI(28) | LO(36))
#define MOVF		(HI(0) | (0 << 16) | LO(1))
#define MOVN		(HI(0) | LO(11))
#define MOVT		(HI(0) | (1 << 16) | LO(1))
#define MOVZ		(HI(0) | LO(10))
#define MUL		(HI(28) | LO(2))
#endif /* SLJIT_MIPS_REV >= 6 */
#define PREF		(HI(51))
#define PREFX		(HI(19) | LO(15))
#define SEB		(HI(31) | (16 << 6) | LO(32))
#define SEH		(HI(31) | (24 << 6) | LO(32))
#endif /* SLJIT_MIPS_REV >= 1 */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define ADDIU_W		ADDIU
#else
#define ADDIU_W		DADDIU
#endif

#define SIMM_MAX	(0x7fff)
#define SIMM_MIN	(-0x8000)
#define UIMM_MAX	(0xffff)
/* dest_reg is the absolute name of the register
   Useful for reordering instructions in the delay slot. */
static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot)
	sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
	SLJIT_ASSERT(delay_slot == MOVABLE_INS || delay_slot >= UNMOVABLE_INS
		|| delay_slot == ((ins >> 11) & 0x1f) || delay_slot == ((ins >> 16) & 0x1f));
	compiler->delay_slot = delay_slot;
	return SLJIT_SUCCESS;
static SLJIT_INLINE sljit_ins invert_branch(sljit_s32 flags)
	if (flags & IS_BIT26_COND)
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
	if (flags & IS_BIT23_COND)
#endif /* SLJIT_MIPS_REV >= 6 */

static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
	sljit_uw target_addr;
	sljit_ins saved_inst;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	if (jump->flags & (SLJIT_REWRITABLE_JUMP | IS_CALL))
	if (jump->flags & SLJIT_REWRITABLE_JUMP)

	if (jump->flags & JUMP_ADDR)
		target_addr = jump->u.target;
		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
		target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;

	inst = (sljit_ins *)jump->addr;
	if (jump->flags & IS_COND)

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (jump->flags & IS_CALL)

	/* B instructions. */
	if (jump->flags & IS_MOVABLE) {
		diff = ((sljit_sw)target_addr - (sljit_sw)inst - executable_offset) >> 2;
		if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
			jump->flags |= PATCH_B;

			if (!(jump->flags & IS_COND)) {
				inst[-1] = (jump->flags & IS_JAL) ? BAL : B;
				jump->addr -= sizeof(sljit_ins);
			saved_inst = inst[0];
			inst[-1] = saved_inst ^ invert_branch(jump->flags);
			jump->addr -= 2 * sizeof(sljit_ins);

		diff = ((sljit_sw)target_addr - (sljit_sw)(inst + 1) - executable_offset) >> 2;
		if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
			jump->flags |= PATCH_B;

			if (!(jump->flags & IS_COND)) {
				inst[0] = (jump->flags & IS_JAL) ? BAL : B;
			inst[0] = inst[0] ^ invert_branch(jump->flags);
			jump->addr -= sizeof(sljit_ins);

	if (jump->flags & IS_COND) {
		if ((jump->flags & IS_MOVABLE) && (target_addr & ~0xfffffff) == ((jump->addr + 2 * sizeof(sljit_ins)) & ~0xfffffff)) {
			jump->flags |= PATCH_J;
			saved_inst = inst[0];
			inst[-1] = (saved_inst & 0xffff0000) | 3;
		else if ((target_addr & ~0xfffffff) == ((jump->addr + 3 * sizeof(sljit_ins)) & ~0xfffffff)) {
			jump->flags |= PATCH_J;
			inst[0] = (inst[0] & 0xffff0000) | 3;
			jump->addr += sizeof(sljit_ins);

	if ((jump->flags & IS_MOVABLE) && (target_addr & ~0xfffffff) == (jump->addr & ~0xfffffff)) {
		jump->flags |= PATCH_J;
		inst[-1] = (jump->flags & IS_JAL) ? JAL : J;
		jump->addr -= sizeof(sljit_ins);
	if ((target_addr & ~0xfffffff) == ((jump->addr + sizeof(sljit_ins)) & ~0xfffffff)) {
		jump->flags |= PATCH_J;
		inst[0] = (jump->flags & IS_JAL) ? JAL : J;

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (target_addr <= 0x7fffffff) {
		jump->flags |= PATCH_ABS32;
		if (jump->flags & IS_COND) {
	if (target_addr <= 0x7fffffffffffl) {
		jump->flags |= PATCH_ABS48;
		if (jump->flags & IS_COND) {

static __attribute__ ((noinline)) void sljit_cache_flush(void* code, void* code_ptr)
	SLJIT_CACHE_FLUSH(code, code_ptr);

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)

static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
	if (max_label < 0x80000000l) {
		put_label->flags = 0;
	if (max_label < 0x800000000000l) {
		put_label->flags = 1;
	put_label->flags = 2;

static SLJIT_INLINE void put_label_set(struct sljit_put_label *put_label)
	sljit_uw addr = put_label->label->addr;
	sljit_ins *inst = (sljit_ins *)put_label->addr;
	sljit_s32 reg = *inst;

	if (put_label->flags == 0) {
		SLJIT_ASSERT(addr < 0x80000000l);
		inst[0] = LUI | T(reg) | IMM(addr >> 16);
	else if (put_label->flags == 1) {
		SLJIT_ASSERT(addr < 0x800000000000l);
		inst[0] = LUI | T(reg) | IMM(addr >> 32);
		inst[1] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
		inst[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);
		inst[0] = LUI | T(reg) | IMM(addr >> 48);
		inst[1] = ORI | S(reg) | T(reg) | IMM((addr >> 32) & 0xffff);
		inst[2] = DSLL | T(reg) | D(reg) | SH_IMM(16);
		inst[3] = ORI | S(reg) | T(reg) | IMM((addr >> 16) & 0xffff);
		inst[4] = DSLL | T(reg) | D(reg) | SH_IMM(16);

	inst[1] = ORI | S(reg) | T(reg) | IMM(addr & 0xffff);

SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
	struct sljit_memory_fragment *buf;
	sljit_sw executable_offset;
	struct sljit_label *label;
	struct sljit_jump *jump;
	struct sljit_const *const_;
	struct sljit_put_label *put_label;

	CHECK_PTR(check_sljit_generate_code(compiler));
	reverse_buf(compiler);

	code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
	PTR_FAIL_WITH_EXEC_IF(code);
	executable_offset = SLJIT_EXEC_OFFSET(code);

	label = compiler->labels;
	jump = compiler->jumps;
	const_ = compiler->consts;
	put_label = compiler->put_labels;

		buf_ptr = (sljit_ins*)buf->memory;
		buf_end = buf_ptr + (buf->used_size >> 2);
			*code_ptr = *buf_ptr++;
			if (next_addr == word_count) {
				SLJIT_ASSERT(!label || label->size >= word_count);
				SLJIT_ASSERT(!jump || jump->addr >= word_count);
				SLJIT_ASSERT(!const_ || const_->addr >= word_count);
				SLJIT_ASSERT(!put_label || put_label->addr >= word_count);

				/* These structures are ordered by their address. */
				if (label && label->size == word_count) {
					label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
					label->size = code_ptr - code;
				if (jump && jump->addr == word_count) {
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
					jump->addr = (sljit_uw)(code_ptr - 3);
					jump->addr = (sljit_uw)(code_ptr - 7);
					code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
				if (const_ && const_->addr == word_count) {
					const_->addr = (sljit_uw)code_ptr;
					const_ = const_->next;
				if (put_label && put_label->addr == word_count) {
					SLJIT_ASSERT(put_label->label);
					put_label->addr = (sljit_uw)code_ptr;
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
					code_ptr += put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
					put_label = put_label->next;
				next_addr = compute_next_addr(label, jump, const_, put_label);
		} while (buf_ptr < buf_end);

	if (label && label->size == word_count) {
		label->addr = (sljit_uw)code_ptr;
		label->size = code_ptr - code;

	SLJIT_ASSERT(!label);
	SLJIT_ASSERT(!const_);
	SLJIT_ASSERT(!put_label);
	SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);

	jump = compiler->jumps;
		addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
		buf_ptr = (sljit_ins *)jump->addr;

		if (jump->flags & PATCH_B) {
			addr = (sljit_sw)(addr - ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) + sizeof(sljit_ins))) >> 2;
			SLJIT_ASSERT((sljit_sw)addr <= SIMM_MAX && (sljit_sw)addr >= SIMM_MIN);
			buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | (addr & 0xffff);
		if (jump->flags & PATCH_J) {
			SLJIT_ASSERT((addr & ~0xfffffff) == (((sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) + sizeof(sljit_ins)) & ~0xfffffff));
			buf_ptr[0] |= (addr >> 2) & 0x03ffffff;

		/* Set the fields of immediate loads. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 16) & 0xffff);
		buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | (addr & 0xffff);
		if (jump->flags & PATCH_ABS32) {
			SLJIT_ASSERT(addr <= 0x7fffffff);
			buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 16) & 0xffff);
			buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | (addr & 0xffff);
		else if (jump->flags & PATCH_ABS48) {
			SLJIT_ASSERT(addr <= 0x7fffffffffffl);
			buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 32) & 0xffff);
			buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | ((addr >> 16) & 0xffff);
			buf_ptr[3] = (buf_ptr[3] & 0xffff0000) | (addr & 0xffff);
			buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 48) & 0xffff);
			buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | ((addr >> 32) & 0xffff);
			buf_ptr[3] = (buf_ptr[3] & 0xffff0000) | ((addr >> 16) & 0xffff);
			buf_ptr[5] = (buf_ptr[5] & 0xffff0000) | (addr & 0xffff);

	put_label = compiler->put_labels;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		addr = put_label->label->addr;
		buf_ptr = (sljit_ins *)put_label->addr;

		SLJIT_ASSERT((buf_ptr[0] & 0xffe00000) == LUI && (buf_ptr[1] & 0xfc000000) == ORI);
		buf_ptr[0] |= (addr >> 16) & 0xffff;
		buf_ptr[1] |= addr & 0xffff;
		put_label_set(put_label);
		put_label = put_label->next;

	compiler->error = SLJIT_ERR_COMPILED;
	compiler->executable_offset = executable_offset;
	compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins);

	code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
	code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);

	SLJIT_CACHE_FLUSH(code, code_ptr);
	/* GCC workaround for invalid code generation with -O2. */
	sljit_cache_flush(code, code_ptr);
	SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
	switch (feature_type) {
#ifdef SLJIT_IS_FPU_AVAILABLE
		return SLJIT_IS_FPU_AVAILABLE;
#elif defined(__GNUC__)
		__asm__ ("cfc1 %0, $0" : "=r"(fir));
		return (fir >> 22) & 0x1;
#error "FIR check is not implemented for this architecture"
	case SLJIT_HAS_ZERO_REGISTER:
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
	case SLJIT_HAS_PREFETCH:
#endif /* SLJIT_MIPS_REV >= 1 */
/* --------------------------------------------------------------------- */
/* --------------------------------------------------------------------- */

/* Creates an index in data_transfer_insts array. */
#define LOAD_DATA	0x01
#define WORD_DATA	0x00
#define BYTE_DATA	0x02
#define HALF_DATA	0x04
#define INT_DATA	0x06
#define SIGNED_DATA	0x08
/* Separates integer and floating point registers */
#define DOUBLE_DATA	0x10
#define SINGLE_DATA	0x12

#define MEM_MASK	0x1f
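
/* Illustrative note (not part of the original source): these bits form an
   index into the data_transfer_insts table defined further below. For
   example, BYTE_DATA | LOAD_DATA selects "lbu",
   BYTE_DATA | SIGNED_DATA | LOAD_DATA selects "lb", and
   DOUBLE_DATA | LOAD_DATA selects "ldc1". MEM_MASK keeps only these transfer
   bits when they are mixed with the operation flags defined below. */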
#define ARG_TEST	0x00020
#define ALT_KEEP_CACHE	0x00040
#define CUMULATIVE_OP	0x00080
#define LOGICAL_OP	0x00100
#define IMM_OP		0x00200
#define SRC2_IMM	0x00400

#define UNUSED_DEST	0x00800
#define REG_DEST	0x01000
#define REG1_SOURCE	0x02000
#define REG2_SOURCE	0x04000
#define SLOW_SRC1	0x08000
#define SLOW_SRC2	0x10000
#define SLOW_DEST	0x20000

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define STACK_STORE	SW
#define STACK_LOAD	LW
#else
#define STACK_STORE	SD
#define STACK_LOAD	LD
#endif

static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw);

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#include "sljitNativeMIPS_32.c"
#else
#include "sljitNativeMIPS_64.c"
#endif
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
	sljit_s32 args, i, tmp, offs;

	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1) + SLJIT_LOCALS_OFFSET;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	local_size = (local_size + 15) & ~0xf;
	local_size = (local_size + 31) & ~0x1f;
	compiler->local_size = local_size;

	if (local_size <= SIMM_MAX) {
		FAIL_IF(push_inst(compiler, ADDIU_W | S(SLJIT_SP) | T(SLJIT_SP) | IMM(-local_size), DR(SLJIT_SP)));
		offs = local_size - (sljit_sw)sizeof(sljit_sw);
		FAIL_IF(load_immediate(compiler, DR(OTHER_FLAG), local_size));
		FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
		FAIL_IF(push_inst(compiler, SUBU_W | S(SLJIT_SP) | T(OTHER_FLAG) | D(SLJIT_SP), DR(SLJIT_SP)));
		offs = -(sljit_sw)sizeof(sljit_sw);

	FAIL_IF(push_inst(compiler, STACK_STORE | base | TA(RETURN_ADDR_REG) | IMM(offs), MOVABLE_INS));

	tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
	for (i = SLJIT_S0; i >= tmp; i--) {
		offs -= (sljit_s32)(sizeof(sljit_sw));
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offs), MOVABLE_INS));

	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
		offs -= (sljit_s32)(sizeof(sljit_sw));
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offs), MOVABLE_INS));

	args = get_arg_count(arg_types);
		FAIL_IF(push_inst(compiler, ADDU_W | SA(4) | TA(0) | D(SLJIT_S0), DR(SLJIT_S0)));
		FAIL_IF(push_inst(compiler, ADDU_W | SA(5) | TA(0) | D(SLJIT_S1), DR(SLJIT_S1)));
		FAIL_IF(push_inst(compiler, ADDU_W | SA(6) | TA(0) | D(SLJIT_S2), DR(SLJIT_S2)));

	return SLJIT_SUCCESS;

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1) + SLJIT_LOCALS_OFFSET;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	compiler->local_size = (local_size + 15) & ~0xf;
	compiler->local_size = (local_size + 31) & ~0x1f;
	return SLJIT_SUCCESS;

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
	sljit_s32 local_size, i, tmp, offs;

	CHECK(check_sljit_emit_return(compiler, op, src, srcw));
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

	local_size = compiler->local_size;
	if (local_size <= SIMM_MAX)
		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), local_size));
		FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | T(TMP_REG1) | D(TMP_REG1), DR(TMP_REG1)));

	FAIL_IF(push_inst(compiler, STACK_LOAD | base | TA(RETURN_ADDR_REG) | IMM(local_size - (sljit_s32)sizeof(sljit_sw)), RETURN_ADDR_REG));
	offs = local_size - (sljit_s32)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1);

	tmp = compiler->scratches;
	for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(i) | IMM(offs), DR(i)));
		offs += (sljit_s32)(sizeof(sljit_sw));

	tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
	for (i = tmp; i <= SLJIT_S0; i++) {
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(i) | IMM(offs), DR(i)));
		offs += (sljit_s32)(sizeof(sljit_sw));

	SLJIT_ASSERT(offs == local_size - (sljit_sw)(sizeof(sljit_sw)));

	FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS));
	if (compiler->local_size <= SIMM_MAX)
		return push_inst(compiler, ADDIU_W | S(SLJIT_SP) | T(SLJIT_SP) | IMM(compiler->local_size), UNMOVABLE_INS);
	return push_inst(compiler, ADDU_W | S(TMP_REG1) | TA(0) | D(SLJIT_SP), UNMOVABLE_INS);
/* --------------------------------------------------------------------- */
/* --------------------------------------------------------------------- */

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define ARCH_32_64(a, b)	a
#else
#define ARCH_32_64(a, b)	b
#endif

static const sljit_ins data_transfer_insts[16 + 4] = {
/* u w s */ ARCH_32_64(HI(43) /* sw */, HI(63) /* sd */),
/* u w l */ ARCH_32_64(HI(35) /* lw */, HI(55) /* ld */),
/* u b s */ HI(40) /* sb */,
/* u b l */ HI(36) /* lbu */,
/* u h s */ HI(41) /* sh */,
/* u h l */ HI(37) /* lhu */,
/* u i s */ HI(43) /* sw */,
/* u i l */ ARCH_32_64(HI(35) /* lw */, HI(39) /* lwu */),

/* s w s */ ARCH_32_64(HI(43) /* sw */, HI(63) /* sd */),
/* s w l */ ARCH_32_64(HI(35) /* lw */, HI(55) /* ld */),
/* s b s */ HI(40) /* sb */,
/* s b l */ HI(32) /* lb */,
/* s h s */ HI(41) /* sh */,
/* s h l */ HI(33) /* lh */,
/* s i s */ HI(43) /* sw */,
/* s i l */ HI(35) /* lw */,

/* d   s */ HI(61) /* sdc1 */,
/* d   l */ HI(53) /* ldc1 */,
/* s   s */ HI(57) /* swc1 */,
/* s   l */ HI(49) /* lwc1 */,
};
/* reg_ar is an absolute register! */

/* Can perform an operation using at most 1 instruction. */
static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
	SLJIT_ASSERT(arg & SLJIT_MEM);

	if (!(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN) {
		/* Works for both absolute and relative addresses. */
		if (SLJIT_UNLIKELY(flags & ARG_TEST))
		FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(arg & REG_MASK)
			| TA(reg_ar) | IMM(argw), ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? reg_ar : MOVABLE_INS));

/* See getput_arg below.
   Note: can_cache is called only for binary operators. Those
   operators always use word arguments without write back. */
static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
	SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));

	/* Simple operation except for updates. */
	if (arg & OFFS_REG_MASK) {
		if (argw && argw == next_argw && (arg == next_arg || (arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK)))

	if (arg == next_arg) {
		if (((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN))
/* Emit the necessary instructions. See can_cache above. */
static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
	sljit_s32 tmp_ar, base, delay_slot;

	SLJIT_ASSERT(arg & SLJIT_MEM);
	if (!(next_arg & SLJIT_MEM)) {

	if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) {
		tmp_ar = DR(TMP_REG1);
		delay_slot = MOVABLE_INS;
	base = arg & REG_MASK;

	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
		/* Using the cache. */
		if (argw == compiler->cache_argw) {
			if (arg == compiler->cache_arg)
				return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);

			if ((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) {
				if (arg == next_arg && argw == (next_argw & 0x3)) {
					compiler->cache_arg = arg;
					compiler->cache_argw = argw;
					FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(TMP_REG3), DR(TMP_REG3)));
					return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);
				FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | DA(tmp_ar), tmp_ar));
				return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

		if (SLJIT_UNLIKELY(argw)) {
			compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK);
			compiler->cache_argw = argw;
			FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(arg)) | D(TMP_REG3) | SH_IMM(argw), DR(TMP_REG3)));

		if (arg == next_arg && argw == (next_argw & 0x3)) {
			compiler->cache_arg = arg;
			compiler->cache_argw = argw;
			FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | D(TMP_REG3), DR(TMP_REG3)));
			tmp_ar = DR(TMP_REG3);
		FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | DA(tmp_ar), tmp_ar));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

	if (compiler->cache_arg == arg && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
		if (argw != compiler->cache_argw) {
			FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
			compiler->cache_argw = argw;
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);

	if (compiler->cache_arg == SLJIT_MEM && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
		if (argw != compiler->cache_argw)
			FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
		compiler->cache_arg = SLJIT_MEM;
		FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw));
	compiler->cache_argw = argw;

		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);

	if (arg == next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN) {
		compiler->cache_arg = arg;
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | D(TMP_REG3), DR(TMP_REG3)));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);

	FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | DA(tmp_ar), tmp_ar));
	return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg_ar, sljit_s32 arg, sljit_sw argw)
	sljit_s32 tmp_ar, base, delay_slot;

	if (getput_arg_fast(compiler, flags, reg_ar, arg, argw))
		return compiler->error;

	if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) {
		delay_slot = reg_ar;
		tmp_ar = DR(TMP_REG1);
		delay_slot = MOVABLE_INS;
	base = arg & REG_MASK;

	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
		if (SLJIT_UNLIKELY(argw)) {
			FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(arg)) | DA(tmp_ar) | SH_IMM(argw), tmp_ar));
			FAIL_IF(push_inst(compiler, ADDU_W | S(base) | TA(tmp_ar) | DA(tmp_ar), tmp_ar));
		FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(OFFS_REG(arg)) | DA(tmp_ar), tmp_ar));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

	FAIL_IF(load_immediate(compiler, tmp_ar, argw));
	FAIL_IF(push_inst(compiler, ADDU_W | S(base) | TA(tmp_ar) | DA(tmp_ar), tmp_ar));
	return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);

static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
	if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
		return compiler->error;
	return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);

static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
	/* arg1 goes to TMP_REG1 or src reg
	   arg2 goes to TMP_REG2, imm or src reg
	   TMP_REG3 can be used for caching
	   result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
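	/* Illustrative note (not part of the original source): for an addition
	   such as "SLJIT_ADD mem_dst, reg, small_imm", the immediate is folded in
	   through SRC2_IMM, src1 stays in its register, emit_single_op writes the
	   result into dst_r, and the final store below goes through
	   getput_arg_fast / getput_arg, using TMP_REG1 and TMP_REG3 as scratch. */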
	sljit_s32 dst_r = TMP_REG2;
	sljit_sw src2_r = 0;
	sljit_s32 sugg_src2_r = TMP_REG2;

	if (!(flags & ALT_KEEP_CACHE)) {
		compiler->cache_arg = 0;
		compiler->cache_argw = 0;

	if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
		SLJIT_ASSERT(HAS_FLAGS(op));
		flags |= UNUSED_DEST;
	else if (FAST_IS_REG(dst)) {
		if (op >= SLJIT_MOV && op <= SLJIT_MOV_P)
			sugg_src2_r = dst_r;
	else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, DR(TMP_REG1), dst, dstw))

	if (flags & IMM_OP) {
		if ((src2 & SLJIT_IMM) && src2w) {
			if ((!(flags & LOGICAL_OP) && (src2w <= SIMM_MAX && src2w >= SIMM_MIN))
				|| ((flags & LOGICAL_OP) && !(src2w & ~UIMM_MAX))) {
		if (!(flags & SRC2_IMM) && (flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w) {
			if ((!(flags & LOGICAL_OP) && (src1w <= SIMM_MAX && src1w >= SIMM_MIN))
				|| ((flags & LOGICAL_OP) && !(src1w & ~UIMM_MAX))) {
				/* And swap arguments. */
				/* src2w = src2_r unneeded. */

	if (FAST_IS_REG(src1)) {
		flags |= REG1_SOURCE;
	else if (src1 & SLJIT_IMM) {
			FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w));
		if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w))
			FAIL_IF(compiler->error);

	if (FAST_IS_REG(src2)) {
		flags |= REG2_SOURCE;
		if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOV_P)
	else if (src2 & SLJIT_IMM) {
		if (!(flags & SRC2_IMM)) {
				FAIL_IF(load_immediate(compiler, DR(sugg_src2_r), src2w));
				src2_r = sugg_src2_r;
		if ((op >= SLJIT_MOV && op <= SLJIT_MOV_P) && (dst & SLJIT_MEM))
		if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w))
			FAIL_IF(compiler->error);
			src2_r = sugg_src2_r;

	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
		SLJIT_ASSERT(src2_r == TMP_REG2);
		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, src1, src1w));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, src2, src2w));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, dst, dstw));
	else if (flags & SLOW_SRC1)
		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
	else if (flags & SLOW_SRC2)
		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w, dst, dstw));

	FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));

	if (dst & SLJIT_MEM) {
		if (!(flags & SLOW_DEST)) {
			getput_arg_fast(compiler, flags, DR(dst_r), dst, dstw);
			return compiler->error;
		return getput_arg(compiler, flags, DR(dst_r), dst, dstw, 0, 0);
	return SLJIT_SUCCESS;

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	sljit_s32 int_op = op & SLJIT_I32_OP;

	CHECK(check_sljit_emit_op0(compiler, op));

	op = GET_OPCODE(op);
	case SLJIT_BREAKPOINT:
		return push_inst(compiler, BREAK, UNMOVABLE_INS);
		return push_inst(compiler, NOP, UNMOVABLE_INS);
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? DMULU : DMUL) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? DMUHU : DMUH) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? MULU : MUL) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? MUHU : MUH) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | TA(0) | D(SLJIT_R0), DR(SLJIT_R0)));
		return push_inst(compiler, ADDU_W | S(TMP_REG1) | TA(0) | D(SLJIT_R1), DR(SLJIT_R1));
#else /* SLJIT_MIPS_REV < 6 */
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? DMULTU : DMULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? MULTU : MULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0)));
		return push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1));
#endif /* SLJIT_MIPS_REV >= 6 */
	case SLJIT_DIVMOD_UW:
	case SLJIT_DIVMOD_SW:
		SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? MODU : MOD) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DDIVU : DDIV) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DMODU : DMOD) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG3), DR(TMP_REG3)));
		FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? MODU : MOD) | S(SLJIT_R0) | T(SLJIT_R1) | D(TMP_REG1), DR(TMP_REG1)));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | TA(0) | D(SLJIT_R0), DR(SLJIT_R0)));
		return (op >= SLJIT_DIV_UW) ? SLJIT_SUCCESS : push_inst(compiler, ADDU_W | S(TMP_REG1) | TA(0) | D(SLJIT_R1), DR(SLJIT_R1));
#else /* SLJIT_MIPS_REV < 6 */
#if !(defined SLJIT_MIPS_REV)
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
		FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
#endif /* !SLJIT_MIPS_REV */
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
			FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DDIVU : DDIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#else /* !SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS));
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0)));
		return (op >= SLJIT_DIV_UW) ? SLJIT_SUCCESS : push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1));
#endif /* SLJIT_MIPS_REV >= 6 */
	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
		return SLJIT_SUCCESS;

	return SLJIT_SUCCESS;

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
	sljit_s32 src, sljit_sw srcw)
	if (!(src & OFFS_REG_MASK)) {
		if (srcw <= SIMM_MAX && srcw >= SIMM_MIN)
			return push_inst(compiler, PREF | S(src & REG_MASK) | IMM(srcw), MOVABLE_INS);

		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
		return push_inst(compiler, PREFX | S(src & REG_MASK) | T(TMP_REG1), MOVABLE_INS);

	if (SLJIT_UNLIKELY(srcw != 0)) {
		FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(src)) | D(TMP_REG1) | SH_IMM(srcw), DR(TMP_REG1)));
		return push_inst(compiler, PREFX | S(src & REG_MASK) | T(TMP_REG1), MOVABLE_INS);

	return push_inst(compiler, PREFX | S(src & REG_MASK) | T(OFFS_REG(src)), MOVABLE_INS);
#endif /* SLJIT_MIPS_REV >= 1 */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	sljit_s32 flags = 0;

	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src, srcw);

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if ((op & SLJIT_I32_OP) && GET_OPCODE(op) >= SLJIT_NOT)
		flags |= INT_DATA | SIGNED_DATA;

	switch (GET_OPCODE(op)) {
		return emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		return emit_op(compiler, SLJIT_MOV_U32, INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
		return emit_op(compiler, SLJIT_MOV_U32, INT_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u32)srcw : srcw);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
		return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s32)srcw : srcw);
		return emit_op(compiler, SLJIT_MOV_U8, BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
		return emit_op(compiler, SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
		return emit_op(compiler, SLJIT_MOV_U16, HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
		return emit_op(compiler, SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
		return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD_SUB;
		return emit_op(compiler, SLJIT_SUB | GET_ALL_FLAGS(op), flags | IMM_OP, dst, dstw, SLJIT_IMM, 0, src, srcw);
		return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);

	SLJIT_UNREACHABLE();
	return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	sljit_s32 flags = 0;

	CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
		return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (op & SLJIT_I32_OP) {
		flags |= INT_DATA | SIGNED_DATA;
		if (src1 & SLJIT_IMM)
			src1w = (sljit_s32)src1w;
		if (src2 & SLJIT_IMM)
			src2w = (sljit_s32)src2w;

	switch (GET_OPCODE(op)) {
		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD_SUB;
		return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD_SUB;
		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
		compiler->status_flags_state = 0;
		return emit_op(compiler, op, flags | CUMULATIVE_OP, dst, dstw, src1, src1w, src2, src2w);
		return emit_op(compiler, op, flags | CUMULATIVE_OP | LOGICAL_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		if (src2 & SLJIT_IMM)
		if (src2 & SLJIT_IMM) {
			if (op & SLJIT_I32_OP)
		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

	SLJIT_UNREACHABLE();
	return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src, sljit_sw srcw)
	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	case SLJIT_FAST_RETURN:
		if (FAST_IS_REG(src))
			FAIL_IF(push_inst(compiler, ADDU_W | S(src) | TA(0) | DA(RETURN_ADDR_REG), RETURN_ADDR_REG));
			FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, RETURN_ADDR_REG, src, srcw));

		FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS));
		return push_inst(compiler, NOP, UNMOVABLE_INS);
	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
		return SLJIT_SUCCESS;
	case SLJIT_PREFETCH_L1:
	case SLJIT_PREFETCH_L2:
	case SLJIT_PREFETCH_L3:
	case SLJIT_PREFETCH_ONCE:
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1)
		return emit_prefetch(compiler, src, srcw);
#else /* SLJIT_MIPS_REV < 1 */
		return SLJIT_SUCCESS;
#endif /* SLJIT_MIPS_REV >= 1 */

	return SLJIT_SUCCESS;

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
	return reg_map[reg];

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
	void *instruction, sljit_s32 size)
	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));

	return push_inst(compiler, *(sljit_ins*)instruction, UNMOVABLE_INS);
/* --------------------------------------------------------------------- */
/*  Floating point operators                                              */
/* --------------------------------------------------------------------- */

#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 7))
#define FMT(op) (((op & SLJIT_F32_OP) ^ SLJIT_F32_OP) << (21 - 8))
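
/* Illustrative note (not part of the original source): FLOAT_DATA(op) picks
   the double or single transfer index (SLJIT_F32_OP is 0x100, so the shift
   by 7 produces the 0x2 "single" bit), while FMT(op) adds (1 << 21) for
   double precision operations, turning an FMT_S based opcode such as ADD_S
   into its FMT_D form, and adds nothing when SLJIT_F32_OP is set. */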
1530 static SLJIT_INLINE sljit_s32
sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler
*compiler
, sljit_s32 op
,
1531 sljit_s32 dst
, sljit_sw dstw
,
1532 sljit_s32 src
, sljit_sw srcw
)
1534 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
1537 sljit_s32 flags
= (GET_OPCODE(op
) == SLJIT_CONV_SW_FROM_F64
) << 21;
1540 if (src
& SLJIT_MEM
) {
1541 FAIL_IF(emit_op_mem2(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG1
), src
, srcw
, dst
, dstw
));
1545 FAIL_IF(push_inst(compiler
, (TRUNC_W_S
^ (flags
>> 19)) | FMT(op
) | FS(src
) | FD(TMP_FREG1
), MOVABLE_INS
));
1547 if (FAST_IS_REG(dst
))
1548 return push_inst(compiler
, MFC1
| flags
| T(dst
) | FS(TMP_FREG1
), MOVABLE_INS
);
1550 /* Store the integer value from a VFP register. */
1551 return emit_op_mem2(compiler
, flags
? DOUBLE_DATA
: SINGLE_DATA
, FR(TMP_FREG1
), dst
, dstw
, 0, 0);
1553 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
1558 static SLJIT_INLINE sljit_s32
sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler
*compiler
, sljit_s32 op
,
1559 sljit_s32 dst
, sljit_sw dstw
,
1560 sljit_s32 src
, sljit_sw srcw
)
1562 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
1565 sljit_s32 flags
= (GET_OPCODE(op
) == SLJIT_CONV_F64_FROM_SW
) << 21;
1568 sljit_s32 dst_r
= FAST_IS_REG(dst
) ? dst
: TMP_FREG1
;
1570 if (FAST_IS_REG(src
))
1571 FAIL_IF(push_inst(compiler
, MTC1
| flags
| T(src
) | FS(TMP_FREG1
), MOVABLE_INS
));
1572 else if (src
& SLJIT_MEM
) {
1573 /* Load the integer value into a VFP register. */
1574 FAIL_IF(emit_op_mem2(compiler
, ((flags
) ? DOUBLE_DATA
: SINGLE_DATA
) | LOAD_DATA
, FR(TMP_FREG1
), src
, srcw
, dst
, dstw
));
1577 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1578 if (GET_OPCODE(op
) == SLJIT_CONV_F64_FROM_S32
)
1579 srcw
= (sljit_s32
)srcw
;
1581 FAIL_IF(load_immediate(compiler
, DR(TMP_REG1
), srcw
));
1582 FAIL_IF(push_inst(compiler
, MTC1
| flags
| T(TMP_REG1
) | FS(TMP_FREG1
), MOVABLE_INS
));
1585 FAIL_IF(push_inst(compiler
, CVT_S_S
| flags
| (4 << 21) | (((op
& SLJIT_F32_OP
) ^ SLJIT_F32_OP
) >> 8) | FS(TMP_FREG1
) | FD(dst_r
), MOVABLE_INS
));
1587 if (dst
& SLJIT_MEM
)
1588 return emit_op_mem2(compiler
, FLOAT_DATA(op
), FR(TMP_FREG1
), dst
, dstw
, 0, 0);
1589 return SLJIT_SUCCESS
;
1591 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
1596 static SLJIT_INLINE sljit_s32
sljit_emit_fop1_cmp(struct sljit_compiler
*compiler
, sljit_s32 op
,
1597 sljit_s32 src1
, sljit_sw src1w
,
1598 sljit_s32 src2
, sljit_sw src2w
)
1602 if (src1
& SLJIT_MEM
) {
1603 FAIL_IF(emit_op_mem2(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG1
), src1
, src1w
, src2
, src2w
));
1607 if (src2
& SLJIT_MEM
) {
1608 FAIL_IF(emit_op_mem2(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG2
), src2
, src2w
, 0, 0));
1612 switch (GET_FLAG_TYPE(op
)) {
1613 case SLJIT_EQUAL_F64
:
1614 case SLJIT_NOT_EQUAL_F64
:
1617 case SLJIT_LESS_F64
:
1618 case SLJIT_GREATER_EQUAL_F64
:
1621 case SLJIT_GREATER_F64
:
1622 case SLJIT_LESS_EQUAL_F64
:
1626 SLJIT_ASSERT(GET_FLAG_TYPE(op
) == SLJIT_UNORDERED_F64
|| GET_FLAG_TYPE(op
) == SLJIT_ORDERED_F64
);
1630 return push_inst(compiler
, inst
| FMT(op
) | FT(src2
) | FS(src1
) | C_FD
, UNMOVABLE_INS
);
1633 SLJIT_API_FUNC_ATTRIBUTE sljit_s32
sljit_emit_fop1(struct sljit_compiler
*compiler
, sljit_s32 op
,
1634 sljit_s32 dst
, sljit_sw dstw
,
1635 sljit_s32 src
, sljit_sw srcw
)
1640 compiler
->cache_arg
= 0;
1641 compiler
->cache_argw
= 0;
1643 SLJIT_COMPILE_ASSERT((SLJIT_F32_OP
== 0x100) && !(DOUBLE_DATA
& 0x2), float_transfer_bit_error
);
1644 SELECT_FOP1_OPERATION_WITH_CHECKS(compiler
, op
, dst
, dstw
, src
, srcw
);
1646 if (GET_OPCODE(op
) == SLJIT_CONV_F64_FROM_F32
)
1649 dst_r
= FAST_IS_REG(dst
) ? dst
: TMP_FREG1
;
1651 if (src
& SLJIT_MEM
) {
1652 FAIL_IF(emit_op_mem2(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(dst_r
), src
, srcw
, dst
, dstw
));
1656 switch (GET_OPCODE(op
)) {
1659 if (dst_r
!= TMP_FREG1
)
1660 FAIL_IF(push_inst(compiler
, MOV_S
| FMT(op
) | FS(src
) | FD(dst_r
), MOVABLE_INS
));
1666 FAIL_IF(push_inst(compiler
, NEG_S
| FMT(op
) | FS(src
) | FD(dst_r
), MOVABLE_INS
));
1669 FAIL_IF(push_inst(compiler
, ABS_S
| FMT(op
) | FS(src
) | FD(dst_r
), MOVABLE_INS
));
1671 case SLJIT_CONV_F64_FROM_F32
:
1672 FAIL_IF(push_inst(compiler
, CVT_S_S
| ((op
& SLJIT_F32_OP
) ? 1 : (1 << 21)) | FS(src
) | FD(dst_r
), MOVABLE_INS
));
1677 if (dst
& SLJIT_MEM
)
1678 return emit_op_mem2(compiler
, FLOAT_DATA(op
), FR(dst_r
), dst
, dstw
, 0, 0);
1679 return SLJIT_SUCCESS
;
1682 SLJIT_API_FUNC_ATTRIBUTE sljit_s32
sljit_emit_fop2(struct sljit_compiler
*compiler
, sljit_s32 op
,
1683 sljit_s32 dst
, sljit_sw dstw
,
1684 sljit_s32 src1
, sljit_sw src1w
,
1685 sljit_s32 src2
, sljit_sw src2w
)
1687 sljit_s32 dst_r
, flags
= 0;
1690 CHECK(check_sljit_emit_fop2(compiler
, op
, dst
, dstw
, src1
, src1w
, src2
, src2w
));
1691 ADJUST_LOCAL_OFFSET(dst
, dstw
);
1692 ADJUST_LOCAL_OFFSET(src1
, src1w
);
1693 ADJUST_LOCAL_OFFSET(src2
, src2w
);
1695 compiler
->cache_arg
= 0;
1696 compiler
->cache_argw
= 0;
1698 dst_r
= FAST_IS_REG(dst
) ? dst
: TMP_FREG2
;
1700 if (src1
& SLJIT_MEM
) {
1701 if (getput_arg_fast(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG1
), src1
, src1w
)) {
1702 FAIL_IF(compiler
->error
);
1708 if (src2
& SLJIT_MEM
) {
1709 if (getput_arg_fast(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG2
), src2
, src2w
)) {
1710 FAIL_IF(compiler
->error
);
1716 if ((flags
& (SLOW_SRC1
| SLOW_SRC2
)) == (SLOW_SRC1
| SLOW_SRC2
)) {
1717 if (!can_cache(src1
, src1w
, src2
, src2w
) && can_cache(src1
, src1w
, dst
, dstw
)) {
1718 FAIL_IF(getput_arg(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG2
), src2
, src2w
, src1
, src1w
));
1719 FAIL_IF(getput_arg(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG1
), src1
, src1w
, dst
, dstw
));
1722 FAIL_IF(getput_arg(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG1
), src1
, src1w
, src2
, src2w
));
1723 FAIL_IF(getput_arg(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG2
), src2
, src2w
, dst
, dstw
));
1726 else if (flags
& SLOW_SRC1
)
1727 FAIL_IF(getput_arg(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG1
), src1
, src1w
, dst
, dstw
));
1728 else if (flags
& SLOW_SRC2
)
1729 FAIL_IF(getput_arg(compiler
, FLOAT_DATA(op
) | LOAD_DATA
, FR(TMP_FREG2
), src2
, src2w
, dst
, dstw
));
1731 if (flags
& SLOW_SRC1
)
1733 if (flags
& SLOW_SRC2
)
1736 switch (GET_OPCODE(op
)) {
1738 FAIL_IF(push_inst(compiler
, ADD_S
| FMT(op
) | FT(src2
) | FS(src1
) | FD(dst_r
), MOVABLE_INS
));
1742 FAIL_IF(push_inst(compiler
, SUB_S
| FMT(op
) | FT(src2
) | FS(src1
) | FD(dst_r
), MOVABLE_INS
));
1746 FAIL_IF(push_inst(compiler
, MUL_S
| FMT(op
) | FT(src2
) | FS(src1
) | FD(dst_r
), MOVABLE_INS
));
1750 FAIL_IF(push_inst(compiler
, DIV_S
| FMT(op
) | FT(src2
) | FS(src1
) | FD(dst_r
), MOVABLE_INS
));
1754 if (dst_r
== TMP_FREG2
)
1755 FAIL_IF(emit_op_mem2(compiler
, FLOAT_DATA(op
), FR(TMP_FREG2
), dst
, dstw
, 0, 0));
1757 return SLJIT_SUCCESS
;
1760 /* --------------------------------------------------------------------- */
1761 /* Other instructions */
1762 /* --------------------------------------------------------------------- */
1764 SLJIT_API_FUNC_ATTRIBUTE sljit_s32
sljit_emit_fast_enter(struct sljit_compiler
*compiler
, sljit_s32 dst
, sljit_sw dstw
)
1767 CHECK(check_sljit_emit_fast_enter(compiler
, dst
, dstw
));
1768 ADJUST_LOCAL_OFFSET(dst
, dstw
);
1770 if (FAST_IS_REG(dst
))
1771 return push_inst(compiler
, ADDU_W
| SA(RETURN_ADDR_REG
) | TA(0) | D(dst
), UNMOVABLE_INS
);
1774 FAIL_IF(emit_op_mem(compiler
, WORD_DATA
, RETURN_ADDR_REG
, dst
, dstw
));
1775 compiler
->delay_slot
= UNMOVABLE_INS
;
1776 return SLJIT_SUCCESS
;
/* --------------------------------------------------------------------- */
/*  Conditional instructions                                              */
/* --------------------------------------------------------------------- */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
{
	struct sljit_label *label;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_label(compiler));

	if (compiler->last_label && compiler->last_label->size == compiler->size)
		return compiler->last_label;

	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
	PTR_FAIL_IF(!label);
	set_label(label, compiler);
	compiler->delay_slot = UNMOVABLE_INS;
	return label;
}
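
/* A conditional sljit jump is emitted as an inverted MIPS branch that skips the far
   jump sequence (constant load into TMP_REG2, JR and its delay slot) when the
   condition is false. JUMP_LENGTH is the branch offset in instructions: the constant
   load is 2 words on MIPS32 and 6 words on MIPS64, plus JR and the delay slot NOP.
   The IS_BIT16/23/26_COND flags record which opcode bit encodes the branch polarity,
   so the jump resolver can flip the condition if the jump is later converted into a
   direct conditional branch. */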
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define JUMP_LENGTH	4
#else /* !SLJIT_CONFIG_MIPS_32 */
#define JUMP_LENGTH	8
#endif /* SLJIT_CONFIG_MIPS_32 */

#define BR_Z(src) \
	inst = BEQ | SA(src) | TA(0) | JUMP_LENGTH; \
	flags = IS_BIT26_COND; \
	delay_check = src;

#define BR_NZ(src) \
	inst = BNE | SA(src) | TA(0) | JUMP_LENGTH; \
	flags = IS_BIT26_COND; \
	delay_check = src;
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)

#define BR_T() \
	inst = BC1NEZ | JUMP_LENGTH; \
	flags = IS_BIT23_COND; \
	delay_check = FCSR_FCC;
#define BR_F() \
	inst = BC1EQZ | JUMP_LENGTH; \
	flags = IS_BIT23_COND; \
	delay_check = FCSR_FCC;

#else /* SLJIT_MIPS_REV < 6 */

#define BR_T() \
	inst = BC1T | JUMP_LENGTH; \
	flags = IS_BIT16_COND; \
	delay_check = FCSR_FCC;
#define BR_F() \
	inst = BC1F | JUMP_LENGTH; \
	flags = IS_BIT16_COND; \
	delay_check = FCSR_FCC;

#endif /* SLJIT_MIPS_REV >= 6 */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
	struct sljit_jump *jump;
	sljit_ins inst;
	sljit_s32 flags = 0;
	sljit_s32 delay_check = UNMOVABLE_INS;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_jump(compiler, type));

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;

	switch (type) {
	case SLJIT_EQUAL:
		BR_NZ(EQUAL_FLAG);
		break;
	case SLJIT_NOT_EQUAL:
		BR_Z(EQUAL_FLAG);
		break;
	case SLJIT_LESS:
	case SLJIT_GREATER:
	case SLJIT_SIG_LESS:
	case SLJIT_SIG_GREATER:
	case SLJIT_OVERFLOW:
		BR_Z(OTHER_FLAG);
		break;
	case SLJIT_GREATER_EQUAL:
	case SLJIT_LESS_EQUAL:
	case SLJIT_SIG_GREATER_EQUAL:
	case SLJIT_SIG_LESS_EQUAL:
	case SLJIT_NOT_OVERFLOW:
		BR_NZ(OTHER_FLAG);
		break;
	case SLJIT_NOT_EQUAL_F64:
	case SLJIT_GREATER_EQUAL_F64:
	case SLJIT_GREATER_F64:
	case SLJIT_ORDERED_F64:
		BR_T();
		break;
	case SLJIT_EQUAL_F64:
	case SLJIT_LESS_F64:
	case SLJIT_LESS_EQUAL_F64:
	case SLJIT_UNORDERED_F64:
		BR_F();
		break;
	default:
		/* Not conditional branch. */
		inst = 0;
		break;
	}

	jump->flags |= flags;
	if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != delay_check))
		jump->flags |= IS_MOVABLE;

	if (inst)
		PTR_FAIL_IF(push_inst(compiler, inst, UNMOVABLE_INS));

	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));

	if (type <= SLJIT_JUMP)
		PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
	else {
		jump->flags |= IS_JAL;
		PTR_FAIL_IF(push_inst(compiler, JALR | S(TMP_REG2) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
	}

	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return jump;
}
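
/* RESOLVE_IMM1 / RESOLVE_IMM2 turn immediate sources into registers: a non-zero
   immediate is loaded into TMP_REG1 / TMP_REG2, while a zero immediate is mapped
   to register 0 so the compare can use $zero directly. */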
#define RESOLVE_IMM1() \
	if (src1 & SLJIT_IMM) { \
		if (src1w) { \
			PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w)); \
			src1 = TMP_REG1; \
		} \
		else \
			src1 = 0; \
	}
#define RESOLVE_IMM2() \
	if (src2 & SLJIT_IMM) { \
		if (src2w) { \
			PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG2), src2w)); \
			src2 = TMP_REG2; \
		} \
		else \
			src2 = 0; \
	}
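
/* Compare and jump. Equality tests branch on the two operands directly; ordered
   comparisons are reduced to an SLT/SLTU (or branch-on-zero) form, and the same
   inverted branch + far jump pattern as sljit_emit_jump is emitted. */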
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	struct sljit_jump *jump;
	sljit_s32 flags;
	sljit_ins inst;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;
	flags = ((type & SLJIT_I32_OP) ? INT_DATA : WORD_DATA) | LOAD_DATA;
	if (src1 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG1), src1, src1w, src2, src2w));
		src1 = TMP_REG1;
	}
	if (src2 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG2), src2, src2w, 0, 0));
		src2 = TMP_REG2;
	}

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;
	if (type <= SLJIT_NOT_EQUAL) {
		RESOLVE_IMM1();
		RESOLVE_IMM2();
		jump->flags |= IS_BIT26_COND;
		if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != DR(src1) && compiler->delay_slot != DR(src2)))
			jump->flags |= IS_MOVABLE;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(src1) | T(src2) | JUMP_LENGTH, UNMOVABLE_INS));
	}
	else if (type >= SLJIT_SIG_LESS && (((src1 & SLJIT_IMM) && (src1w == 0)) || ((src2 & SLJIT_IMM) && (src2w == 0)))) {
		inst = NOP;
		if ((src1 & SLJIT_IMM) && (src1w == 0)) {
			RESOLVE_IMM2();
			switch (type) {
			case SLJIT_SIG_LESS:
				inst = BLEZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_SIG_GREATER_EQUAL:
				inst = BGTZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_SIG_GREATER:
				inst = BGEZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_SIG_LESS_EQUAL:
				inst = BLTZ;
				jump->flags |= IS_BIT16_COND;
				break;
			}
			src1 = src2;
		}
		else {
			RESOLVE_IMM1();
			switch (type) {
			case SLJIT_SIG_LESS:
				inst = BGEZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_SIG_GREATER_EQUAL:
				inst = BLTZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_SIG_GREATER:
				inst = BLEZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_SIG_LESS_EQUAL:
				inst = BGTZ;
				jump->flags |= IS_BIT26_COND;
				break;
			}
		}
		PTR_FAIL_IF(push_inst(compiler, inst | S(src1) | JUMP_LENGTH, UNMOVABLE_INS));
	}
	else {
		if (type == SLJIT_LESS || type == SLJIT_GREATER_EQUAL || type == SLJIT_SIG_LESS || type == SLJIT_SIG_GREATER_EQUAL) {
			RESOLVE_IMM1();
			if ((src2 & SLJIT_IMM) && src2w <= SIMM_MAX && src2w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src1) | T(TMP_REG1) | IMM(src2w), DR(TMP_REG1)));
			else {
				RESOLVE_IMM2();
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTU : SLT) | S(src1) | T(src2) | D(TMP_REG1), DR(TMP_REG1)));
			}
			type = (type == SLJIT_LESS || type == SLJIT_SIG_LESS) ? SLJIT_NOT_EQUAL : SLJIT_EQUAL;
		}
		else {
			RESOLVE_IMM2();
			if ((src1 & SLJIT_IMM) && src1w <= SIMM_MAX && src1w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src2) | T(TMP_REG1) | IMM(src1w), DR(TMP_REG1)));
			else {
				RESOLVE_IMM1();
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTU : SLT) | S(src2) | T(src1) | D(TMP_REG1), DR(TMP_REG1)));
			}
			type = (type == SLJIT_GREATER || type == SLJIT_SIG_GREATER) ? SLJIT_NOT_EQUAL : SLJIT_EQUAL;
		}

		jump->flags |= IS_BIT26_COND;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(TMP_REG1) | TA(0) | JUMP_LENGTH, UNMOVABLE_INS));
	}

	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return jump;
}
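
/* Indirect jumps and fast calls. An immediate target is recorded as a patchable
   constant load (emit_const) followed by JR; memory sources are first loaded into
   TMP_REG2. */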
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
	struct sljit_jump *jump = NULL;

	CHECK_ERROR();
	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	if (src & SLJIT_IMM) {
		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
		FAIL_IF(!jump);
		set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_JAL : 0));
		jump->u.target = srcw;

		if (compiler->delay_slot != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;

		FAIL_IF(emit_const(compiler, TMP_REG2, 0));
		src = TMP_REG2;
	}
	else if (src & SLJIT_MEM) {
		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, DR(TMP_REG2), src, srcw));
		src = TMP_REG2;
	}

	FAIL_IF(push_inst(compiler, JR | S(src), UNMOVABLE_INS));
	if (jump)
		jump->addr = compiler->size;
	FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return SLJIT_SUCCESS;
}
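
/* Materialize the current status flags as a 0/1 value in dst. For op >= SLJIT_ADD
   the value is combined with the previous contents of dst through emit_op. */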
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 type)
{
	sljit_s32 src_ar, dst_ar;
	sljit_s32 saved_op = op;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	sljit_s32 mem_type = WORD_DATA;
#else /* !SLJIT_CONFIG_MIPS_32 */
	sljit_s32 mem_type = (op & SLJIT_I32_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
#endif /* SLJIT_CONFIG_MIPS_32 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	op = GET_OPCODE(op);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (op == SLJIT_MOV_S32)
		mem_type = INT_DATA | SIGNED_DATA;
#endif /* SLJIT_CONFIG_MIPS_64 */
	dst_ar = DR((op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
		FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, DR(TMP_REG1), dst, dstw, dst, dstw));
	switch (type & 0xff) {
	case SLJIT_EQUAL:
	case SLJIT_NOT_EQUAL:
		FAIL_IF(push_inst(compiler, SLTIU | SA(EQUAL_FLAG) | TA(dst_ar) | IMM(1), dst_ar));
		src_ar = dst_ar;
		break;
	case SLJIT_OVERFLOW:
	case SLJIT_NOT_OVERFLOW:
		if (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_ADD_SUB) {
			src_ar = OTHER_FLAG;
			break;
		}
		FAIL_IF(push_inst(compiler, SLTIU | SA(OTHER_FLAG) | TA(dst_ar) | IMM(1), dst_ar));
		src_ar = dst_ar;
		type ^= 0x1; /* Flip type bit for the XORI below. */
		break;
	case SLJIT_GREATER_F64:
	case SLJIT_LESS_EQUAL_F64:
		type ^= 0x1; /* Flip type bit for the XORI below. */
	case SLJIT_EQUAL_F64:
	case SLJIT_NOT_EQUAL_F64:
	case SLJIT_LESS_F64:
	case SLJIT_GREATER_EQUAL_F64:
	case SLJIT_UNORDERED_F64:
	case SLJIT_ORDERED_F64:
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 6)
		FAIL_IF(push_inst(compiler, MFC1 | TA(dst_ar) | FS(TMP_FREG3), dst_ar));
#else /* SLJIT_MIPS_REV < 6 */
		FAIL_IF(push_inst(compiler, CFC1 | TA(dst_ar) | DA(FCSR_REG), dst_ar));
#endif /* SLJIT_MIPS_REV >= 6 */
		FAIL_IF(push_inst(compiler, SRL | TA(dst_ar) | DA(dst_ar) | SH_IMM(23), dst_ar));
		FAIL_IF(push_inst(compiler, ANDI | SA(dst_ar) | TA(dst_ar) | IMM(1), dst_ar));
		src_ar = dst_ar;
		break;

	default:
		src_ar = OTHER_FLAG;
		break;
	}

	if (type & 0x1) {
		FAIL_IF(push_inst(compiler, XORI | SA(src_ar) | TA(dst_ar) | IMM(1), dst_ar));
		src_ar = dst_ar;
	}
	if (op < SLJIT_ADD) {
		if (dst & SLJIT_MEM)
			return emit_op_mem(compiler, mem_type, src_ar, dst, dstw);

		if (src_ar != dst_ar)
			return push_inst(compiler, ADDU_W | SA(src_ar) | TA(0) | DA(dst_ar), dst_ar);
		return SLJIT_SUCCESS;
	}

	/* OTHER_FLAG cannot be specified as src2 argument at the moment. */
	if (DR(TMP_REG2) != src_ar)
		FAIL_IF(push_inst(compiler, ADDU_W | SA(src_ar) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));

	mem_type |= CUMULATIVE_OP | LOGICAL_OP | IMM_OP | ALT_KEEP_CACHE;

	if (dst & SLJIT_MEM)
		return emit_op(compiler, saved_op, mem_type, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
	return emit_op(compiler, saved_op, mem_type, dst, dstw, dst, dstw, TMP_REG2, 0);
}
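
/* Conditional move. On MIPS32/MIPS64 R1-R5 this maps directly to MOVZ/MOVN for
   the integer flags and MOVT/MOVF for the floating point condition; on MIPS III
   and R6 the shared sljit_emit_cmov_generic fallback (a conditional branch
   around a move) is used instead. */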
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_reg,
	sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)
	sljit_ins ins;
#endif /* SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6 */

	CHECK_ERROR();
	CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));

#if (defined SLJIT_MIPS_REV && SLJIT_MIPS_REV >= 1 && SLJIT_MIPS_REV < 6)

	if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
		if (dst_reg & SLJIT_I32_OP)
			srcw = (sljit_s32)srcw;
#endif /* SLJIT_CONFIG_MIPS_64 */
		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
		src = TMP_REG1;
		srcw = 0;
	}

	dst_reg &= ~SLJIT_I32_OP;
	switch (type & 0xff) {
	case SLJIT_EQUAL:
		ins = MOVZ | TA(EQUAL_FLAG);
		break;
	case SLJIT_NOT_EQUAL:
		ins = MOVN | TA(EQUAL_FLAG);
		break;
	case SLJIT_LESS:
	case SLJIT_GREATER:
	case SLJIT_SIG_LESS:
	case SLJIT_SIG_GREATER:
	case SLJIT_OVERFLOW:
		ins = MOVN | TA(OTHER_FLAG);
		break;
	case SLJIT_GREATER_EQUAL:
	case SLJIT_LESS_EQUAL:
	case SLJIT_SIG_GREATER_EQUAL:
	case SLJIT_SIG_LESS_EQUAL:
	case SLJIT_NOT_OVERFLOW:
		ins = MOVZ | TA(OTHER_FLAG);
		break;
	case SLJIT_EQUAL_F64:
	case SLJIT_LESS_F64:
	case SLJIT_LESS_EQUAL_F64:
	case SLJIT_UNORDERED_F64:
		ins = MOVT;
		break;
	case SLJIT_NOT_EQUAL_F64:
	case SLJIT_GREATER_EQUAL_F64:
	case SLJIT_GREATER_F64:
	case SLJIT_ORDERED_F64:
		ins = MOVF;
		break;
	default:
		ins = MOVZ | TA(OTHER_FLAG);
		SLJIT_UNREACHABLE();
		break;
	}

	return push_inst(compiler, ins | S(src) | D(dst_reg), DR(dst_reg));

#else /* SLJIT_MIPS_REV < 1 || SLJIT_MIPS_REV >= 6 */
	return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
#endif /* SLJIT_MIPS_REV >= 1 */
}
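
/* Emit a constant load whose value can be changed later with sljit_set_const;
   emit_const always reserves the full-length load sequence so any value fits
   when the code is patched. */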
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
	struct sljit_const *const_;
	sljit_s32 dst_r;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
	PTR_FAIL_IF(!const_);
	set_const(const_, compiler);

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
	PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));

	return const_;
}
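
/* Emit a load of a label address that is resolved during code generation. On
   MIPS64 a placeholder word holding the destination register is emitted and space
   for the remaining instructions of the load sequence is reserved. */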
SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	struct sljit_put_label *put_label;
	sljit_s32 dst_r;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
	PTR_FAIL_IF(!put_label);
	set_put_label(put_label, compiler, 0);

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	PTR_FAIL_IF(emit_const(compiler, dst_r, 0));
#else /* !SLJIT_CONFIG_MIPS_32 */
	PTR_FAIL_IF(push_inst(compiler, dst_r, UNMOVABLE_INS));
	compiler->size += 5;
#endif /* SLJIT_CONFIG_MIPS_32 */

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));