/*
 *    Stack-less Just-In-Time compiler
 *
 *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifdef __SOFTFP__
#define ARM_ABI_INFO " ABI:softfp"
#else
#define ARM_ABI_INFO " ABI:hardfp"
#endif
SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
	return "ARMv7" SLJIT_CPUINFO ARM_ABI_INFO;
#elif (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	return "ARMv5" SLJIT_CPUINFO ARM_ABI_INFO;
#else
#error "Internal error: Unknown ARM architecture"
#endif
}
/* Last register + 1. */
#define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)
#define TMP_PC		(SLJIT_NUMBER_OF_REGISTERS + 4)

#define TMP_FREG1	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
#define TMP_FREG2	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)

/* In ARM instruction words.
   Cache lines are usually 32 byte aligned. */
#define CONST_POOL_ALIGNMENT	8
#define CONST_POOL_EMPTY	0xffffffff

#define ALIGN_INSTRUCTION(ptr) \
	(sljit_uw*)(((sljit_uw)(ptr) + (CONST_POOL_ALIGNMENT * sizeof(sljit_uw)) - 1) & ~((CONST_POOL_ALIGNMENT * sizeof(sljit_uw)) - 1))
#define MAX_DIFFERENCE(max_diff) \
	(((max_diff) / (sljit_s32)sizeof(sljit_uw)) - (CONST_POOL_ALIGNMENT - 1))
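
/* Illustrative note (not part of the original file): with
   CONST_POOL_ALIGNMENT == 8, ALIGN_INSTRUCTION rounds a pointer up to the
   next 32-byte boundary, e.g. a pointer at byte offset 0x1004 moves to
   0x1020. MAX_DIFFERENCE(4092) yields 4092/4 - 7 = 1016 words, i.e. the
   4 KB ldr/str offset range minus the worst-case alignment padding. */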

/* See sljit_emit_enter and sljit_emit_op0 if you want to change them. */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
	0, 0, 1, 2, 3, 11, 10, 9, 8, 7, 6, 5, 4, 13, 12, 14, 15
};

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
	0, 0, 1, 2, 3, 4, 5, 15, 14, 13, 12, 11, 10, 9, 8, 6, 7
};
#define RM(rm) ((sljit_uw)reg_map[rm])
#define RM8(rm) ((sljit_uw)reg_map[rm] << 8)
#define RD(rd) ((sljit_uw)reg_map[rd] << 12)
#define RN(rn) ((sljit_uw)reg_map[rn] << 16)

#define VM(rm) ((sljit_uw)freg_map[rm])
#define VD(rd) ((sljit_uw)freg_map[rd] << 12)
#define VN(rn) ((sljit_uw)freg_map[rn] << 16)
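
/* Illustrative note (not part of the original file): the RD/RN/RM macros
   place a mapped ARM register number into the usual encoding fields, e.g.
   reg_map[TMP_REG2] == 14 (lr), so RD(TMP_REG2) == 14 << 12 selects lr as
   the destination register in bits 15:12 of the instruction word. */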

/* --------------------------------------------------------------------- */
/*  Instruction forms                                                    */
/* --------------------------------------------------------------------- */

/* The instruction includes the AL condition.
   INST_NAME - CONDITIONAL removes this flag. */
#define COND_MASK	0xf0000000
#define CONDITIONAL	0xe0000000
#define PUSH_POOL	0xff000000
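
/* Illustrative note (not part of the original file): every opcode below
   already contains the AL (0xe) condition. Subtracting CONDITIONAL strips
   it, so a conditional form can be built by OR-ing in another condition,
   e.g. (B - CONDITIONAL) | 0x00000000 encodes "beq" (condition EQ). */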

#define ADC		0xe0a00000
#define ADD		0xe0800000
#define AND		0xe0000000
#define B		0xea000000
#define BIC		0xe1c00000
#define BL		0xeb000000
#define BLX		0xe12fff30
#define BX		0xe12fff10
#define CLZ		0xe16f0f10
#define CMN		0xe1600000
#define CMP		0xe1400000
#define BKPT		0xe1200070
#define EOR		0xe0200000
#define LDR		0xe5100000
#define MOV		0xe1a00000
#define MUL		0xe0000090
#define MVN		0xe1e00000
#define NOP		0xe1a00000
#define ORR		0xe1800000
#define PUSH		0xe92d0000
#define POP		0xe8bd0000
#define RSB		0xe0600000
#define RSC		0xe0e00000
#define SBC		0xe0c00000
#define SMULL		0xe0c00090
#define STR		0xe5000000
#define SUB		0xe0400000
#define TST		0xe1000000
#define UMULL		0xe0800090
#define VABS_F32	0xeeb00ac0
#define VADD_F32	0xee300a00
#define VCMP_F32	0xeeb40a40
#define VCVT_F32_S32	0xeeb80ac0
#define VCVT_F64_F32	0xeeb70ac0
#define VCVT_S32_F32	0xeebd0ac0
#define VDIV_F32	0xee800a00
#define VLDR_F32	0xed100a00
#define VMOV_F32	0xeeb00a40
#define VMOV		0xee000a10
#define VMOV2		0xec400a10
#define VMRS		0xeef1fa10
#define VMUL_F32	0xee200a00
#define VNEG_F32	0xeeb10a40
#define VPOP		0xecbd0b00
#define VPUSH		0xed2d0b00
#define VSTR_F32	0xed000a00
#define VSUB_F32	0xee300a40

#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
/* Arm v7 specific instructions. */
#define MOVW		0xe3000000
#define MOVT		0xe3400000
#define SXTB		0xe6af0070
#define SXTH		0xe6bf0070
#define UXTB		0xe6ef0070
#define UXTH		0xe6ff0070
#endif

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)

static sljit_s32 push_cpool(struct sljit_compiler *compiler)
{
	/* Pushing the constant pool into the instruction stream. */
	sljit_uw* inst;
	sljit_uw* cpool_ptr;
	sljit_uw* cpool_end;
	sljit_s32 i;

	/* The label could point to the address after the constant pool. */
	if (compiler->last_label && compiler->last_label->size == compiler->size)
		compiler->last_label->size += compiler->cpool_fill + (CONST_POOL_ALIGNMENT - 1) + 1;

	SLJIT_ASSERT(compiler->cpool_fill > 0 && compiler->cpool_fill <= CPOOL_SIZE);
	inst = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
	FAIL_IF(!inst);
	compiler->size++;
	*inst = 0xff000000 | compiler->cpool_fill;

	for (i = 0; i < CONST_POOL_ALIGNMENT - 1; i++) {
		inst = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
		FAIL_IF(!inst);
		compiler->size++;
		*inst = 0;
	}

	cpool_ptr = compiler->cpool;
	cpool_end = cpool_ptr + compiler->cpool_fill;
	while (cpool_ptr < cpool_end) {
		inst = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
		FAIL_IF(!inst);
		compiler->size++;
		*inst = *cpool_ptr++;
	}
	compiler->cpool_diff = CONST_POOL_EMPTY;
	compiler->cpool_fill = 0;
	return SLJIT_SUCCESS;
}

static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_uw inst)
{
	sljit_uw* ptr;

	if (SLJIT_UNLIKELY(compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4092)))
		FAIL_IF(push_cpool(compiler));

	ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
	FAIL_IF(!ptr);
	compiler->size++;
	*ptr = inst;
	return SLJIT_SUCCESS;
}

static sljit_s32 push_inst_with_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
{
	sljit_uw* ptr;
	sljit_uw cpool_index = CPOOL_SIZE;
	sljit_uw* cpool_ptr;
	sljit_uw* cpool_end;
	sljit_u8* cpool_unique_ptr;

	if (SLJIT_UNLIKELY(compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4092)))
		FAIL_IF(push_cpool(compiler));
	else if (compiler->cpool_fill > 0) {
		cpool_ptr = compiler->cpool;
		cpool_end = cpool_ptr + compiler->cpool_fill;
		cpool_unique_ptr = compiler->cpool_unique;
		do {
			if ((*cpool_ptr == literal) && !(*cpool_unique_ptr)) {
				cpool_index = (sljit_uw)(cpool_ptr - compiler->cpool);
				break;
			}
			cpool_ptr++;
			cpool_unique_ptr++;
		} while (cpool_ptr < cpool_end);
	}

	if (cpool_index == CPOOL_SIZE) {
		/* Must allocate a new entry in the literal pool. */
		if (compiler->cpool_fill < CPOOL_SIZE) {
			cpool_index = compiler->cpool_fill;
			compiler->cpool_fill++;
		}
		else {
			FAIL_IF(push_cpool(compiler));
			cpool_index = 0;
			compiler->cpool_fill = 1;
		}
	}

	SLJIT_ASSERT((inst & 0xfff) == 0);
	ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
	FAIL_IF(!ptr);
	compiler->size++;
	*ptr = inst | cpool_index;

	compiler->cpool[cpool_index] = literal;
	compiler->cpool_unique[cpool_index] = 0;
	if (compiler->cpool_diff == CONST_POOL_EMPTY)
		compiler->cpool_diff = compiler->size;
	return SLJIT_SUCCESS;
}

static sljit_s32 push_inst_with_unique_literal(struct sljit_compiler *compiler, sljit_uw inst, sljit_uw literal)
{
	sljit_uw* ptr;

	if (SLJIT_UNLIKELY((compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4092)) || compiler->cpool_fill >= CPOOL_SIZE))
		FAIL_IF(push_cpool(compiler));

	SLJIT_ASSERT(compiler->cpool_fill < CPOOL_SIZE && (inst & 0xfff) == 0);
	ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
	FAIL_IF(!ptr);
	compiler->size++;
	*ptr = inst | compiler->cpool_fill;

	compiler->cpool[compiler->cpool_fill] = literal;
	compiler->cpool_unique[compiler->cpool_fill] = 1;
	compiler->cpool_fill++;
	if (compiler->cpool_diff == CONST_POOL_EMPTY)
		compiler->cpool_diff = compiler->size;
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 prepare_blx(struct sljit_compiler *compiler)
{
	/* Reserve space for at least two instructions (it does not matter whether the first has a literal). */
	if (SLJIT_UNLIKELY(compiler->cpool_diff != CONST_POOL_EMPTY && compiler->size - compiler->cpool_diff >= MAX_DIFFERENCE(4088)))
		return push_cpool(compiler);
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 emit_blx(struct sljit_compiler *compiler)
{
	/* Must immediately follow the previous instruction (so that it can be converted to a bl instruction). */
	SLJIT_ASSERT(compiler->cpool_diff == CONST_POOL_EMPTY || compiler->size - compiler->cpool_diff < MAX_DIFFERENCE(4092));
	SLJIT_ASSERT(reg_map[TMP_REG1] != 14);

	return push_inst(compiler, BLX | RM(TMP_REG1));
}

static sljit_uw patch_pc_relative_loads(sljit_uw *last_pc_patch, sljit_uw *code_ptr, sljit_uw* const_pool, sljit_uw cpool_size)
{
	sljit_uw diff;
	sljit_uw ind;
	sljit_uw counter = 0;
	sljit_uw* clear_const_pool = const_pool;
	sljit_uw* clear_const_pool_end = const_pool + cpool_size;

	SLJIT_ASSERT(const_pool - code_ptr <= CONST_POOL_ALIGNMENT);
	/* Set unused flag for all literals in the constant pool.
	   I.e.: unused literals can belong to branches, which can be encoded as B or BL.
	   We can "compress" the constant pool by discarding these literals. */
	while (clear_const_pool < clear_const_pool_end)
		*clear_const_pool++ = (sljit_uw)(-1);

	while (last_pc_patch < code_ptr) {
		/* Data transfer instruction with Rn == r15. */
		if ((*last_pc_patch & 0x0c0f0000) == 0x040f0000) {
			diff = (sljit_uw)(const_pool - last_pc_patch);
			ind = (*last_pc_patch) & 0xfff;

			/* Must be a load instruction with immediate offset. */
			SLJIT_ASSERT(ind < cpool_size && !(*last_pc_patch & (1 << 25)) && (*last_pc_patch & (1 << 20)));
			if ((sljit_s32)const_pool[ind] < 0) {
				const_pool[ind] = counter;
				ind = counter;
				counter++;
			}
			else
				ind = const_pool[ind];

			SLJIT_ASSERT(diff >= 1);
			if (diff >= 2 || ind > 0) {
				diff = (diff + (sljit_uw)ind - 2) << 2;
				SLJIT_ASSERT(diff <= 0xfff);
				*last_pc_patch = (*last_pc_patch & ~(sljit_uw)0xfff) | diff;
			}
			else
				*last_pc_patch = (*last_pc_patch & ~(sljit_uw)(0xfff | (1 << 23))) | 0x004;
		}
		last_pc_patch++;
	}
	return counter;
}

/* In some rare occasions we may need future patches. The probability is close to 0 in practice. */
struct future_patch {
	struct future_patch* next;
	sljit_s32 index;
	sljit_s32 value;
};

static sljit_s32 resolve_const_pool_index(struct sljit_compiler *compiler, struct future_patch **first_patch, sljit_uw cpool_current_index, sljit_uw *cpool_start_address, sljit_uw *buf_ptr)
{
	sljit_u32 value;
	struct future_patch *curr_patch, *prev_patch;

	SLJIT_UNUSED_ARG(compiler);

	/* Using the values generated by patch_pc_relative_loads. */
	if (!*first_patch)
		value = cpool_start_address[cpool_current_index];
	else {
		curr_patch = *first_patch;
		prev_patch = NULL;
		while (1) {
			if (!curr_patch) {
				value = cpool_start_address[cpool_current_index];
				break;
			}
			if ((sljit_uw)curr_patch->index == cpool_current_index) {
				value = (sljit_uw)curr_patch->value;
				if (prev_patch)
					prev_patch->next = curr_patch->next;
				else
					*first_patch = curr_patch->next;
				SLJIT_FREE(curr_patch, compiler->allocator_data);
				break;
			}
			prev_patch = curr_patch;
			curr_patch = curr_patch->next;
		}
	}

	if ((sljit_sw)value >= 0) {
		if (value > cpool_current_index) {
			curr_patch = (struct future_patch*)SLJIT_MALLOC(sizeof(struct future_patch), compiler->allocator_data);
			if (!curr_patch) {
				while (*first_patch) {
					curr_patch = *first_patch;
					*first_patch = (*first_patch)->next;
					SLJIT_FREE(curr_patch, compiler->allocator_data);
				}
				return SLJIT_ERR_ALLOC_FAILED;
			}
			curr_patch->next = *first_patch;
			curr_patch->index = (sljit_sw)value;
			curr_patch->value = (sljit_sw)cpool_start_address[value];
			*first_patch = curr_patch;
		}
		cpool_start_address[value] = *buf_ptr;
	}
	return SLJIT_SUCCESS;
}

#else

static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_uw inst)
{
	sljit_uw* ptr;

	ptr = (sljit_uw*)ensure_buf(compiler, sizeof(sljit_uw));
	FAIL_IF(!ptr);
	compiler->size++;
	*ptr = inst;
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 emit_imm(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)
{
	FAIL_IF(push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | ((sljit_u32)imm & 0xfff)));
	return push_inst(compiler, MOVT | RD(reg) | ((imm >> 12) & 0xf0000) | (((sljit_u32)imm >> 16) & 0xfff));
}
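
/* Illustrative note (not part of the original file): MOVW/MOVT split a
   16-bit immediate into imm4:imm12 (bits 19:16 and 11:0). For
   imm == 0x12345678, emit_imm produces
   movw reg, #0x5678  (imm4 = 0x5, imm12 = 0x678) followed by
   movt reg, #0x1234  (imm4 = 0x1, imm12 = 0x234). */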

#endif

static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_uw *code_ptr, sljit_uw *code, sljit_sw executable_offset)
{
	sljit_sw diff;

	if (jump->flags & SLJIT_REWRITABLE_JUMP)
		return 0;

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	if (jump->flags & IS_BL)
		code_ptr--;

	if (jump->flags & JUMP_ADDR)
		diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset);
	else {
		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
		diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2));
	}

	/* Branch to Thumb code has not been optimized yet. */
	if (diff & 0x3)
		return 0;

	if (jump->flags & IS_BL) {
		if (diff <= 0x01ffffff && diff >= -0x02000000) {
			*code_ptr = (BL - CONDITIONAL) | (*(code_ptr + 1) & COND_MASK);
			jump->flags |= PATCH_B;
			return 1;
		}
	}
	else {
		if (diff <= 0x01ffffff && diff >= -0x02000000) {
			*code_ptr = (B - CONDITIONAL) | (*code_ptr & COND_MASK);
			jump->flags |= PATCH_B;
		}
	}
#else
	if (jump->flags & JUMP_ADDR)
		diff = ((sljit_sw)jump->u.target - (sljit_sw)code_ptr - executable_offset);
	else {
		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
		diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)code_ptr);
	}

	/* Branch to Thumb code has not been optimized yet. */
	if (diff & 0x3)
		return 0;

	if (diff <= 0x01ffffff && diff >= -0x02000000) {
		code_ptr -= 2;
		*code_ptr = ((jump->flags & IS_BL) ? (BL - CONDITIONAL) : (B - CONDITIONAL)) | (code_ptr[2] & COND_MASK);
		jump->flags |= PATCH_B;
		return 1;
	}
#endif
	return 0;
}
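
/* Illustrative note (not part of the original file): the
   diff <= 0x01ffffff && diff >= -0x02000000 test checks the signed 26-bit
   byte range of the B/BL encoding (a 24-bit word offset shifted left by 2),
   so targets within roughly +/-32 MB are reachable with a direct branch and
   are marked PATCH_B; anything further keeps the constant-pool form. */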

static SLJIT_INLINE void inline_set_jump_addr(sljit_uw jump_ptr, sljit_sw executable_offset, sljit_uw new_addr, sljit_s32 flush_cache)
{
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	sljit_uw *ptr = (sljit_uw *)jump_ptr;
	sljit_uw *inst = (sljit_uw *)ptr[0];
	sljit_uw mov_pc = ptr[1];
	sljit_s32 bl = (mov_pc & 0x0000f000) != RD(TMP_PC);
	sljit_sw diff = (sljit_sw)(((sljit_sw)new_addr - (sljit_sw)(inst + 2) - executable_offset) >> 2);

	SLJIT_UNUSED_ARG(executable_offset);

	if (diff <= 0x7fffff && diff >= -0x800000) {
		/* Turn to branch. */
		if (!bl) {
			if (flush_cache) {
				SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 0);
			}
			inst[0] = (mov_pc & COND_MASK) | (B - CONDITIONAL) | (diff & 0xffffff);
			if (flush_cache) {
				SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
				inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
				SLJIT_CACHE_FLUSH(inst, inst + 1);
			}
		} else {
			if (flush_cache) {
				SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 0);
			}
			inst[0] = (mov_pc & COND_MASK) | (BL - CONDITIONAL) | (diff & 0xffffff);
			inst[1] = NOP;
			if (flush_cache) {
				SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
				inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
				SLJIT_CACHE_FLUSH(inst, inst + 2);
			}
		}
	} else {
		/* Get the position of the constant. */
		if (mov_pc & (1 << 23))
			ptr = inst + ((mov_pc & 0xfff) >> 2) + 2;
		else
			ptr = inst + 1;

		if (*inst != mov_pc) {
			if (flush_cache) {
				SLJIT_UPDATE_WX_FLAGS(inst, inst + (!bl ? 1 : 2), 0);
			}
			inst[0] = mov_pc;
			if (!bl) {
				if (flush_cache) {
					SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
					inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
					SLJIT_CACHE_FLUSH(inst, inst + 1);
				}
			} else {
				inst[1] = BLX | RM(TMP_REG1);
				if (flush_cache) {
					SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
					inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
					SLJIT_CACHE_FLUSH(inst, inst + 2);
				}
			}
		}

		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(ptr, ptr + 1, 0);
		}

		*ptr = new_addr;

		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(ptr, ptr + 1, 1);
		}
	}
#else
	sljit_uw *inst = (sljit_uw*)jump_ptr;

	SLJIT_UNUSED_ARG(executable_offset);

	SLJIT_ASSERT((inst[0] & 0xfff00000) == MOVW && (inst[1] & 0xfff00000) == MOVT);

	if (flush_cache) {
		SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 0);
	}

	inst[0] = MOVW | (inst[0] & 0xf000) | ((new_addr << 4) & 0xf0000) | (new_addr & 0xfff);
	inst[1] = MOVT | (inst[1] & 0xf000) | ((new_addr >> 12) & 0xf0000) | ((new_addr >> 16) & 0xfff);

	if (flush_cache) {
		SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
		inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
		SLJIT_CACHE_FLUSH(inst, inst + 2);
	}
#endif
}

static sljit_uw get_imm(sljit_uw imm);
static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm);

static SLJIT_INLINE void inline_set_const(sljit_uw addr, sljit_sw executable_offset, sljit_uw new_constant, sljit_s32 flush_cache)
{
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	sljit_uw *ptr = (sljit_uw*)addr;
	sljit_uw *inst = (sljit_uw*)ptr[0];
	sljit_uw ldr_literal = ptr[1];
	sljit_uw src2;

	SLJIT_UNUSED_ARG(executable_offset);

	src2 = get_imm(new_constant);
	if (src2) {
		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 0);
		}

		*inst = 0xe3a00000 | (ldr_literal & 0xf000) | src2;

		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
			inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
			SLJIT_CACHE_FLUSH(inst, inst + 1);
		}
		return;
	}

	src2 = get_imm(~new_constant);
	if (src2) {
		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 0);
		}

		*inst = 0xe3e00000 | (ldr_literal & 0xf000) | src2;

		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
			inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
			SLJIT_CACHE_FLUSH(inst, inst + 1);
		}
		return;
	}

	if (ldr_literal & (1 << 23))
		ptr = inst + ((ldr_literal & 0xfff) >> 2) + 2;
	else
		ptr = inst + 1;

	if (*inst != ldr_literal) {
		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 0);
		}

		*inst = ldr_literal;

		if (flush_cache) {
			SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
			inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
			SLJIT_CACHE_FLUSH(inst, inst + 1);
		}
	}

	if (flush_cache) {
		SLJIT_UPDATE_WX_FLAGS(ptr, ptr + 1, 0);
	}

	*ptr = new_constant;

	if (flush_cache) {
		SLJIT_UPDATE_WX_FLAGS(ptr, ptr + 1, 1);
	}
#else
	sljit_uw *inst = (sljit_uw*)addr;

	SLJIT_UNUSED_ARG(executable_offset);

	SLJIT_ASSERT((inst[0] & 0xfff00000) == MOVW && (inst[1] & 0xfff00000) == MOVT);

	if (flush_cache) {
		SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 0);
	}

	inst[0] = MOVW | (inst[0] & 0xf000) | ((new_constant << 4) & 0xf0000) | (new_constant & 0xfff);
	inst[1] = MOVT | (inst[1] & 0xf000) | ((new_constant >> 12) & 0xf0000) | ((new_constant >> 16) & 0xfff);

	if (flush_cache) {
		SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
		inst = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
		SLJIT_CACHE_FLUSH(inst, inst + 2);
	}
#endif
}

SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
{
	struct sljit_memory_fragment *buf;
	sljit_uw *code;
	sljit_uw *code_ptr;
	sljit_uw *buf_ptr;
	sljit_uw *buf_end;
	sljit_uw size;
	sljit_uw word_count;
	sljit_uw next_addr;
	sljit_sw executable_offset;
	sljit_uw addr;
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	sljit_uw cpool_size;
	sljit_uw cpool_skip_alignment;
	sljit_uw cpool_current_index;
	sljit_uw *cpool_start_address;
	sljit_uw *last_pc_patch;
	struct future_patch *first_patch;
#endif

	struct sljit_label *label;
	struct sljit_jump *jump;
	struct sljit_const *const_;
	struct sljit_put_label *put_label;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_generate_code(compiler));
	reverse_buf(compiler);

	/* Second code generation pass. */
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	size = compiler->size + (compiler->patches << 1);
	if (compiler->cpool_fill > 0)
		size += compiler->cpool_fill + CONST_POOL_ALIGNMENT - 1;
#else
	size = compiler->size;
#endif
	code = (sljit_uw*)SLJIT_MALLOC_EXEC(size * sizeof(sljit_uw), compiler->exec_allocator_data);
	PTR_FAIL_WITH_EXEC_IF(code);
	buf = compiler->buf;

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	cpool_size = 0;
	cpool_skip_alignment = 0;
	cpool_current_index = 0;
	cpool_start_address = NULL;
	first_patch = NULL;
	last_pc_patch = code;
#endif

	code_ptr = code;
	word_count = 0;
	next_addr = 1;
	executable_offset = SLJIT_EXEC_OFFSET(code);

	label = compiler->labels;
	jump = compiler->jumps;
	const_ = compiler->consts;
	put_label = compiler->put_labels;

	if (label && label->size == 0) {
		label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
		label = label->next;
	}

	do {
		buf_ptr = (sljit_uw*)buf->memory;
		buf_end = buf_ptr + (buf->used_size >> 2);
		do {
			word_count++;
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
			if (cpool_size > 0) {
				if (cpool_skip_alignment > 0) {
					buf_ptr++;
					cpool_skip_alignment--;
				}
				else {
					if (SLJIT_UNLIKELY(resolve_const_pool_index(compiler, &first_patch, cpool_current_index, cpool_start_address, buf_ptr))) {
						SLJIT_FREE_EXEC(code, compiler->exec_allocator_data);
						compiler->error = SLJIT_ERR_ALLOC_FAILED;
						return NULL;
					}
					buf_ptr++;
					if (++cpool_current_index >= cpool_size) {
						SLJIT_ASSERT(!first_patch);
						cpool_size = 0;
						if (label && label->size == word_count) {
							/* Points after the current instruction. */
							label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
							label->size = (sljit_uw)(code_ptr - code);
							label = label->next;

							next_addr = compute_next_addr(label, jump, const_, put_label);
						}
					}
				}
			}
			else if ((*buf_ptr & 0xff000000) != PUSH_POOL) {
#endif
				*code_ptr = *buf_ptr++;
				if (next_addr == word_count) {
					SLJIT_ASSERT(!label || label->size >= word_count);
					SLJIT_ASSERT(!jump || jump->addr >= word_count);
					SLJIT_ASSERT(!const_ || const_->addr >= word_count);
					SLJIT_ASSERT(!put_label || put_label->addr >= word_count);

					/* These structures are ordered by their address. */
					if (jump && jump->addr == word_count) {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
						if (detect_jump_type(jump, code_ptr, code, executable_offset))
							code_ptr--;
						jump->addr = (sljit_uw)code_ptr;
#else
						jump->addr = (sljit_uw)(code_ptr - 2);
						if (detect_jump_type(jump, code_ptr, code, executable_offset))
							code_ptr -= 2;
#endif
						jump = jump->next;
					}
					if (label && label->size == word_count) {
						/* code_ptr can be affected above. */
						label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr + 1, executable_offset);
						label->size = (sljit_uw)((code_ptr + 1) - code);
						label = label->next;
					}
					if (const_ && const_->addr == word_count) {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
						const_->addr = (sljit_uw)code_ptr;
#else
						const_->addr = (sljit_uw)(code_ptr - 1);
#endif
						const_ = const_->next;
					}
					if (put_label && put_label->addr == word_count) {
						SLJIT_ASSERT(put_label->label);
						put_label->addr = (sljit_uw)code_ptr;
						put_label = put_label->next;
					}
					next_addr = compute_next_addr(label, jump, const_, put_label);
				}
				code_ptr++;
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
			}
			else {
				/* Fortunately, no need to shift. */
				cpool_size = *buf_ptr++ & ~PUSH_POOL;
				SLJIT_ASSERT(cpool_size > 0);
				cpool_start_address = ALIGN_INSTRUCTION(code_ptr + 1);
				cpool_current_index = patch_pc_relative_loads(last_pc_patch, code_ptr, cpool_start_address, cpool_size);
				if (cpool_current_index > 0) {
					/* Unconditional branch. */
					*code_ptr = B | (((sljit_uw)(cpool_start_address - code_ptr) + cpool_current_index - 2) & ~PUSH_POOL);
					code_ptr = (sljit_uw*)(cpool_start_address + cpool_current_index);
				}
				cpool_skip_alignment = CONST_POOL_ALIGNMENT - 1;
				cpool_current_index = 0;
				last_pc_patch = code_ptr;
			}
#endif
		} while (buf_ptr < buf_end);
		buf = buf->next;
	} while (buf);

	SLJIT_ASSERT(!label);
	SLJIT_ASSERT(!jump);
	SLJIT_ASSERT(!const_);
	SLJIT_ASSERT(!put_label);

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	SLJIT_ASSERT(cpool_size == 0);
	if (compiler->cpool_fill > 0) {
		cpool_start_address = ALIGN_INSTRUCTION(code_ptr);
		cpool_current_index = patch_pc_relative_loads(last_pc_patch, code_ptr, cpool_start_address, compiler->cpool_fill);
		if (cpool_current_index > 0)
			code_ptr = (sljit_uw*)(cpool_start_address + cpool_current_index);

		buf_ptr = compiler->cpool;
		buf_end = buf_ptr + compiler->cpool_fill;
		cpool_current_index = 0;
		while (buf_ptr < buf_end) {
			if (SLJIT_UNLIKELY(resolve_const_pool_index(compiler, &first_patch, cpool_current_index, cpool_start_address, buf_ptr))) {
				SLJIT_FREE_EXEC(code, compiler->exec_allocator_data);
				compiler->error = SLJIT_ERR_ALLOC_FAILED;
				return NULL;
			}
			buf_ptr++;
			cpool_current_index++;
		}
		SLJIT_ASSERT(!first_patch);
	}
#endif

	jump = compiler->jumps;
	while (jump) {
		buf_ptr = (sljit_uw *)jump->addr;

		if (jump->flags & PATCH_B) {
			addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr + 2, executable_offset);
			if (!(jump->flags & JUMP_ADDR)) {
				SLJIT_ASSERT(jump->flags & JUMP_LABEL);
				SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - addr) <= 0x01ffffff && (sljit_sw)(jump->u.label->addr - addr) >= -0x02000000);
				*buf_ptr |= ((jump->u.label->addr - addr) >> 2) & 0x00ffffff;
			}
			else {
				SLJIT_ASSERT((sljit_sw)(jump->u.target - addr) <= 0x01ffffff && (sljit_sw)(jump->u.target - addr) >= -0x02000000);
				*buf_ptr |= ((jump->u.target - addr) >> 2) & 0x00ffffff;
			}
		}
		else if (jump->flags & SLJIT_REWRITABLE_JUMP) {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
			jump->addr = (sljit_uw)code_ptr;
			code_ptr[0] = (sljit_uw)buf_ptr;
			code_ptr[1] = *buf_ptr;
			inline_set_jump_addr((sljit_uw)code_ptr, executable_offset, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target, 0);
			code_ptr += 2;
#else
			inline_set_jump_addr((sljit_uw)buf_ptr, executable_offset, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target, 0);
#endif
		}
		else {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
			if (jump->flags & IS_BL)
				buf_ptr--;
			if (*buf_ptr & (1 << 23))
				buf_ptr += ((*buf_ptr & 0xfff) >> 2) + 2;
			else
				buf_ptr += 1;
			*buf_ptr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
#else
			inline_set_jump_addr((sljit_uw)buf_ptr, executable_offset, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target, 0);
#endif
		}
		jump = jump->next;
	}

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	const_ = compiler->consts;
	while (const_) {
		buf_ptr = (sljit_uw*)const_->addr;
		const_->addr = (sljit_uw)code_ptr;

		code_ptr[0] = (sljit_uw)buf_ptr;
		code_ptr[1] = *buf_ptr;
		if (*buf_ptr & (1 << 23))
			buf_ptr += ((*buf_ptr & 0xfff) >> 2) + 2;
		else
			buf_ptr += 1;
		/* Set the value again (can be a simple constant). */
		inline_set_const((sljit_uw)code_ptr, executable_offset, *buf_ptr, 0);
		code_ptr += 2;

		const_ = const_->next;
	}
#endif

	put_label = compiler->put_labels;
	while (put_label) {
		addr = put_label->label->addr;
		buf_ptr = (sljit_uw*)put_label->addr;

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
		SLJIT_ASSERT((buf_ptr[0] & 0xffff0000) == 0xe59f0000);
		buf_ptr[((buf_ptr[0] & 0xfff) >> 2) + 2] = addr;
#else
		SLJIT_ASSERT((buf_ptr[-1] & 0xfff00000) == MOVW && (buf_ptr[0] & 0xfff00000) == MOVT);
		buf_ptr[-1] |= ((addr << 4) & 0xf0000) | (addr & 0xfff);
		buf_ptr[0] |= ((addr >> 12) & 0xf0000) | ((addr >> 16) & 0xfff);
#endif
		put_label = put_label->next;
	}

	SLJIT_ASSERT(code_ptr - code <= (sljit_s32)size);

	compiler->error = SLJIT_ERR_COMPILED;
	compiler->executable_offset = executable_offset;
	compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_uw);

	code = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
	code_ptr = (sljit_uw *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);

	SLJIT_CACHE_FLUSH(code, code_ptr);
	SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
	return code;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
	switch (feature_type) {
	case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
		return SLJIT_IS_FPU_AVAILABLE;
#else
		/* Available by default. */
		return 1;
#endif

	case SLJIT_HAS_CLZ:
	case SLJIT_HAS_CMOV:
#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
	case SLJIT_HAS_PREFETCH:
#endif
		return 1;

	default:
		return 0;
	}
}

/* --------------------------------------------------------------------- */
/*  Entry, exit                                                          */
/* --------------------------------------------------------------------- */

/* Creates an index in the data_transfer_insts array. */
#define WORD_SIZE	0x00
#define BYTE_SIZE	0x01
#define HALF_SIZE	0x02
#define PRELOAD		0x03
#define SIGNED		0x04
#define LOAD_DATA	0x08

/* Flag bits for emit_op. */
#define ALLOW_IMM	0x10
#define ALLOW_INV_IMM	0x20
#define ALLOW_ANY_IMM	(ALLOW_IMM | ALLOW_INV_IMM)
#define ALLOW_NEG_IMM	0x40

/* s/l - store/load (1 bit)
   u/s - signed/unsigned (1 bit)
   w/b/h/N - word/byte/half/NOT allowed (2 bits)
   Storing signed and unsigned values is the same operation. */

static const sljit_uw data_transfer_insts[16] = {
/* s u w */ 0xe5000000 /* str */,
/* s u b */ 0xe5400000 /* strb */,
/* s u h */ 0xe10000b0 /* strh */,
/* s u N */ 0x00000000 /* not allowed */,
/* s s w */ 0xe5000000 /* str */,
/* s s b */ 0xe5400000 /* strb */,
/* s s h */ 0xe10000b0 /* strh */,
/* s s N */ 0x00000000 /* not allowed */,

/* l u w */ 0xe5100000 /* ldr */,
/* l u b */ 0xe5500000 /* ldrb */,
/* l u h */ 0xe11000b0 /* ldrh */,
/* l u p */ 0xf5500000 /* preload */,
/* l s w */ 0xe5100000 /* ldr */,
/* l s b */ 0xe11000d0 /* ldrsb */,
/* l s h */ 0xe11000f0 /* ldrsh */,
/* l s N */ 0x00000000 /* not allowed */,
};

#define EMIT_DATA_TRANSFER(type, add, target_reg, base_reg, arg) \
	(data_transfer_insts[(type) & 0xf] | ((add) << 23) | RD(target_reg) | RN(base_reg) | (sljit_uw)(arg))

/* Normal ldr/str instruction.
   Type2: ldrsb, ldrh, ldrsh */
#define IS_TYPE1_TRANSFER(type) \
	(data_transfer_insts[(type) & 0xf] & 0x04000000)
#define TYPE2_TRANSFER_IMM(imm) \
	(((imm) & 0xf) | (((imm) & 0xf0) << 4) | (1 << 22))
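
/* Illustrative note (not part of the original file): type1 transfers (ldr,
   str, ldrb, strb) take a 12-bit byte offset directly, while type2 forms
   (ldrh, ldrsb, ldrsh) split an 8-bit offset into two nibbles, so
   TYPE2_TRANSFER_IMM(0xab) yields 0xb | (0xa << 8) | (1 << 22). For
   example, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, reg, base, 4)
   assembles "ldr reg, [base, #4]". */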

#define EMIT_FPU_OPERATION(opcode, mode, dst, src1, src2) \
	((sljit_uw)(opcode) | (sljit_uw)(mode) | VD(dst) | VM(src1) | VN(src2))

/* Flags for emit_op: */
/* Arguments are swapped. */
#define ARGS_SWAPPED	0x01
/* Inverted immediate. */
#define INV_IMM		0x02
/* Source and destination is register. */
#define MOVE_REG_CONV	0x04
/* Unused return value. */
#define UNUSED_RETURN	0x08
/* SET_FLAGS must be (1 << 20) as it is also the value of the S bit (can be used for optimization). */
#define SET_FLAGS	(1 << 20)
/* dst: reg
   src1: reg
   src2: reg or imm (if allowed)
   SRC2_IMM must be (1 << 25) as it is also the value of the I bit (can be used for optimization). */
#define SRC2_IMM	(1 << 25)
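
/* Illustrative note (not part of the original file): because SET_FLAGS is
   exactly the S bit and SRC2_IMM is exactly the I bit of the data
   processing encoding, they can be OR-ed straight into an opcode, e.g.
   ADD | SET_FLAGS == 0xe0900000, which is "adds", and ADD | SRC2_IMM | 1
   encodes "add rd, rn, #1". */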

static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 inp_flags,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w);

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	sljit_uw imm, offset;
	sljit_s32 i, tmp, size, word_arg_count;
	sljit_s32 saved_arg_count = SLJIT_KEPT_SAVEDS_COUNT(options);
#ifdef __SOFTFP__
	sljit_u32 float_arg_count;
#else
	sljit_u32 old_offset, f32_offset;
	sljit_u32 remap[3];
	sljit_u32 *remap_ptr = remap;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	imm = 0;

	tmp = SLJIT_S0 - saveds;
	for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--)
		imm |= (sljit_uw)1 << reg_map[i];

	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--)
		imm |= (sljit_uw)1 << reg_map[i];

	SLJIT_ASSERT(reg_map[TMP_REG2] == 14);

	/* Push saved and temporary registers
	   multiple registers: stmdb sp!, {..., lr}
	   single register: str reg, [sp, #-4]! */
	if (imm != 0)
		FAIL_IF(push_inst(compiler, PUSH | (1 << 14) | imm));
	else
		FAIL_IF(push_inst(compiler, 0xe52d0004 | RD(TMP_REG2)));

	/* Stack must be aligned to 8 bytes: */
	size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 1);

	if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
		if ((size & SSIZE_OF(sw)) != 0) {
			FAIL_IF(push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | SRC2_IMM | sizeof(sljit_sw)));
			size += SSIZE_OF(sw);
		}

		if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
			FAIL_IF(push_inst(compiler, VPUSH | VD(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
		} else {
			if (fsaveds > 0)
				FAIL_IF(push_inst(compiler, VPUSH | VD(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
			if (fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG)
				FAIL_IF(push_inst(compiler, VPUSH | VD(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
		}
	}

	local_size = ((size + local_size + 0x7) & ~0x7) - size;
	compiler->local_size = local_size;
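
	/* Illustrative note (not part of the original file): the expression above
	   keeps sp 8-byte aligned as required by the AAPCS. E.g. with a saved
	   register area of size == 20 bytes and a requested local_size == 13,
	   ((20 + 13 + 0x7) & ~0x7) - 20 == 20, so 40 bytes are reserved in
	   total. */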

	if (options & SLJIT_ENTER_REG_ARG)
		arg_types = 0;

	arg_types >>= SLJIT_ARG_SHIFT;
	word_arg_count = 0;
	saved_arg_count = 0;
#ifdef __SOFTFP__
	SLJIT_COMPILE_ASSERT(SLJIT_FR0 == 1, float_register_index_start);

	offset = 0;
	float_arg_count = 0;

	while (arg_types) {
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			if (offset & 0x7)
				offset += sizeof(sljit_sw);

			if (offset < 4 * sizeof(sljit_sw))
				FAIL_IF(push_inst(compiler, VMOV2 | (offset << 10) | ((offset + sizeof(sljit_sw)) << 14) | float_arg_count));
			else
				FAIL_IF(push_inst(compiler, VLDR_F32 | 0x800100 | RN(SLJIT_SP)
					| (float_arg_count << 12) | ((offset + (sljit_uw)size - 4 * sizeof(sljit_sw)) >> 2)));
			float_arg_count++;
			offset += sizeof(sljit_f64) - sizeof(sljit_sw);
			break;
		case SLJIT_ARG_TYPE_F32:
			if (offset < 4 * sizeof(sljit_sw))
				FAIL_IF(push_inst(compiler, VMOV | (float_arg_count << 16) | (offset << 10)));
			else
				FAIL_IF(push_inst(compiler, VLDR_F32 | 0x800000 | RN(SLJIT_SP)
					| (float_arg_count << 12) | ((offset + (sljit_uw)size - 4 * sizeof(sljit_sw)) >> 2)));
			float_arg_count++;
			break;
		default:
			word_arg_count++;

			if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
				tmp = SLJIT_S0 - saved_arg_count;
				saved_arg_count++;
			} else if (word_arg_count - 1 != (sljit_s32)(offset >> 2))
				tmp = word_arg_count;
			else
				break;

			if (offset < 4 * sizeof(sljit_sw))
				FAIL_IF(push_inst(compiler, MOV | RD(tmp) | (offset >> 2)));
			else
				FAIL_IF(push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(tmp) | (offset + (sljit_uw)size - 4 * sizeof(sljit_sw))));
			break;
		}

		offset += sizeof(sljit_sw);
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	compiler->args_size = offset;
#else
	offset = SLJIT_FR0;
	old_offset = SLJIT_FR0;
	f32_offset = 0;

	while (arg_types) {
		switch (arg_types & SLJIT_ARG_MASK) {
		case SLJIT_ARG_TYPE_F64:
			if (offset != old_offset)
				*remap_ptr++ = EMIT_FPU_OPERATION(VMOV_F32, SLJIT_32, offset, old_offset, 0);
			old_offset++;
			offset++;
			break;
		case SLJIT_ARG_TYPE_F32:
			if (f32_offset != 0) {
				*remap_ptr++ = EMIT_FPU_OPERATION(VMOV_F32, 0x20, offset, f32_offset, 0);
				f32_offset = 0;
			} else {
				if (offset != old_offset)
					*remap_ptr++ = EMIT_FPU_OPERATION(VMOV_F32, 0, offset, old_offset, 0);
				f32_offset = old_offset;
				old_offset++;
			}
			offset++;
			break;
		default:
			if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
				FAIL_IF(push_inst(compiler, MOV | RD(SLJIT_S0 - saved_arg_count) | RM(SLJIT_R0 + word_arg_count)));
				saved_arg_count++;
			}

			word_arg_count++;
			break;
		}
		arg_types >>= SLJIT_ARG_SHIFT;
	}

	SLJIT_ASSERT((sljit_uw)(remap_ptr - remap) <= sizeof(remap));

	while (remap_ptr > remap)
		FAIL_IF(push_inst(compiler, *(--remap_ptr)));
#endif

	if (local_size > 0)
		FAIL_IF(emit_op(compiler, SLJIT_SUB, ALLOW_IMM, SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size));

	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	sljit_s32 size;

	CHECK_ERROR();
	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	size = GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);

	if ((size & SSIZE_OF(sw)) != 0 && (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG))
		size += SSIZE_OF(sw);

	compiler->local_size = ((size + local_size + 0x7) & ~0x7) - size;
	return SLJIT_SUCCESS;
}

static sljit_s32 emit_add_sp(struct sljit_compiler *compiler, sljit_uw imm)
{
	sljit_uw imm2 = get_imm(imm);

	if (imm2 == 0) {
		FAIL_IF(load_immediate(compiler, TMP_REG2, imm));
		imm2 = RM(TMP_REG2);
	}

	return push_inst(compiler, ADD | RD(SLJIT_SP) | RN(SLJIT_SP) | imm2);
}
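
/* Illustrative note (not part of the original file): if imm fits the
   rotated-immediate form, a single "add sp, sp, #imm" is emitted; otherwise
   the constant is first materialized in TMP_REG2 (lr) and imm2 degenerates
   to RM(TMP_REG2), producing "add sp, sp, lr". This works because SRC2_IMM
   (the I bit) is already part of get_imm's nonzero result. */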

static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 frame_size)
{
	sljit_s32 local_size, fscratches, fsaveds, i, tmp;
	sljit_s32 saveds_restore_start = SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options);
	sljit_s32 lr_dst = TMP_PC;
	sljit_uw reg_list;

	SLJIT_ASSERT(reg_map[TMP_REG2] == 14);

	local_size = compiler->local_size;
	fscratches = compiler->fscratches;
	fsaveds = compiler->fsaveds;

	if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
		if (local_size > 0)
			FAIL_IF(emit_add_sp(compiler, (sljit_uw)local_size));

		if (fsaveds + fscratches >= SLJIT_NUMBER_OF_FLOAT_REGISTERS) {
			FAIL_IF(push_inst(compiler, VPOP | VD(SLJIT_FS0) | ((sljit_uw)SLJIT_NUMBER_OF_SAVED_FLOAT_REGISTERS << 1)));
		} else {
			if (fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG)
				FAIL_IF(push_inst(compiler, VPOP | VD(fscratches) | ((sljit_uw)(fscratches - (SLJIT_FIRST_SAVED_FLOAT_REG - 1)) << 1)));
			if (fsaveds > 0)
				FAIL_IF(push_inst(compiler, VPOP | VD(SLJIT_FS0) | ((sljit_uw)fsaveds << 1)));
		}

		local_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1) & 0x7;
	}

	if (frame_size < 0) {
		lr_dst = TMP_REG2;
		frame_size = 0;
	} else if (frame_size > 0)
		lr_dst = 0;

	reg_list = 0;
	if (lr_dst != 0)
		reg_list |= (sljit_uw)1 << reg_map[lr_dst];

	tmp = SLJIT_S0 - compiler->saveds;
	if (saveds_restore_start != tmp) {
		for (i = saveds_restore_start; i > tmp; i--)
			reg_list |= (sljit_uw)1 << reg_map[i];
	} else
		saveds_restore_start = 0;

	for (i = compiler->scratches; i >= SLJIT_FIRST_SAVED_REG; i--)
		reg_list |= (sljit_uw)1 << reg_map[i];

	if (lr_dst == 0 && (reg_list & (reg_list - 1)) == 0) {
		/* The local_size does not include the saved registers. */
		local_size += SSIZE_OF(sw);

		if (reg_list != 0)
			local_size += SSIZE_OF(sw);

		if (frame_size > local_size)
			FAIL_IF(push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | (1 << 25) | (sljit_uw)(frame_size - local_size)));
		else if (frame_size < local_size)
			FAIL_IF(emit_add_sp(compiler, (sljit_uw)(local_size - frame_size)));

		if (reg_list == 0)
			return SLJIT_SUCCESS;

		if (saveds_restore_start != 0) {
			SLJIT_ASSERT(reg_list == ((sljit_uw)1 << reg_map[saveds_restore_start]));
			lr_dst = saveds_restore_start;
		} else {
			SLJIT_ASSERT(reg_list == ((sljit_uw)1 << reg_map[SLJIT_FIRST_SAVED_REG]));
			lr_dst = SLJIT_FIRST_SAVED_REG;
		}

		return push_inst(compiler, LDR | 0x800000 | RN(SLJIT_SP) | RD(lr_dst) | (sljit_uw)(frame_size - 2 * SSIZE_OF(sw)));
	}

	if (local_size > 0)
		FAIL_IF(emit_add_sp(compiler, (sljit_uw)local_size));

	/* Pop saved and temporary registers
	   multiple registers: ldmia sp!, {...}
	   single register: ldr reg, [sp], #4 */
	if ((reg_list & (reg_list - 1)) == 0) {
		SLJIT_ASSERT(lr_dst != 0);
		SLJIT_ASSERT(reg_list == (sljit_uw)1 << reg_map[lr_dst]);

		return push_inst(compiler, 0xe49d0004 | RD(lr_dst));
	}

	FAIL_IF(push_inst(compiler, POP | reg_list));
	if (frame_size > 0)
		return push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | (1 << 25) | ((sljit_uw)frame_size - sizeof(sljit_sw)));
	return SLJIT_SUCCESS;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_return_void(compiler));

	return emit_stack_frame_release(compiler, 0);
}

/* --------------------------------------------------------------------- */
/*  Operators                                                            */
/* --------------------------------------------------------------------- */

#define EMIT_SHIFT_INS_AND_RETURN(opcode) \
	SLJIT_ASSERT(!(flags & INV_IMM) && !(src2 & SRC2_IMM)); \
	if (compiler->shift_imm != 0x20) { \
		SLJIT_ASSERT(src1 == TMP_REG1); \
		SLJIT_ASSERT(!(flags & ARGS_SWAPPED)); \
		\
		if (compiler->shift_imm != 0) \
			return push_inst(compiler, MOV | (flags & SET_FLAGS) | \
				RD(dst) | (compiler->shift_imm << 7) | (opcode << 5) | RM(src2)); \
		return push_inst(compiler, MOV | (flags & SET_FLAGS) | RD(dst) | RM(src2)); \
	} \
	return push_inst(compiler, MOV | (flags & SET_FLAGS) | RD(dst) \
		| RM8((flags & ARGS_SWAPPED) ? src1 : src2) | (sljit_uw)(opcode << 5) \
		| 0x10 | RM((flags & ARGS_SWAPPED) ? src2 : src1));
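
/* Illustrative note (not part of the original file): for immediate shifts
   the amount goes to bits 11:7 and the shift type to bits 6:5, so e.g.
   "lsr rd, rm, #3" is MOV | RD(dst) | (3 << 7) | (1 << 5) | RM(src2).
   Register-specified shifts set bit 4 (the 0x10 above) and place the shift
   amount register in bits 11:8 via RM8. */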

static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
	sljit_uw dst, sljit_uw src1, sljit_uw src2)
{
	switch (GET_OPCODE(op)) {
	case SLJIT_MOV:
		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
		if (dst != src2) {
			if (src2 & SRC2_IMM) {
				return push_inst(compiler, ((flags & INV_IMM) ? MVN : MOV) | RD(dst) | src2);
			}
			return push_inst(compiler, MOV | RD(dst) | RM(src2));
		}
		return SLJIT_SUCCESS;

	case SLJIT_MOV_U8:
	case SLJIT_MOV_S8:
		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
		if (flags & MOVE_REG_CONV) {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
			if (op == SLJIT_MOV_U8)
				return push_inst(compiler, AND | RD(dst) | RN(src2) | SRC2_IMM | 0xff);
			FAIL_IF(push_inst(compiler, MOV | RD(dst) | (24 << 7) | RM(src2)));
			return push_inst(compiler, MOV | RD(dst) | (24 << 7) | (op == SLJIT_MOV_U8 ? 0x20 : 0x40) | RM(dst));
#else
			return push_inst(compiler, (op == SLJIT_MOV_U8 ? UXTB : SXTB) | RD(dst) | RM(src2));
#endif
		}
		else if (dst != src2) {
			SLJIT_ASSERT(src2 & SRC2_IMM);
			return push_inst(compiler, ((flags & INV_IMM) ? MVN : MOV) | RD(dst) | src2);
		}
		return SLJIT_SUCCESS;

	case SLJIT_MOV_U16:
	case SLJIT_MOV_S16:
		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & ARGS_SWAPPED));
		if (flags & MOVE_REG_CONV) {
#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
			FAIL_IF(push_inst(compiler, MOV | RD(dst) | (16 << 7) | RM(src2)));
			return push_inst(compiler, MOV | RD(dst) | (16 << 7) | (op == SLJIT_MOV_U16 ? 0x20 : 0x40) | RM(dst));
#else
			return push_inst(compiler, (op == SLJIT_MOV_U16 ? UXTH : SXTH) | RD(dst) | RM(src2));
#endif
		}
		else if (dst != src2) {
			SLJIT_ASSERT(src2 & SRC2_IMM);
			return push_inst(compiler, ((flags & INV_IMM) ? MVN : MOV) | RD(dst) | src2);
		}
		return SLJIT_SUCCESS;

	case SLJIT_NOT:
		if (src2 & SRC2_IMM)
			return push_inst(compiler, ((flags & INV_IMM) ? MOV : MVN) | (flags & SET_FLAGS) | RD(dst) | src2);

		return push_inst(compiler, MVN | (flags & SET_FLAGS) | RD(dst) | RM(src2));

	case SLJIT_CLZ:
		SLJIT_ASSERT(!(flags & INV_IMM));
		SLJIT_ASSERT(!(src2 & SRC2_IMM));
		FAIL_IF(push_inst(compiler, CLZ | RD(dst) | RM(src2)));
		return SLJIT_SUCCESS;

	case SLJIT_ADD:
		SLJIT_ASSERT(!(flags & INV_IMM));

		if ((flags & (UNUSED_RETURN | ARGS_SWAPPED)) == UNUSED_RETURN)
			return push_inst(compiler, CMN | SET_FLAGS | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));
		return push_inst(compiler, ADD | (flags & SET_FLAGS) | RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

	case SLJIT_ADDC:
		SLJIT_ASSERT(!(flags & INV_IMM));
		return push_inst(compiler, ADC | (flags & SET_FLAGS) | RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

	case SLJIT_SUB:
		SLJIT_ASSERT(!(flags & INV_IMM));

		if ((flags & (UNUSED_RETURN | ARGS_SWAPPED)) == UNUSED_RETURN)
			return push_inst(compiler, CMP | SET_FLAGS | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

		return push_inst(compiler, (!(flags & ARGS_SWAPPED) ? SUB : RSB) | (flags & SET_FLAGS)
			| RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

	case SLJIT_SUBC:
		SLJIT_ASSERT(!(flags & INV_IMM));
		return push_inst(compiler, (!(flags & ARGS_SWAPPED) ? SBC : RSC) | (flags & SET_FLAGS)
			| RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

	case SLJIT_MUL:
		SLJIT_ASSERT(!(flags & INV_IMM));
		SLJIT_ASSERT(!(src2 & SRC2_IMM));
		compiler->status_flags_state = 0;

		if (!HAS_FLAGS(op))
			return push_inst(compiler, MUL | RN(dst) | RM8(src2) | RM(src1));

		FAIL_IF(push_inst(compiler, SMULL | RN(TMP_REG1) | RD(dst) | RM8(src2) | RM(src1)));

		/* cmp TMP_REG1, dst asr #31. */
		return push_inst(compiler, CMP | SET_FLAGS | RN(TMP_REG1) | RM(dst) | 0xfc0);

	case SLJIT_AND:
		if ((flags & (UNUSED_RETURN | INV_IMM)) == UNUSED_RETURN)
			return push_inst(compiler, TST | SET_FLAGS | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));
		return push_inst(compiler, (!(flags & INV_IMM) ? AND : BIC) | (flags & SET_FLAGS)
			| RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

	case SLJIT_OR:
		SLJIT_ASSERT(!(flags & INV_IMM));
		return push_inst(compiler, ORR | (flags & SET_FLAGS) | RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

	case SLJIT_XOR:
		SLJIT_ASSERT(!(flags & INV_IMM));
		return push_inst(compiler, EOR | (flags & SET_FLAGS) | RD(dst) | RN(src1) | ((src2 & SRC2_IMM) ? src2 : RM(src2)));

	case SLJIT_SHL:
		EMIT_SHIFT_INS_AND_RETURN(0);

	case SLJIT_LSHR:
		EMIT_SHIFT_INS_AND_RETURN(1);

	case SLJIT_ASHR:
		EMIT_SHIFT_INS_AND_RETURN(2);
	}

	SLJIT_UNREACHABLE();
	return SLJIT_SUCCESS;
}

#undef EMIT_SHIFT_INS_AND_RETURN

/* Tests whether the immediate can be stored in the 12-bit imm field.
   Returns 0 if not possible. */
static sljit_uw get_imm(sljit_uw imm)
{
	sljit_u32 rol;

	if (imm <= 0xff)
		return SRC2_IMM | imm;

	if (!(imm & 0xff000000)) {
		imm <<= 8;
		rol = 8;
	}
	else {
		imm = (imm << 24) | (imm >> 8);
		rol = 0;
	}

	if (!(imm & 0xff000000)) {
		imm <<= 8;
		rol += 4;
	}

	if (!(imm & 0xf0000000)) {
		imm <<= 4;
		rol += 2;
	}

	if (!(imm & 0xc0000000)) {
		imm <<= 2;
		rol += 1;
	}

	if (!(imm & 0x00ffffff))
		return SRC2_IMM | (imm >> 24) | (rol << 8);
	else
		return 0;
}
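
/* Worked example (illustrative, not part of the original file):
   get_imm(0x0003fc00) normalizes the value to 0xff000000 with rol == 11,
   returning SRC2_IMM | 0xff | (11 << 8), i.e. #0xff rotated right by 22.
   A value such as 0x101 spans more than 8 significant bits and returns 0,
   forcing the caller to fall back to load_immediate. */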

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
static sljit_s32 generate_int(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm, sljit_s32 positive)
{
	sljit_uw mask;
	sljit_uw imm1;
	sljit_uw imm2;
	sljit_uw rol;

	/* Step 1: Search a zero byte (8 continuous zero bits). */
	mask = 0xff000000;
	rol = 8;
	while (1) {
		if (!(imm & mask)) {
			/* Rol imm by rol. */
			imm = (imm << rol) | (imm >> (32 - rol));
			/* Calculate arm rol. */
			rol = 4 + (rol >> 1);
			break;
		}
		rol += 2;
		mask >>= 2;
		if (mask & 0x3) {
			/* rol by 8. */
			imm = (imm << 8) | (imm >> 24);
			mask = 0xff00;
			rol = 24;
			while (1) {
				if (!(imm & mask)) {
					/* Rol imm by rol. */
					imm = (imm << rol) | (imm >> (32 - rol));
					/* Calculate arm rol. */
					rol = (rol >> 1) - 8;
					break;
				}
				rol += 2;
				mask >>= 2;
				if (mask & 0x3)
					return 0;
			}
			break;
		}
	}

	/* The low 8 bits must be zero. */
	SLJIT_ASSERT(!(imm & 0xff));

	if (!(imm & 0xff000000)) {
		imm1 = SRC2_IMM | ((imm >> 16) & 0xff) | (((rol + 4) & 0xf) << 8);
		imm2 = SRC2_IMM | ((imm >> 8) & 0xff) | (((rol + 8) & 0xf) << 8);
	}
	else if (imm & 0xc0000000) {
		imm1 = SRC2_IMM | ((imm >> 24) & 0xff) | ((rol & 0xf) << 8);
		imm <<= 8;
		rol += 4;

		if (!(imm & 0xff000000)) {
			imm <<= 8;
			rol += 4;
		}

		if (!(imm & 0xf0000000)) {
			imm <<= 4;
			rol += 2;
		}

		if (!(imm & 0xc0000000)) {
			imm <<= 2;
			rol += 1;
		}

		if (!(imm & 0x00ffffff))
			imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
		else
			return 0;
	}
	else {
		if (!(imm & 0xf0000000)) {
			imm <<= 4;
			rol += 2;
		}

		if (!(imm & 0xc0000000)) {
			imm <<= 2;
			rol += 1;
		}

		imm1 = SRC2_IMM | ((imm >> 24) & 0xff) | ((rol & 0xf) << 8);
		imm <<= 8;
		rol += 4;

		if (!(imm & 0xf0000000)) {
			imm <<= 4;
			rol += 2;
		}

		if (!(imm & 0xc0000000)) {
			imm <<= 2;
			rol += 1;
		}

		if (!(imm & 0x00ffffff))
			imm2 = SRC2_IMM | (imm >> 24) | ((rol & 0xf) << 8);
		else
			return 0;
	}

	FAIL_IF(push_inst(compiler, (positive ? MOV : MVN) | RD(reg) | imm1));
	FAIL_IF(push_inst(compiler, (positive ? ORR : BIC) | RD(reg) | RN(reg) | imm2));
	return 1;
}
#endif
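
/* Illustrative note (not part of the original file): generate_int covers
   constants that split into two rotated 8-bit chunks, e.g. 0x00120034 can
   be built as "mov reg, #0x120000" followed by "orr reg, reg, #0x34"
   (or, with positive == 0, an equivalent mvn/bic pair for inverted
   values). Constants needing more than two chunks return 0 and end up in
   the literal pool. */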

static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 reg, sljit_uw imm)
{
	sljit_uw tmp;

#if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
	if (!(imm & ~(sljit_uw)0xffff))
		return push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | (imm & 0xfff));
#endif

	/* Create imm by 1 inst. */
	tmp = get_imm(imm);
	if (tmp)
		return push_inst(compiler, MOV | RD(reg) | tmp);

	tmp = get_imm(~imm);
	if (tmp)
		return push_inst(compiler, MVN | RD(reg) | tmp);

#if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
	/* Create imm by 2 inst. */
	FAIL_IF(generate_int(compiler, reg, imm, 1));
	FAIL_IF(generate_int(compiler, reg, ~imm, 0));

	/* Load integer. */
	return push_inst_with_literal(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, reg, TMP_PC, 0), imm);
#else
	FAIL_IF(push_inst(compiler, MOVW | RD(reg) | ((imm << 4) & 0xf0000) | (imm & 0xfff)));
	if (imm <= 0xffff)
		return SLJIT_SUCCESS;
	return push_inst(compiler, MOVT | RD(reg) | ((imm >> 12) & 0xf0000) | ((imm >> 16) & 0xfff));
#endif
}

static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg,
	sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg)
{
	sljit_uw imm, offset_reg;
	sljit_sw mask = IS_TYPE1_TRANSFER(flags) ? 0xfff : 0xff;

	SLJIT_ASSERT(arg & SLJIT_MEM);
	SLJIT_ASSERT((arg & REG_MASK) != tmp_reg || (arg == SLJIT_MEM1(tmp_reg) && argw >= -mask && argw <= mask));

	if (SLJIT_UNLIKELY(!(arg & REG_MASK))) {
		FAIL_IF(load_immediate(compiler, tmp_reg, (sljit_uw)(argw & ~mask)));
		argw &= mask;

		return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 1, reg, tmp_reg,
			(mask == 0xff) ? TYPE2_TRANSFER_IMM(argw) : argw));
	}

	if (arg & OFFS_REG_MASK) {
		offset_reg = OFFS_REG(arg);
		arg &= REG_MASK;
		argw &= 0x3;

		if (argw != 0 && (mask == 0xff)) {
			FAIL_IF(push_inst(compiler, ADD | RD(tmp_reg) | RN(arg) | RM(offset_reg) | ((sljit_uw)argw << 7)));
			return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 1, reg, tmp_reg, TYPE2_TRANSFER_IMM(0)));
		}

		/* Bit 25: RM is offset. */
		return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 1, reg, arg,
			RM(offset_reg) | (mask == 0xff ? 0 : (1 << 25)) | ((sljit_uw)argw << 7)));
	}

	arg &= REG_MASK;

	if (argw > mask) {
		imm = get_imm((sljit_uw)(argw & ~mask));
		if (imm) {
			FAIL_IF(push_inst(compiler, ADD | RD(tmp_reg) | RN(arg) | imm));
			argw = argw & mask;
			arg = tmp_reg;
		}
	}
	else if (argw < -mask) {
		imm = get_imm((sljit_uw)(-argw & ~mask));
		if (imm) {
			FAIL_IF(push_inst(compiler, SUB | RD(tmp_reg) | RN(arg) | imm));
			argw = -(-argw & mask);
			arg = tmp_reg;
		}
	}

	if (argw <= mask && argw >= -mask) {
		if (argw >= 0) {
			if (mask == 0xff)
				argw = TYPE2_TRANSFER_IMM(argw);
			return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 1, reg, arg, argw));
		}

		argw = -argw;

		if (mask == 0xff)
			argw = TYPE2_TRANSFER_IMM(argw);

		return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 0, reg, arg, argw));
	}

	FAIL_IF(load_immediate(compiler, tmp_reg, (sljit_uw)argw));
	return push_inst(compiler, EMIT_DATA_TRANSFER(flags, 1, reg, arg,
		RM(tmp_reg) | (mask == 0xff ? 0 : (1 << 25))));
}
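
/* Illustrative note (not part of the original file): the addressing
   fallbacks above mean e.g. SLJIT_MEM1(reg) with argw == 0x1004 and a
   type1 transfer first emits "add tmp, reg, #0x1000" (0x1000 is a valid
   rotated immediate) and then accesses [tmp, #4], while completely
   unencodable offsets are loaded with load_immediate and used as a
   register offset. */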
1752 static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 inp_flags,
1753 sljit_s32 dst, sljit_sw dstw,
1754 sljit_s32 src1, sljit_sw src1w,
1755 sljit_s32 src2, sljit_sw src2w)
1757 /* src1 is reg or TMP_REG1
1758 src2 is reg, TMP_REG2, or imm
1759 result goes to TMP_REG2, so put result can use TMP_REG1. */
1761 /* We prefers register and simple consts. */
1762 sljit_s32 dst_reg;
1763 sljit_s32 src1_reg;
1764 sljit_s32 src2_reg = 0;
1765 sljit_s32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
1766 sljit_s32 neg_op = 0;
1768 if (dst == TMP_REG2)
1769 flags |= UNUSED_RETURN;
1771 SLJIT_ASSERT(!(inp_flags & ALLOW_INV_IMM) || (inp_flags & ALLOW_IMM));
1773 if (inp_flags & ALLOW_NEG_IMM) {
1774 switch (GET_OPCODE(op)) {
1775 case SLJIT_ADD:
1776 compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
1777 neg_op = SLJIT_SUB;
1778 break;
1779 case SLJIT_ADDC:
1780 compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
1781 neg_op = SLJIT_SUBC;
1782 break;
1783 case SLJIT_SUB:
1784 compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
1785 neg_op = SLJIT_ADD;
1786 break;
1787 case SLJIT_SUBC:
1788 compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
1789 neg_op = SLJIT_ADDC;
1790 break;
1794 do {
1795 if (!(inp_flags & ALLOW_IMM))
1796 break;
1798 if (src2 & SLJIT_IMM) {
1799 src2_reg = (sljit_s32)get_imm((sljit_uw)src2w);
1800 if (src2_reg)
1801 break;
1802 if (inp_flags & ALLOW_INV_IMM) {
1803 src2_reg = (sljit_s32)get_imm(~(sljit_uw)src2w);
1804 if (src2_reg) {
1805 flags |= INV_IMM;
1806 break;
1809 if (neg_op != 0) {
1810 src2_reg = (sljit_s32)get_imm((sljit_uw)-src2w);
1811 if (src2_reg) {
1812 op = neg_op | GET_ALL_FLAGS(op);
1813 break;
1818 if (src1 & SLJIT_IMM) {
1819 src2_reg = (sljit_s32)get_imm((sljit_uw)src1w);
1820 if (src2_reg) {
1821 flags |= ARGS_SWAPPED;
1822 src1 = src2;
1823 src1w = src2w;
1824 break;
1826 if (inp_flags & ALLOW_INV_IMM) {
1827 src2_reg = (sljit_s32)get_imm(~(sljit_uw)src1w);
1828 if (src2_reg) {
1829 flags |= ARGS_SWAPPED | INV_IMM;
1830 src1 = src2;
1831 src1w = src2w;
1832 break;
1835 if (neg_op >= SLJIT_SUB) {
1836 /* Note: the original op is additive (commutative), so src1 can be negated and the operands swapped. */
1837 src2_reg = (sljit_s32)get_imm((sljit_uw)-src1w);
1838 if (src2_reg) {
1839 src1 = src2;
1840 src1w = src2w;
1841 op = neg_op | GET_ALL_FLAGS(op);
1842 break;
1846 } while(0);
1848 /* Source 1. */
1849 if (FAST_IS_REG(src1))
1850 src1_reg = src1;
1851 else if (src1 & SLJIT_MEM) {
1852 FAIL_IF(emit_op_mem(compiler, inp_flags | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
1853 src1_reg = TMP_REG1;
1855 else {
1856 FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)src1w));
1857 src1_reg = TMP_REG1;
1860 /* Destination. */
1861 dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG2;
1863 if (op <= SLJIT_MOV_P) {
1864 if (dst & SLJIT_MEM) {
1865 if (inp_flags & BYTE_SIZE)
1866 inp_flags &= ~SIGNED;
1868 if (FAST_IS_REG(src2))
1869 return emit_op_mem(compiler, inp_flags, src2, dst, dstw, TMP_REG2);
1872 if (FAST_IS_REG(src2) && dst_reg != TMP_REG2)
1873 flags |= MOVE_REG_CONV;
1876 /* Source 2. */
1877 if (src2_reg == 0) {
1878 src2_reg = (op <= SLJIT_MOV_P) ? dst_reg : TMP_REG2;
1880 if (FAST_IS_REG(src2))
1881 src2_reg = src2;
1882 else if (src2 & SLJIT_MEM)
1883 FAIL_IF(emit_op_mem(compiler, inp_flags | LOAD_DATA, src2_reg, src2, src2w, TMP_REG2));
1884 else
1885 FAIL_IF(load_immediate(compiler, src2_reg, (sljit_uw)src2w));
1888 FAIL_IF(emit_single_op(compiler, op, flags, (sljit_uw)dst_reg, (sljit_uw)src1_reg, (sljit_uw)src2_reg));
1890 if (!(dst & SLJIT_MEM))
1891 return SLJIT_SUCCESS;
1893 return emit_op_mem(compiler, inp_flags, dst_reg, dst, dstw, TMP_REG1);
1896 #ifdef __cplusplus
1897 extern "C" {
1898 #endif
1900 #if defined(__GNUC__)
1901 extern unsigned int __aeabi_uidivmod(unsigned int numerator, unsigned int denominator);
1902 extern int __aeabi_idivmod(int numerator, int denominator);
1903 #else
1904 #error "Software divmod functions are needed"
1905 #endif
1907 #ifdef __cplusplus
1908 }
1909 #endif
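/* Note on the helpers above: per the ARM EABI, __aeabi_uidivmod and
   __aeabi_idivmod return the quotient in r0 and the remainder in r1,
   which matches the register layout SLJIT_DIVMOD_* expects. */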
1911 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
1913 sljit_uw saved_reg_list[3];
1914 sljit_sw saved_reg_count;
1916 CHECK_ERROR();
1917 CHECK(check_sljit_emit_op0(compiler, op));
1919 op = GET_OPCODE(op);
1920 switch (op) {
1921 case SLJIT_BREAKPOINT:
1922 FAIL_IF(push_inst(compiler, BKPT));
1923 break;
1924 case SLJIT_NOP:
1925 FAIL_IF(push_inst(compiler, NOP));
1926 break;
1927 case SLJIT_LMUL_UW:
1928 case SLJIT_LMUL_SW:
1929 return push_inst(compiler, (op == SLJIT_LMUL_UW ? UMULL : SMULL)
1930 | RN(SLJIT_R1) | RD(SLJIT_R0) | RM8(SLJIT_R0) | RM(SLJIT_R1));
1931 case SLJIT_DIVMOD_UW:
1932 case SLJIT_DIVMOD_SW:
1933 case SLJIT_DIV_UW:
1934 case SLJIT_DIV_SW:
1935 SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
1936 SLJIT_ASSERT(reg_map[2] == 1 && reg_map[3] == 2 && reg_map[4] == 3);
1938 saved_reg_count = 0;
1939 if (compiler->scratches >= 4)
1940 saved_reg_list[saved_reg_count++] = 3;
1941 if (compiler->scratches >= 3)
1942 saved_reg_list[saved_reg_count++] = 2;
1943 if (op >= SLJIT_DIV_UW)
1944 saved_reg_list[saved_reg_count++] = 1;
1946 if (saved_reg_count > 0) {
1947 FAIL_IF(push_inst(compiler, STR | 0x2d0000 | (saved_reg_count >= 3 ? 16 : 8)
1948 | (saved_reg_list[0] << 12) /* str rX, [sp, #-8/-16]! */));
1949 if (saved_reg_count >= 2) {
1950 SLJIT_ASSERT(saved_reg_list[1] < 8);
1951 FAIL_IF(push_inst(compiler, STR | 0x8d0004 | (saved_reg_list[1] << 12) /* str rX, [sp, #4] */));
1953 if (saved_reg_count >= 3) {
1954 SLJIT_ASSERT(saved_reg_list[2] < 8);
1955 FAIL_IF(push_inst(compiler, STR | 0x8d0008 | (saved_reg_list[2] << 12) /* str rX, [sp, #8] */));
1959 #if defined(__GNUC__)
1960 FAIL_IF(sljit_emit_ijump(compiler, SLJIT_FAST_CALL, SLJIT_IMM,
1961 ((op | 0x2) == SLJIT_DIV_UW ? SLJIT_FUNC_ADDR(__aeabi_uidivmod) : SLJIT_FUNC_ADDR(__aeabi_idivmod))));
1962 #else
1963 #error "Software divmod functions are needed"
1964 #endif
1966 if (saved_reg_count > 0) {
1967 if (saved_reg_count >= 3) {
1968 SLJIT_ASSERT(saved_reg_list[2] < 8);
1969 FAIL_IF(push_inst(compiler, LDR | 0x8d0008 | (saved_reg_list[2] << 12) /* ldr rX, [sp, #8] */));
1971 if (saved_reg_count >= 2) {
1972 SLJIT_ASSERT(saved_reg_list[1] < 8);
1973 FAIL_IF(push_inst(compiler, LDR | 0x8d0004 | (saved_reg_list[1] << 12) /* ldr rX, [sp, #4] */));
1975 return push_inst(compiler, (LDR ^ (1 << 24)) | 0x8d0000 | (sljit_uw)(saved_reg_count >= 3 ? 16 : 8)
1976 | (saved_reg_list[0] << 12) /* ldr rX, [sp], #8/16 */);
1978 return SLJIT_SUCCESS;
1979 case SLJIT_ENDBR:
1980 case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
1981 return SLJIT_SUCCESS;
1984 return SLJIT_SUCCESS;
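/* In the divmod path above, the STR/LDR pairs exist because the AEABI
   helpers follow the AAPCS and may clobber r0-r3: r2/r3 are spilled
   when they hold live scratch values, and r1 is additionally spilled
   for the DIV-only opcodes, whose result does not include the
   remainder. */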
1987 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
1988 sljit_s32 dst, sljit_sw dstw,
1989 sljit_s32 src, sljit_sw srcw)
1991 CHECK_ERROR();
1992 CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
1993 ADJUST_LOCAL_OFFSET(dst, dstw);
1994 ADJUST_LOCAL_OFFSET(src, srcw);
1996 switch (GET_OPCODE(op)) {
1997 case SLJIT_MOV:
1998 case SLJIT_MOV_U32:
1999 case SLJIT_MOV_S32:
2000 case SLJIT_MOV32:
2001 case SLJIT_MOV_P:
2002 return emit_op(compiler, SLJIT_MOV, ALLOW_ANY_IMM, dst, dstw, TMP_REG1, 0, src, srcw);
2004 case SLJIT_MOV_U8:
2005 return emit_op(compiler, SLJIT_MOV_U8, ALLOW_ANY_IMM | BYTE_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
2007 case SLJIT_MOV_S8:
2008 return emit_op(compiler, SLJIT_MOV_S8, ALLOW_ANY_IMM | SIGNED | BYTE_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
2010 case SLJIT_MOV_U16:
2011 return emit_op(compiler, SLJIT_MOV_U16, ALLOW_ANY_IMM | HALF_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
2013 case SLJIT_MOV_S16:
2014 return emit_op(compiler, SLJIT_MOV_S16, ALLOW_ANY_IMM | SIGNED | HALF_SIZE, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
2016 case SLJIT_NOT:
2017 return emit_op(compiler, op, ALLOW_ANY_IMM, dst, dstw, TMP_REG1, 0, src, srcw);
2019 case SLJIT_CLZ:
2020 return emit_op(compiler, op, 0, dst, dstw, TMP_REG1, 0, src, srcw);
2023 return SLJIT_SUCCESS;
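/* Usage sketch (illustrative only): a sign-extending byte load such as
       sljit_emit_op1(compiler, SLJIT_MOV_S8, SLJIT_R0, 0,
           SLJIT_MEM1(SLJIT_R1), 4);
   flows through emit_op with SIGNED | BYTE_SIZE and, since the offset
   fits, ends up as a single ldrsb-style type2 transfer; immediate
   sources are first narrowed by the casts seen above. */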
2026 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
2027 sljit_s32 dst, sljit_sw dstw,
2028 sljit_s32 src1, sljit_sw src1w,
2029 sljit_s32 src2, sljit_sw src2w)
2031 CHECK_ERROR();
2032 CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
2033 ADJUST_LOCAL_OFFSET(dst, dstw);
2034 ADJUST_LOCAL_OFFSET(src1, src1w);
2035 ADJUST_LOCAL_OFFSET(src2, src2w);
2037 switch (GET_OPCODE(op)) {
2038 case SLJIT_ADD:
2039 case SLJIT_ADDC:
2040 case SLJIT_SUB:
2041 case SLJIT_SUBC:
2042 return emit_op(compiler, op, ALLOW_IMM | ALLOW_NEG_IMM, dst, dstw, src1, src1w, src2, src2w);
2044 case SLJIT_OR:
2045 case SLJIT_XOR:
2046 return emit_op(compiler, op, ALLOW_IMM, dst, dstw, src1, src1w, src2, src2w);
2048 case SLJIT_MUL:
2049 return emit_op(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w);
2051 case SLJIT_AND:
2052 return emit_op(compiler, op, ALLOW_ANY_IMM, dst, dstw, src1, src1w, src2, src2w);
2054 case SLJIT_SHL:
2055 case SLJIT_LSHR:
2056 case SLJIT_ASHR:
2057 if (src2 & SLJIT_IMM) {
2058 compiler->shift_imm = src2w & 0x1f;
2059 return emit_op(compiler, op, 0, dst, dstw, TMP_REG1, 0, src1, src1w);
2061 else {
2062 compiler->shift_imm = 0x20;
2063 return emit_op(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w);
2067 return SLJIT_SUCCESS;
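/* Convention used above: compiler->shift_imm carries the shift amount
   for SHL/LSHR/ASHR when it is an immediate (0-31); the out-of-band
   value 0x20 tells emit_single_op to take the amount from a register.
   For example (sketch):
       sljit_emit_op2(compiler, SLJIT_SHL, SLJIT_R0, 0,
           SLJIT_R0, 0, SLJIT_IMM, 3);
   becomes a single data processing instruction with an lsl #3 shift. */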
2070 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
2071 sljit_s32 src1, sljit_sw src1w,
2072 sljit_s32 src2, sljit_sw src2w)
2074 CHECK_ERROR();
2075 CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
2077 SLJIT_SKIP_CHECKS(compiler);
2078 return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
2081 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
2082 sljit_s32 src, sljit_sw srcw)
2084 CHECK_ERROR();
2085 CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
2086 ADJUST_LOCAL_OFFSET(src, srcw);
2088 switch (op) {
2089 case SLJIT_FAST_RETURN:
2090 SLJIT_ASSERT(reg_map[TMP_REG2] == 14);
2092 if (FAST_IS_REG(src))
2093 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(src)));
2094 else
2095 FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, src, srcw, TMP_REG1));
2097 return push_inst(compiler, BX | RM(TMP_REG2));
2098 case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
2099 return SLJIT_SUCCESS;
2100 case SLJIT_PREFETCH_L1:
2101 case SLJIT_PREFETCH_L2:
2102 case SLJIT_PREFETCH_L3:
2103 case SLJIT_PREFETCH_ONCE:
2104 #if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
2105 SLJIT_ASSERT(src & SLJIT_MEM);
2106 return emit_op_mem(compiler, PRELOAD | LOAD_DATA, TMP_PC, src, srcw, TMP_REG1);
2107 #else /* !SLJIT_CONFIG_ARM_V7 */
2108 return SLJIT_SUCCESS;
2109 #endif /* SLJIT_CONFIG_ARM_V7 */
2112 return SLJIT_SUCCESS;
2115 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
2117 CHECK_REG_INDEX(check_sljit_get_register_index(reg));
2118 return reg_map[reg];
2121 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
2123 CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
2124 return (freg_map[reg] << 1);
2127 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
2128 void *instruction, sljit_u32 size)
2130 SLJIT_UNUSED_ARG(size);
2131 CHECK_ERROR();
2132 CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
2134 return push_inst(compiler, *(sljit_uw*)instruction);
2137 /* --------------------------------------------------------------------- */
2138 /* Floating point operators */
2139 /* --------------------------------------------------------------------- */
2141 #define FPU_LOAD (1 << 20)
2142 #define EMIT_FPU_DATA_TRANSFER(inst, add, base, freg, offs) \
2143 ((inst) | (sljit_uw)((add) << 23) | RN(base) | VD(freg) | (sljit_uw)(offs))
2145 static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
2147 sljit_uw imm;
2148 sljit_uw inst = VSTR_F32 | (flags & (SLJIT_32 | FPU_LOAD));
2150 SLJIT_ASSERT(arg & SLJIT_MEM);
2151 arg &= ~SLJIT_MEM;
2153 if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
2154 FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG2) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (((sljit_uw)argw & 0x3) << 7)));
2155 arg = TMP_REG2;
2156 argw = 0;
2159 /* Fast loads and stores. */
2160 if (arg) {
2161 if (!(argw & ~0x3fc))
2162 return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, arg & REG_MASK, reg, argw >> 2));
2163 if (!(-argw & ~0x3fc))
2164 return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 0, arg & REG_MASK, reg, (-argw) >> 2));
2166 imm = get_imm((sljit_uw)argw & ~(sljit_uw)0x3fc);
2167 if (imm) {
2168 FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG2) | RN(arg & REG_MASK) | imm));
2169 return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, TMP_REG2, reg, (argw & 0x3fc) >> 2));
2171 imm = get_imm((sljit_uw)-argw & ~(sljit_uw)0x3fc);
2172 if (imm) {
2173 argw = -argw;
2174 FAIL_IF(push_inst(compiler, SUB | RD(TMP_REG2) | RN(arg & REG_MASK) | imm));
2175 return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 0, TMP_REG2, reg, (argw & 0x3fc) >> 2));
2179 if (arg) {
2180 FAIL_IF(load_immediate(compiler, TMP_REG2, (sljit_uw)argw));
2181 FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG2) | RN(arg & REG_MASK) | RM(TMP_REG2)));
2183 else
2184 FAIL_IF(load_immediate(compiler, TMP_REG2, (sljit_uw)argw));
2186 return push_inst(compiler, EMIT_FPU_DATA_TRANSFER(inst, 1, TMP_REG2, reg, 0));
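/* VLDR/VSTR immediates are 8-bit word offsets, so only byte offsets in
   [-1020, 1020] that are multiples of 4 are directly encodable; that is
   the ~0x3fc masking and ">> 2" scaling above. For example [base, #1020]
   encodes directly, while #1024 takes the ADD-into-TMP_REG2 fallback. */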
2189 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
2190 sljit_s32 dst, sljit_sw dstw,
2191 sljit_s32 src, sljit_sw srcw)
2193 op ^= SLJIT_32;
2195 if (src & SLJIT_MEM) {
2196 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG1, src, srcw));
2197 src = TMP_FREG1;
2200 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_S32_F32, op & SLJIT_32, TMP_FREG1, src, 0)));
2202 if (FAST_IS_REG(dst))
2203 return push_inst(compiler, VMOV | (1 << 20) | RD(dst) | VN(TMP_FREG1));
2205 /* Store the integer value from a VFP register. */
2206 return emit_fop_mem(compiler, 0, TMP_FREG1, dst, dstw);
2209 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
2210 sljit_s32 dst, sljit_sw dstw,
2211 sljit_s32 src, sljit_sw srcw)
2213 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2215 op ^= SLJIT_32;
2217 if (FAST_IS_REG(src))
2218 FAIL_IF(push_inst(compiler, VMOV | RD(src) | VN(TMP_FREG1)));
2219 else if (src & SLJIT_MEM) {
2220 /* Load the integer value into a VFP register. */
2221 FAIL_IF(emit_fop_mem(compiler, FPU_LOAD, TMP_FREG1, src, srcw));
2223 else {
2224 FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcw));
2225 FAIL_IF(push_inst(compiler, VMOV | RD(TMP_REG1) | VN(TMP_FREG1)));
2228 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F32_S32, op & SLJIT_32, dst_r, TMP_FREG1, 0)));
2230 if (dst & SLJIT_MEM)
2231 return emit_fop_mem(compiler, (op & SLJIT_32), TMP_FREG1, dst, dstw);
2232 return SLJIT_SUCCESS;
2235 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
2236 sljit_s32 src1, sljit_sw src1w,
2237 sljit_s32 src2, sljit_sw src2w)
2239 op ^= SLJIT_32;
2241 if (src1 & SLJIT_MEM) {
2242 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG1, src1, src1w));
2243 src1 = TMP_FREG1;
2246 if (src2 & SLJIT_MEM) {
2247 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG2, src2, src2w));
2248 src2 = TMP_FREG2;
2251 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCMP_F32, op & SLJIT_32, src1, src2, 0)));
2252 return push_inst(compiler, VMRS);
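/* The trailing VMRS copies the FPSCR N/Z/C/V flags into the APSR, so
   the integer condition codes produced by get_cc() below can test the
   VCMP result directly. */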
2255 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
2256 sljit_s32 dst, sljit_sw dstw,
2257 sljit_s32 src, sljit_sw srcw)
2259 sljit_s32 dst_r;
2261 CHECK_ERROR();
2263 SLJIT_COMPILE_ASSERT((SLJIT_32 == 0x100), float_transfer_bit_error);
2264 SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
2266 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2268 if (GET_OPCODE(op) != SLJIT_CONV_F64_FROM_F32)
2269 op ^= SLJIT_32;
2271 if (src & SLJIT_MEM) {
2272 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, dst_r, src, srcw));
2273 src = dst_r;
2276 switch (GET_OPCODE(op)) {
2277 case SLJIT_MOV_F64:
2278 if (src != dst_r) {
2279 if (dst_r != TMP_FREG1)
2280 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32, op & SLJIT_32, dst_r, src, 0)));
2281 else
2282 dst_r = src;
2284 break;
2285 case SLJIT_NEG_F64:
2286 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VNEG_F32, op & SLJIT_32, dst_r, src, 0)));
2287 break;
2288 case SLJIT_ABS_F64:
2289 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VABS_F32, op & SLJIT_32, dst_r, src, 0)));
2290 break;
2291 case SLJIT_CONV_F64_FROM_F32:
2292 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VCVT_F64_F32, op & SLJIT_32, dst_r, src, 0)));
2293 op ^= SLJIT_32;
2294 break;
2297 if (dst & SLJIT_MEM)
2298 return emit_fop_mem(compiler, (op & SLJIT_32), dst_r, dst, dstw);
2299 return SLJIT_SUCCESS;
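/* About the op ^= SLJIT_32 trick used throughout this section: SLJIT_32
   (0x100) coincides with the VFP double-precision (sz) bit in these
   encodings, so flipping it converts sljit's "SLJIT_32 set means single
   precision" convention into the "bit set means double precision" form
   the *_F32 opcode templates expect; SLJIT_CONV_F64_FROM_F32 is
   special-cased because its source and destination widths differ. */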
2302 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
2303 sljit_s32 dst, sljit_sw dstw,
2304 sljit_s32 src1, sljit_sw src1w,
2305 sljit_s32 src2, sljit_sw src2w)
2307 sljit_s32 dst_r;
2309 CHECK_ERROR();
2310 CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2311 ADJUST_LOCAL_OFFSET(dst, dstw);
2312 ADJUST_LOCAL_OFFSET(src1, src1w);
2313 ADJUST_LOCAL_OFFSET(src2, src2w);
2315 op ^= SLJIT_32;
2317 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2319 if (src2 & SLJIT_MEM) {
2320 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG2, src2, src2w));
2321 src2 = TMP_FREG2;
2324 if (src1 & SLJIT_MEM) {
2325 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32) | FPU_LOAD, TMP_FREG1, src1, src1w));
2326 src1 = TMP_FREG1;
2329 switch (GET_OPCODE(op)) {
2330 case SLJIT_ADD_F64:
2331 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VADD_F32, op & SLJIT_32, dst_r, src2, src1)));
2332 break;
2334 case SLJIT_SUB_F64:
2335 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VSUB_F32, op & SLJIT_32, dst_r, src2, src1)));
2336 break;
2338 case SLJIT_MUL_F64:
2339 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMUL_F32, op & SLJIT_32, dst_r, src2, src1)));
2340 break;
2342 case SLJIT_DIV_F64:
2343 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VDIV_F32, op & SLJIT_32, dst_r, src2, src1)));
2344 break;
2347 if (dst_r == TMP_FREG1)
2348 FAIL_IF(emit_fop_mem(compiler, (op & SLJIT_32), TMP_FREG1, dst, dstw));
2350 return SLJIT_SUCCESS;
2353 #undef EMIT_FPU_DATA_TRANSFER
2355 /* --------------------------------------------------------------------- */
2356 /* Other instructions */
2357 /* --------------------------------------------------------------------- */
2359 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
2361 CHECK_ERROR();
2362 CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
2363 ADJUST_LOCAL_OFFSET(dst, dstw);
2365 SLJIT_ASSERT(reg_map[TMP_REG2] == 14);
2367 if (FAST_IS_REG(dst))
2368 return push_inst(compiler, MOV | RD(dst) | RM(TMP_REG2));
2370 /* Memory. */
2371 return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, dst, dstw, TMP_REG1);
2374 /* --------------------------------------------------------------------- */
2375 /* Conditional instructions */
2376 /* --------------------------------------------------------------------- */
2378 static sljit_uw get_cc(struct sljit_compiler *compiler, sljit_s32 type)
2380 switch (type) {
2381 case SLJIT_EQUAL:
2382 case SLJIT_F_EQUAL:
2383 case SLJIT_ORDERED_EQUAL:
2384 case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
2385 return 0x00000000;
2387 case SLJIT_NOT_EQUAL:
2388 case SLJIT_F_NOT_EQUAL:
2389 case SLJIT_UNORDERED_OR_NOT_EQUAL:
2390 case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
2391 return 0x10000000;
2393 case SLJIT_CARRY:
2394 if (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_ADD)
2395 return 0x20000000;
2396 /* fallthrough */
2398 case SLJIT_LESS:
2399 return 0x30000000;
2401 case SLJIT_NOT_CARRY:
2402 if (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_ADD)
2403 return 0x30000000;
2404 /* fallthrough */
2406 case SLJIT_GREATER_EQUAL:
2407 return 0x20000000;
2409 case SLJIT_GREATER:
2410 case SLJIT_UNORDERED_OR_GREATER:
2411 return 0x80000000;
2413 case SLJIT_LESS_EQUAL:
2414 case SLJIT_F_LESS_EQUAL:
2415 case SLJIT_ORDERED_LESS_EQUAL:
2416 return 0x90000000;
2418 case SLJIT_SIG_LESS:
2419 case SLJIT_UNORDERED_OR_LESS:
2420 return 0xb0000000;
2422 case SLJIT_SIG_GREATER_EQUAL:
2423 case SLJIT_F_GREATER_EQUAL:
2424 case SLJIT_ORDERED_GREATER_EQUAL:
2425 return 0xa0000000;
2427 case SLJIT_SIG_GREATER:
2428 case SLJIT_F_GREATER:
2429 case SLJIT_ORDERED_GREATER:
2430 return 0xc0000000;
2432 case SLJIT_SIG_LESS_EQUAL:
2433 case SLJIT_UNORDERED_OR_LESS_EQUAL:
2434 return 0xd0000000;
2436 case SLJIT_OVERFLOW:
2437 if (!(compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)))
2438 return 0x10000000;
2439 /* fallthrough */
2441 case SLJIT_UNORDERED:
2442 return 0x60000000;
2444 case SLJIT_NOT_OVERFLOW:
2445 if (!(compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)))
2446 return 0x00000000;
2447 /* fallthrough */
2449 case SLJIT_ORDERED:
2450 return 0x70000000;
2452 case SLJIT_F_LESS:
2453 case SLJIT_ORDERED_LESS:
2454 return 0x40000000;
2456 case SLJIT_UNORDERED_OR_GREATER_EQUAL:
2457 return 0x50000000;
2459 default:
2460 SLJIT_ASSERT(type >= SLJIT_JUMP && type <= SLJIT_CALL_REG_ARG);
2461 return 0xe0000000;
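/* get_cc() returns the 4-bit ARM condition already shifted into bits
   31:28 (0x0... = EQ, 0x1... = NE, ..., 0xe... = AL), so callers simply
   clear COND_MASK on an instruction word and OR the result in. */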
2465 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
2467 struct sljit_label *label;
2469 CHECK_ERROR_PTR();
2470 CHECK_PTR(check_sljit_emit_label(compiler));
2472 if (compiler->last_label && compiler->last_label->size == compiler->size)
2473 return compiler->last_label;
2475 label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
2476 PTR_FAIL_IF(!label);
2477 set_label(label, compiler);
2478 return label;
2481 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
2483 struct sljit_jump *jump;
2485 CHECK_ERROR_PTR();
2486 CHECK_PTR(check_sljit_emit_jump(compiler, type));
2488 jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2489 PTR_FAIL_IF(!jump);
2490 set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
2491 type &= 0xff;
2493 SLJIT_ASSERT(reg_map[TMP_REG1] != 14);
2495 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
2496 if (type >= SLJIT_FAST_CALL)
2497 PTR_FAIL_IF(prepare_blx(compiler));
2498 PTR_FAIL_IF(push_inst_with_unique_literal(compiler, ((EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1,
2499 type <= SLJIT_JUMP ? TMP_PC : TMP_REG1, TMP_PC, 0)) & ~COND_MASK) | get_cc(compiler, type), 0));
2501 if (jump->flags & SLJIT_REWRITABLE_JUMP) {
2502 jump->addr = compiler->size;
2503 compiler->patches++;
2506 if (type >= SLJIT_FAST_CALL) {
2507 jump->flags |= IS_BL;
2508 PTR_FAIL_IF(emit_blx(compiler));
2511 if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
2512 jump->addr = compiler->size;
2513 #else
2514 if (type >= SLJIT_FAST_CALL)
2515 jump->flags |= IS_BL;
2516 PTR_FAIL_IF(emit_imm(compiler, TMP_REG1, 0));
2517 PTR_FAIL_IF(push_inst(compiler, (((type <= SLJIT_JUMP ? BX : BLX) | RM(TMP_REG1)) & ~COND_MASK) | get_cc(compiler, type)));
2518 jump->addr = compiler->size;
2519 #endif
2520 return jump;
2523 #ifdef __SOFTFP__
2525 static sljit_s32 softfloat_call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *src, sljit_u32 *extra_space)
2527 sljit_u32 is_tail_call = *extra_space & SLJIT_CALL_RETURN;
2528 sljit_u32 offset = 0;
2529 sljit_u32 word_arg_offset = 0;
2530 sljit_u32 src_offset = 4 * sizeof(sljit_sw);
2531 sljit_u32 float_arg_count = 0;
2532 sljit_s32 types = 0;
2533 sljit_u8 offsets[4];
2534 sljit_u8 *offset_ptr = offsets;
2536 if (src && FAST_IS_REG(*src))
2537 src_offset = (sljit_uw)reg_map[*src] * sizeof(sljit_sw);
2539 arg_types >>= SLJIT_ARG_SHIFT;
2541 while (arg_types) {
2542 types = (types << SLJIT_ARG_SHIFT) | (arg_types & SLJIT_ARG_MASK);
2544 switch (arg_types & SLJIT_ARG_MASK) {
2545 case SLJIT_ARG_TYPE_F64:
2546 if (offset & 0x7)
2547 offset += sizeof(sljit_sw);
2548 *offset_ptr++ = (sljit_u8)offset;
2549 offset += sizeof(sljit_f64);
2550 float_arg_count++;
2551 break;
2552 case SLJIT_ARG_TYPE_F32:
2553 *offset_ptr++ = (sljit_u8)offset;
2554 offset += sizeof(sljit_f32);
2555 float_arg_count++;
2556 break;
2557 default:
2558 *offset_ptr++ = (sljit_u8)offset;
2559 offset += sizeof(sljit_sw);
2560 word_arg_offset += sizeof(sljit_sw);
2561 break;
2564 arg_types >>= SLJIT_ARG_SHIFT;
2567 if (offset > 4 * sizeof(sljit_sw) && (!is_tail_call || offset > compiler->args_size)) {
2568 /* Keep lr register on the stack. */
2569 if (is_tail_call)
2570 offset += sizeof(sljit_sw);
2572 offset = ((offset - 4 * sizeof(sljit_sw)) + 0x7) & ~(sljit_uw)0x7;
2574 *extra_space = offset;
2576 if (is_tail_call)
2577 FAIL_IF(emit_stack_frame_release(compiler, (sljit_s32)offset));
2578 else
2579 FAIL_IF(push_inst(compiler, SUB | RD(SLJIT_SP) | RN(SLJIT_SP) | SRC2_IMM | offset));
2580 } else {
2581 if (is_tail_call)
2582 FAIL_IF(emit_stack_frame_release(compiler, -1));
2583 *extra_space = 0;
2586 /* Process the arguments in reverse order. */
2587 while (types) {
2588 switch (types & SLJIT_ARG_MASK) {
2589 case SLJIT_ARG_TYPE_F64:
2590 float_arg_count--;
2591 offset = *(--offset_ptr);
2593 SLJIT_ASSERT((offset & 0x7) == 0);
2595 if (offset < 4 * sizeof(sljit_sw)) {
2596 if (src_offset == offset || src_offset == offset + sizeof(sljit_sw)) {
2597 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | (src_offset >> 2)));
2598 *src = TMP_REG1;
2600 FAIL_IF(push_inst(compiler, VMOV2 | 0x100000 | (offset << 10) | ((offset + sizeof(sljit_sw)) << 14) | float_arg_count));
2601 } else
2602 FAIL_IF(push_inst(compiler, VSTR_F32 | 0x800100 | RN(SLJIT_SP)
2603 | (float_arg_count << 12) | ((offset - 4 * sizeof(sljit_sw)) >> 2)));
2604 break;
2605 case SLJIT_ARG_TYPE_F32:
2606 float_arg_count--;
2607 offset = *(--offset_ptr);
2609 if (offset < 4 * sizeof(sljit_sw)) {
2610 if (src_offset == offset) {
2611 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | (src_offset >> 2)));
2612 *src = TMP_REG1;
2614 FAIL_IF(push_inst(compiler, VMOV | 0x100000 | (float_arg_count << 16) | (offset << 10)));
2615 } else
2616 FAIL_IF(push_inst(compiler, VSTR_F32 | 0x800000 | RN(SLJIT_SP)
2617 | (float_arg_count << 12) | ((offset - 4 * sizeof(sljit_sw)) >> 2)));
2618 break;
2619 default:
2620 word_arg_offset -= sizeof(sljit_sw);
2621 offset = *(--offset_ptr);
2623 SLJIT_ASSERT(offset >= word_arg_offset);
2625 if (offset != word_arg_offset) {
2626 if (offset < 4 * sizeof(sljit_sw)) {
2627 if (src_offset == offset) {
2628 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | (src_offset >> 2)));
2629 *src = TMP_REG1;
2631 else if (src_offset == word_arg_offset) {
2632 *src = (sljit_s32)(SLJIT_R0 + (offset >> 2));
2633 src_offset = offset;
2635 FAIL_IF(push_inst(compiler, MOV | (offset << 10) | (word_arg_offset >> 2)));
2636 } else
2637 FAIL_IF(push_inst(compiler, STR | 0x800000 | RN(SLJIT_SP) | (word_arg_offset << 10) | (offset - 4 * sizeof(sljit_sw))));
2639 break;
2642 types >>= SLJIT_ARG_SHIFT;
2645 return SLJIT_SUCCESS;
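/* Rough shape of the softfp marshalling above, following the base
   AAPCS: the first 16 bytes of arguments travel in r0-r3 (an f64 is
   8-byte aligned, so a single preceding word makes it skip to r2/r3),
   and the rest are stored relative to the lowered sp. The second loop
   then moves each value into place in reverse order so that argument
   registers are not overwritten before they are read, with src_offset
   tracking the register that holds an indirect call target. */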
2648 static sljit_s32 softfloat_post_call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types)
2650 if ((arg_types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F64)
2651 FAIL_IF(push_inst(compiler, VMOV2 | (1 << 16) | (0 << 12) | 0));
2652 if ((arg_types & SLJIT_ARG_MASK) == SLJIT_ARG_TYPE_F32)
2653 FAIL_IF(push_inst(compiler, VMOV | (0 << 16) | (0 << 12)));
2655 return SLJIT_SUCCESS;
2658 #else /* !__SOFTFP__ */
2660 static sljit_s32 hardfloat_call_with_args(struct sljit_compiler *compiler, sljit_s32 arg_types)
2662 sljit_u32 offset = SLJIT_FR0;
2663 sljit_u32 new_offset = SLJIT_FR0;
2664 sljit_u32 f32_offset = 0;
2666 /* Skip the return type. */
2667 arg_types >>= SLJIT_ARG_SHIFT;
2669 while (arg_types) {
2670 switch (arg_types & SLJIT_ARG_MASK) {
2671 case SLJIT_ARG_TYPE_F64:
2672 if (offset != new_offset)
2673 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32,
2674 SLJIT_32, new_offset, offset, 0)));
2676 new_offset++;
2677 offset++;
2678 break;
2679 case SLJIT_ARG_TYPE_F32:
2680 if (f32_offset != 0) {
2681 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32,
2682 0x400000, f32_offset, offset, 0)));
2683 f32_offset = 0;
2684 } else {
2685 if (offset != new_offset)
2686 FAIL_IF(push_inst(compiler, EMIT_FPU_OPERATION(VMOV_F32,
2687 0, new_offset, offset, 0)));
2688 f32_offset = new_offset;
2689 new_offset++;
2691 offset++;
2692 break;
2694 arg_types >>= SLJIT_ARG_SHIFT;
2697 return SLJIT_SUCCESS;
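/* Hardfp packing above: each f64 argument claims the next double
   register, while an f32 can later be packed into the unused half of a
   double register claimed by an earlier f32 (tracked by f32_offset),
   mirroring the VFP variant of the AAPCS. */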
2700 #endif /* __SOFTFP__ */
2702 #undef EMIT_FPU_OPERATION
2704 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
2705 sljit_s32 arg_types)
2707 #ifdef __SOFTFP__
2708 struct sljit_jump *jump;
2709 sljit_u32 extra_space = (sljit_u32)type;
2710 #endif
2712 CHECK_ERROR_PTR();
2713 CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
2715 #ifdef __SOFTFP__
2716 if ((type & 0xff) != SLJIT_CALL_REG_ARG) {
2717 PTR_FAIL_IF(softfloat_call_with_args(compiler, arg_types, NULL, &extra_space));
2718 SLJIT_ASSERT((extra_space & 0x7) == 0);
2720 if ((type & SLJIT_CALL_RETURN) && extra_space == 0)
2721 type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
2723 SLJIT_SKIP_CHECKS(compiler);
2724 jump = sljit_emit_jump(compiler, type);
2725 PTR_FAIL_IF(jump == NULL);
2727 if (extra_space > 0) {
2728 if (type & SLJIT_CALL_RETURN)
2729 PTR_FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1,
2730 TMP_REG2, SLJIT_SP, extra_space - sizeof(sljit_sw))));
2732 PTR_FAIL_IF(push_inst(compiler, ADD | RD(SLJIT_SP) | RN(SLJIT_SP) | SRC2_IMM | extra_space));
2734 if (type & SLJIT_CALL_RETURN) {
2735 PTR_FAIL_IF(push_inst(compiler, BX | RM(TMP_REG2)));
2736 return jump;
2740 SLJIT_ASSERT(!(type & SLJIT_CALL_RETURN));
2741 PTR_FAIL_IF(softfloat_post_call_with_args(compiler, arg_types));
2742 return jump;
2744 #endif /* __SOFTFP__ */
2746 if (type & SLJIT_CALL_RETURN) {
2747 PTR_FAIL_IF(emit_stack_frame_release(compiler, -1));
2748 type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
2751 #ifndef __SOFTFP__
2752 if ((type & 0xff) != SLJIT_CALL_REG_ARG)
2753 PTR_FAIL_IF(hardfloat_call_with_args(compiler, arg_types));
2754 #endif /* !__SOFTFP__ */
2756 SLJIT_SKIP_CHECKS(compiler);
2757 return sljit_emit_jump(compiler, type);
2760 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
2762 struct sljit_jump *jump;
2764 CHECK_ERROR();
2765 CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
2766 ADJUST_LOCAL_OFFSET(src, srcw);
2768 SLJIT_ASSERT(reg_map[TMP_REG1] != 14);
2770 if (!(src & SLJIT_IMM)) {
2771 if (FAST_IS_REG(src)) {
2772 SLJIT_ASSERT(reg_map[src] != 14);
2773 return push_inst(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RM(src));
2776 SLJIT_ASSERT(src & SLJIT_MEM);
2777 FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
2778 return push_inst(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RM(TMP_REG1));
2781 /* These jumps are converted to jump/call instructions when possible. */
2782 jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2783 FAIL_IF(!jump);
2784 set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_BL : 0));
2785 jump->u.target = (sljit_uw)srcw;
2787 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
2788 if (type >= SLJIT_FAST_CALL)
2789 FAIL_IF(prepare_blx(compiler));
2790 FAIL_IF(push_inst_with_unique_literal(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, type <= SLJIT_JUMP ? TMP_PC : TMP_REG1, TMP_PC, 0), 0));
2791 if (type >= SLJIT_FAST_CALL)
2792 FAIL_IF(emit_blx(compiler));
2793 #else
2794 FAIL_IF(emit_imm(compiler, TMP_REG1, 0));
2795 FAIL_IF(push_inst(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RM(TMP_REG1)));
2796 #endif
2797 jump->addr = compiler->size;
2798 return SLJIT_SUCCESS;
2801 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
2802 sljit_s32 arg_types,
2803 sljit_s32 src, sljit_sw srcw)
2805 #ifdef __SOFTFP__
2806 sljit_u32 extra_space = (sljit_u32)type;
2807 #endif
2809 CHECK_ERROR();
2810 CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
2812 if (src & SLJIT_MEM) {
2813 FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
2814 src = TMP_REG1;
2817 if ((type & SLJIT_CALL_RETURN) && (src >= SLJIT_FIRST_SAVED_REG && src <= SLJIT_S0)) {
2818 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(src)));
2819 src = TMP_REG1;
2822 #ifdef __SOFTFP__
2823 if ((type & 0xff) != SLJIT_CALL_REG_ARG) {
2824 FAIL_IF(softfloat_call_with_args(compiler, arg_types, &src, &extra_space));
2825 SLJIT_ASSERT((extra_space & 0x7) == 0);
2827 if ((type & SLJIT_CALL_RETURN) && extra_space == 0)
2828 type = SLJIT_JUMP;
2830 SLJIT_SKIP_CHECKS(compiler);
2831 FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));
2833 if (extra_space > 0) {
2834 if (type & SLJIT_CALL_RETURN)
2835 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1,
2836 TMP_REG2, SLJIT_SP, extra_space - sizeof(sljit_sw))));
2838 FAIL_IF(push_inst(compiler, ADD | RD(SLJIT_SP) | RN(SLJIT_SP) | SRC2_IMM | extra_space));
2840 if (type & SLJIT_CALL_RETURN)
2841 return push_inst(compiler, BX | RM(TMP_REG2));
2844 SLJIT_ASSERT(!(type & SLJIT_CALL_RETURN));
2845 return softfloat_post_call_with_args(compiler, arg_types);
2847 #endif /* __SOFTFP__ */
2849 if (type & SLJIT_CALL_RETURN) {
2850 FAIL_IF(emit_stack_frame_release(compiler, -1));
2851 type = SLJIT_JUMP;
2854 #ifndef __SOFTFP__
2855 if ((type & 0xff) != SLJIT_CALL_REG_ARG)
2856 FAIL_IF(hardfloat_call_with_args(compiler, arg_types));
2857 #endif /* !__SOFTFP__ */
2859 SLJIT_SKIP_CHECKS(compiler);
2860 return sljit_emit_ijump(compiler, type, src, srcw);
2863 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
2864 sljit_s32 dst, sljit_sw dstw,
2865 sljit_s32 type)
2867 sljit_s32 dst_reg, flags = GET_ALL_FLAGS(op);
2868 sljit_uw cc, ins;
2870 CHECK_ERROR();
2871 CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
2872 ADJUST_LOCAL_OFFSET(dst, dstw);
2874 op = GET_OPCODE(op);
2875 cc = get_cc(compiler, type);
2876 dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
2878 if (op < SLJIT_ADD) {
2879 FAIL_IF(push_inst(compiler, MOV | RD(dst_reg) | SRC2_IMM | 0));
2880 FAIL_IF(push_inst(compiler, ((MOV | RD(dst_reg) | SRC2_IMM | 1) & ~COND_MASK) | cc));
2881 if (dst & SLJIT_MEM)
2882 return emit_op_mem(compiler, WORD_SIZE, TMP_REG1, dst, dstw, TMP_REG2);
2883 return SLJIT_SUCCESS;
2886 ins = (op == SLJIT_AND ? AND : (op == SLJIT_OR ? ORR : EOR));
2888 if (dst & SLJIT_MEM)
2889 FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG1, dst, dstw, TMP_REG2));
2891 FAIL_IF(push_inst(compiler, ((ins | RD(dst_reg) | RN(dst_reg) | SRC2_IMM | 1) & ~COND_MASK) | cc));
2893 if (op == SLJIT_AND)
2894 FAIL_IF(push_inst(compiler, ((ins | RD(dst_reg) | RN(dst_reg) | SRC2_IMM | 0) & ~COND_MASK) | (cc ^ 0x10000000)));
2896 if (dst & SLJIT_MEM)
2897 FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG1, dst, dstw, TMP_REG2));
2899 if (flags & SLJIT_SET_Z)
2900 return push_inst(compiler, MOV | SET_FLAGS | RD(TMP_REG2) | RM(dst_reg));
2901 return SLJIT_SUCCESS;
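/* For SLJIT_AND a single conditional instruction cannot produce the
   0/1 result: the first AND above keeps bit 0 when the condition holds,
   and the second AND with immediate 0 runs under the inverted condition
   (cc ^ 0x10000000 flips EQ<->NE, LT<->GE, and so on) to clear the
   destination when it does not. */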
2904 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
2905 sljit_s32 dst_reg,
2906 sljit_s32 src, sljit_sw srcw)
2908 sljit_uw cc, tmp;
2910 CHECK_ERROR();
2911 CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
2913 dst_reg &= ~SLJIT_32;
2915 cc = get_cc(compiler, type);
2917 if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
2918 tmp = get_imm((sljit_uw)srcw);
2919 if (tmp)
2920 return push_inst(compiler, ((MOV | RD(dst_reg) | tmp) & ~COND_MASK) | cc);
2922 tmp = get_imm(~(sljit_uw)srcw);
2923 if (tmp)
2924 return push_inst(compiler, ((MVN | RD(dst_reg) | tmp) & ~COND_MASK) | cc);
2926 #if (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
2927 tmp = (sljit_uw)srcw;
2928 FAIL_IF(push_inst(compiler, (MOVW & ~COND_MASK) | cc | RD(dst_reg) | ((tmp << 4) & 0xf0000) | (tmp & 0xfff)));
2929 if (tmp <= 0xffff)
2930 return SLJIT_SUCCESS;
2931 return push_inst(compiler, (MOVT & ~COND_MASK) | cc | RD(dst_reg) | ((tmp >> 12) & 0xf0000) | ((tmp >> 16) & 0xfff));
2932 #else
2933 FAIL_IF(load_immediate(compiler, TMP_REG1, (sljit_uw)srcw));
2934 src = TMP_REG1;
2935 #endif
2938 return push_inst(compiler, ((MOV | RD(dst_reg) | RM(src)) & ~COND_MASK) | cc);
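/* The conditional move tries, in order: a conditional MOV with an
   encodable immediate, a conditional MVN with the inverted immediate,
   then (ARMv7 only) a conditional MOVW/MOVT pair, and finally a plain
   conditional register MOV after load_immediate. A sketch:
       sljit_emit_cmov(compiler, SLJIT_EQUAL, SLJIT_R0, SLJIT_IMM, 16);
   would emit roughly "moveq r0, #16". */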
2941 static sljit_s32 update_mem_addr(struct sljit_compiler *compiler, sljit_s32 *mem, sljit_sw *memw, sljit_s32 max_offset)
2943 sljit_s32 arg = *mem;
2944 sljit_sw argw = *memw;
2945 sljit_uw imm;
2946 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
2947 sljit_sw mask = max_offset >= 0x100 ? 0xfff : 0xff;
2948 #else /* !SLJIT_CONFIG_ARM_V5 */
2949 sljit_sw mask = 0xfff;
2951 SLJIT_ASSERT(max_offset >= 0x100);
2952 #endif /* SLJIT_CONFIG_ARM_V5 */
2954 *mem = TMP_REG1;
2956 if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
2957 *memw = 0;
2958 return push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | ((sljit_uw)(argw & 0x3) << 7));
2961 arg &= REG_MASK;
2963 if (arg) {
2964 if (argw <= max_offset && argw >= -mask) {
2965 *mem = arg;
2966 return SLJIT_SUCCESS;
2969 if (argw < 0) {
2970 imm = get_imm((sljit_uw)(-argw & ~mask));
2972 if (imm) {
2973 *memw = -(-argw & mask);
2974 return push_inst(compiler, SUB | RD(TMP_REG1) | RN(arg) | imm);
2976 } else if ((argw & mask) <= max_offset) {
2977 imm = get_imm((sljit_uw)(argw & ~mask));
2979 if (imm) {
2980 *memw = argw & mask;
2981 return push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg) | imm);
2983 } else {
2984 imm = get_imm((sljit_uw)((argw | mask) + 1));
2986 if (imm) {
2987 *memw = (argw & mask) - (mask + 1);
2988 return push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg) | imm);
2993 imm = (sljit_uw)(argw & ~mask);
2995 if ((argw & mask) > max_offset) {
2996 imm += (sljit_uw)(mask + 1);
2997 *memw = (argw & mask) - (mask + 1);
2998 } else
2999 *memw = argw & mask;
3001 FAIL_IF(load_immediate(compiler, TMP_REG1, imm));
3003 if (arg == 0)
3004 return SLJIT_SUCCESS;
3006 return push_inst(compiler, ADD | RD(TMP_REG1) | RN(TMP_REG1) | RM(arg));
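/* Contract of update_mem_addr(): on return *mem is a plain base
   register (possibly TMP_REG1) and *memw is reduced to a displacement
   within [-mask, max_offset], so the caller can encode it directly in
   the transfer instruction. */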
3009 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
3011 static sljit_s32 sljit_emit_mem_unaligned(struct sljit_compiler *compiler, sljit_s32 type,
3012 sljit_s32 reg,
3013 sljit_s32 mem, sljit_sw memw)
3015 sljit_s32 flags;
3016 sljit_s32 steps;
3017 sljit_uw add, shift;
3019 switch (type & 0xff) {
3020 case SLJIT_MOV_U8:
3021 case SLJIT_MOV_S8:
3022 flags = BYTE_SIZE;
3023 if (!(type & SLJIT_MEM_STORE))
3024 flags |= LOAD_DATA;
3025 if ((type & 0xff) == SLJIT_MOV_S8)
3026 flags |= SIGNED;
3028 return emit_op_mem(compiler, flags, reg, mem, memw, TMP_REG1);
3030 case SLJIT_MOV_U16:
3031 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 1));
3032 flags = BYTE_SIZE;
3033 steps = 1;
3034 break;
3036 case SLJIT_MOV_S16:
3037 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xff - 1));
3038 flags = BYTE_SIZE | SIGNED;
3039 steps = 1;
3040 break;
3042 default:
3043 if (type & SLJIT_MEM_ALIGNED_32) {
3044 flags = WORD_SIZE;
3045 if (!(type & SLJIT_MEM_STORE))
3046 flags |= LOAD_DATA;
3048 return emit_op_mem(compiler, flags, reg, mem, memw, TMP_REG1);
3051 if (!(type & SLJIT_MEM_ALIGNED_16)) {
3052 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 3));
3053 flags = BYTE_SIZE;
3054 steps = 3;
3055 break;
3058 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xff - 2));
3060 add = 1;
3061 if (memw < 0) {
3062 add = 0;
3063 memw = -memw;
3066 if (type & SLJIT_MEM_STORE) {
3067 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE, add, reg, mem, TYPE2_TRANSFER_IMM(memw))));
3068 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(reg) | (16 << 7) | (2 << 4)));
3070 if (!add) {
3071 memw -= 2;
3072 if (memw <= 0) {
3073 memw = -memw;
3074 add = 1;
3076 } else
3077 memw += 2;
3079 return push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE, add, TMP_REG2, mem, TYPE2_TRANSFER_IMM(memw)));
3082 if (reg == mem) {
3083 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(mem)));
3084 mem = TMP_REG1;
3087 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE | LOAD_DATA, add, reg, mem, TYPE2_TRANSFER_IMM(memw))));
3089 if (!add) {
3090 memw -= 2;
3091 if (memw <= 0) {
3092 memw = -memw;
3093 add = 1;
3095 } else
3096 memw += 2;
3098 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(HALF_SIZE | LOAD_DATA, add, TMP_REG2, mem, TYPE2_TRANSFER_IMM(memw))));
3099 return push_inst(compiler, ORR | RD(reg) | RN(reg) | RM(TMP_REG2) | (16 << 7));
3102 SLJIT_ASSERT(steps > 0);
3104 add = 1;
3105 if (memw < 0) {
3106 add = 0;
3107 memw = -memw;
3110 if (type & SLJIT_MEM_STORE) {
3111 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE, add, reg, mem, memw)));
3112 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(reg) | (8 << 7) | (2 << 4)));
3114 while (1) {
3115 if (!add) {
3116 memw -= 1;
3117 if (memw == 0)
3118 add = 1;
3119 } else
3120 memw += 1;
3122 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE, add, TMP_REG2, mem, memw)));
3124 if (--steps == 0)
3125 return SLJIT_SUCCESS;
3127 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG2) | RM(TMP_REG2) | (8 << 7) | (2 << 4)));
3131 if (reg == mem) {
3132 FAIL_IF(push_inst(compiler, MOV | RD(TMP_REG1) | RM(mem)));
3133 mem = TMP_REG1;
3136 shift = 8;
3137 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE | LOAD_DATA, add, reg, mem, memw)));
3139 do {
3140 if (!add) {
3141 memw -= 1;
3142 if (memw == 0)
3143 add = 1;
3144 } else
3145 memw += 1;
3147 if (steps > 1) {
3148 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(BYTE_SIZE | LOAD_DATA, add, TMP_REG2, mem, memw)));
3149 FAIL_IF(push_inst(compiler, ORR | RD(reg) | RN(reg) | RM(TMP_REG2) | (shift << 7)));
3150 shift += 8;
3152 } while (--steps != 0);
3154 flags |= LOAD_DATA;
3156 if (flags & SIGNED)
3157 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(flags, add, TMP_REG2, mem, TYPE2_TRANSFER_IMM(memw))));
3158 else
3159 FAIL_IF(push_inst(compiler, EMIT_DATA_TRANSFER(flags, add, TMP_REG2, mem, memw)));
3161 return push_inst(compiler, ORR | RD(reg) | RN(reg) | RM(TMP_REG2) | (shift << 7));
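/* ARMv5 has no unaligned load/store support here, so the code above
   synthesizes the access: stores shift the value right (lsr #8 / #16)
   and emit one byte or half transfer per piece, while loads gather the
   pieces and OR them together at increasing shifts (8, 16, 24 for a
   word assembled from four bytes). */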
3164 #endif /* SLJIT_CONFIG_ARM_V5 */
3166 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
3167 sljit_s32 reg,
3168 sljit_s32 mem, sljit_sw memw)
3170 sljit_s32 flags;
3171 sljit_uw is_type1_transfer, inst;
3173 CHECK_ERROR();
3174 CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
3176 if (type & SLJIT_MEM_UNALIGNED)
3177 return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);
3179 is_type1_transfer = 1;
3181 switch (type & 0xff) {
3182 case SLJIT_MOV:
3183 case SLJIT_MOV_U32:
3184 case SLJIT_MOV_S32:
3185 case SLJIT_MOV32:
3186 case SLJIT_MOV_P:
3187 flags = WORD_SIZE;
3188 break;
3189 case SLJIT_MOV_U8:
3190 flags = BYTE_SIZE;
3191 break;
3192 case SLJIT_MOV_S8:
3193 if (!(type & SLJIT_MEM_STORE))
3194 is_type1_transfer = 0;
3195 flags = BYTE_SIZE | SIGNED;
3196 break;
3197 case SLJIT_MOV_U16:
3198 is_type1_transfer = 0;
3199 flags = HALF_SIZE;
3200 break;
3201 case SLJIT_MOV_S16:
3202 is_type1_transfer = 0;
3203 flags = HALF_SIZE | SIGNED;
3204 break;
3205 default:
3206 SLJIT_UNREACHABLE();
3207 flags = WORD_SIZE;
3208 break;
3211 if (!(type & SLJIT_MEM_STORE))
3212 flags |= LOAD_DATA;
3214 SLJIT_ASSERT(is_type1_transfer == !!IS_TYPE1_TRANSFER(flags));
3216 if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
3217 if (!is_type1_transfer && memw != 0)
3218 return SLJIT_ERR_UNSUPPORTED;
3220 else {
3221 if (is_type1_transfer) {
3222 if (memw > 4095 || memw < -4095)
3223 return SLJIT_ERR_UNSUPPORTED;
3225 else {
3226 if (memw > 255 || memw < -255)
3227 return SLJIT_ERR_UNSUPPORTED;
3231 if (type & SLJIT_MEM_SUPP)
3232 return SLJIT_SUCCESS;
3234 if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
3235 memw &= 0x3;
3237 inst = EMIT_DATA_TRANSFER(flags, 1, reg, mem & REG_MASK, RM(OFFS_REG(mem)) | ((sljit_uw)memw << 7));
3239 if (is_type1_transfer)
3240 inst |= (1 << 25);
3242 if (type & SLJIT_MEM_PRE)
3243 inst |= (1 << 21);
3244 else
3245 inst ^= (1 << 24);
3247 return push_inst(compiler, inst);
3250 inst = EMIT_DATA_TRANSFER(flags, 0, reg, mem & REG_MASK, 0);
3252 if (type & SLJIT_MEM_PRE)
3253 inst |= (1 << 21);
3254 else
3255 inst ^= (1 << 24);
3257 if (is_type1_transfer) {
3258 if (memw >= 0)
3259 inst |= (1 << 23);
3260 else
3261 memw = -memw;
3263 return push_inst(compiler, inst | (sljit_uw)memw);
3266 if (memw >= 0)
3267 inst |= (1 << 23);
3268 else
3269 memw = -memw;
3271 return push_inst(compiler, inst | TYPE2_TRANSFER_IMM((sljit_uw)memw));
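/* Pre/post-indexed forms above: the templates start out pre-indexed
   (bit 24 set), so SLJIT_MEM_PRE only adds write-back (bit 21), while
   SLJIT_MEM_POST clears bit 24 to select the post-indexed form, which
   always writes the updated address back to the base register. */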
3274 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compiler, sljit_s32 type,
3275 sljit_s32 freg,
3276 sljit_s32 mem, sljit_sw memw)
3278 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
3279 sljit_s32 max_offset;
3280 sljit_s32 dst;
3281 #endif /* SLJIT_CONFIG_ARM_V5 */
3283 CHECK_ERROR();
3284 CHECK(check_sljit_emit_fmem(compiler, type, freg, mem, memw));
3286 if (type & (SLJIT_MEM_PRE | SLJIT_MEM_POST))
3287 return SLJIT_ERR_UNSUPPORTED;
3289 if (type & SLJIT_MEM_ALIGNED_32)
3290 return emit_fop_mem(compiler, ((type ^ SLJIT_32) & SLJIT_32) | ((type & SLJIT_MEM_STORE) ? 0 : FPU_LOAD), freg, mem, memw);
3292 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
3293 if (type & SLJIT_MEM_STORE) {
3294 FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | RD(TMP_REG2)));
3296 if (type & SLJIT_32)
3297 return sljit_emit_mem_unaligned(compiler, SLJIT_MOV | SLJIT_MEM_STORE | (type & SLJIT_MEM_ALIGNED_16), TMP_REG2, mem, memw);
3299 max_offset = 0xfff - 7;
3300 if (type & SLJIT_MEM_ALIGNED_16)
3301 max_offset++;
3303 FAIL_IF(update_mem_addr(compiler, &mem, &memw, max_offset));
3304 mem |= SLJIT_MEM;
3306 FAIL_IF(sljit_emit_mem_unaligned(compiler, SLJIT_MOV | SLJIT_MEM_STORE | (type & SLJIT_MEM_ALIGNED_16), TMP_REG2, mem, memw));
3308 FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | 0x80 | RD(TMP_REG2)));
3309 return sljit_emit_mem_unaligned(compiler, SLJIT_MOV | SLJIT_MEM_STORE | (type & SLJIT_MEM_ALIGNED_16), TMP_REG2, mem, memw + 4);
3312 max_offset = (type & SLJIT_32) ? 0xfff - 3 : 0xfff - 7;
3313 if (type & SLJIT_MEM_ALIGNED_16)
3314 max_offset++;
3316 FAIL_IF(update_mem_addr(compiler, &mem, &memw, max_offset));
3318 dst = TMP_REG1;
3320 /* Stack offset adjustment is not needed because dst
3321 is not stored on the stack when mem is SLJIT_SP. */
3323 if (mem == TMP_REG1) {
3324 dst = SLJIT_R3;
3326 if (compiler->scratches >= 4)
3327 FAIL_IF(push_inst(compiler, STR | (1 << 21) | RN(SLJIT_SP) | RD(SLJIT_R3) | 8));
3330 mem |= SLJIT_MEM;
3332 FAIL_IF(sljit_emit_mem_unaligned(compiler, SLJIT_MOV | (type & SLJIT_MEM_ALIGNED_16), dst, mem, memw));
3333 FAIL_IF(push_inst(compiler, VMOV | VN(freg) | RD(dst)));
3335 if (!(type & SLJIT_32)) {
3336 FAIL_IF(sljit_emit_mem_unaligned(compiler, SLJIT_MOV | (type & SLJIT_MEM_ALIGNED_16), dst, mem, memw + 4));
3337 FAIL_IF(push_inst(compiler, VMOV | VN(freg) | 0x80 | RD(dst)));
3340 if (dst == SLJIT_R3 && compiler->scratches >= 4)
3341 FAIL_IF(push_inst(compiler, (LDR ^ (0x1 << 24)) | (0x1 << 23) | RN(SLJIT_SP) | RD(SLJIT_R3) | 8));
3342 return SLJIT_SUCCESS;
3343 #else /* !SLJIT_CONFIG_ARM_V5 */
3344 if (type & SLJIT_MEM_STORE) {
3345 FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | RD(TMP_REG2)));
3347 if (type & SLJIT_32)
3348 return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1);
3350 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
3351 mem |= SLJIT_MEM;
3353 FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw, TMP_REG1));
3354 FAIL_IF(push_inst(compiler, VMOV | (1 << 20) | VN(freg) | 0x80 | RD(TMP_REG2)));
3355 return emit_op_mem(compiler, WORD_SIZE, TMP_REG2, mem, memw + 4, TMP_REG1);
3358 if (type & SLJIT_32) {
3359 FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, mem, memw, TMP_REG1));
3360 return push_inst(compiler, VMOV | VN(freg) | RD(TMP_REG2));
3363 FAIL_IF(update_mem_addr(compiler, &mem, &memw, 0xfff - 4));
3364 mem |= SLJIT_MEM;
3366 FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG2, mem, memw, TMP_REG1));
3367 FAIL_IF(emit_op_mem(compiler, WORD_SIZE | LOAD_DATA, TMP_REG1, mem, memw + 4, TMP_REG1));
3368 return push_inst(compiler, VMOV2 | VM(freg) | RD(TMP_REG2) | RN(TMP_REG1));
3369 #endif /* SLJIT_CONFIG_ARM_V5 */
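/* On both paths an unaligned f64 is handled as two 32-bit transfers:
   VMOV/VMOV2 move the halves between the VFP register and core
   registers (the 0x80 in the VN field selects the high single of the
   pair), and update_mem_addr() keeps both memw and memw + 4 encodable. */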
3372 #undef FPU_LOAD
3374 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
3376 struct sljit_const *const_;
3377 sljit_s32 dst_r;
3379 CHECK_ERROR_PTR();
3380 CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
3381 ADJUST_LOCAL_OFFSET(dst, dstw);
3383 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
3385 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
3386 PTR_FAIL_IF(push_inst_with_unique_literal(compiler,
3387 EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, dst_r, TMP_PC, 0), (sljit_uw)init_value));
3388 compiler->patches++;
3389 #else
3390 PTR_FAIL_IF(emit_imm(compiler, dst_r, init_value));
3391 #endif
3393 const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
3394 PTR_FAIL_IF(!const_);
3395 set_const(const_, compiler);
3397 if (dst & SLJIT_MEM)
3398 PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, dst, dstw, TMP_REG1));
3399 return const_;
3402 SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
3404 struct sljit_put_label *put_label;
3405 sljit_s32 dst_r;
3407 CHECK_ERROR_PTR();
3408 CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
3409 ADJUST_LOCAL_OFFSET(dst, dstw);
3411 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
3413 #if (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5)
3414 PTR_FAIL_IF(push_inst_with_unique_literal(compiler, EMIT_DATA_TRANSFER(WORD_SIZE | LOAD_DATA, 1, dst_r, TMP_PC, 0), 0));
3415 compiler->patches++;
3416 #else
3417 PTR_FAIL_IF(emit_imm(compiler, dst_r, 0));
3418 #endif
3420 put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
3421 PTR_FAIL_IF(!put_label);
3422 set_put_label(put_label, compiler, 0);
3424 if (dst & SLJIT_MEM)
3425 PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG2, dst, dstw, TMP_REG1));
3426 return put_label;
3429 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
3431 inline_set_jump_addr(addr, executable_offset, new_target, 1);
3434 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
3436 inline_set_const(addr, executable_offset, (sljit_uw)new_constant, 1);