tcg/tcg-op.c
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu/osdep.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg/tcg.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-mo.h"
#include "trace-tcg.h"
#include "trace/mem.h"
#include "exec/plugin-gen.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}
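/*
 * Note: a barrier is only emitted when the TB was generated for
 * parallel execution (CF_PARALLEL set in tb_cflags).  For a
 * single-threaded round-robin guest the fence would be a no-op,
 * so it is simply omitted.
 */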
void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}

/* 32 bit ops */
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
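/*
 * Reverse subtract with an immediate minuend: ret = arg1 - arg2,
 * where arg1 is the constant.  The arg1 == 0 case is a negation.
 */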
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_mul_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
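/*
 * Three expansions, in order of preference: a native div opcode; the
 * div2 opcode, which divides the double-width value high:low (here
 * the sign extension of arg1 in t0, concatenated with arg1) by arg2;
 * or a call out to a helper.
 */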
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
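/*
 * When only division is available, the remainder falls out of the
 * identity r = arg1 - (arg1 / arg2) * arg2.
 */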
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t = tcg_const_i32(arg2);
    tcg_gen_clz_i32(ret, arg1, t);
    tcg_temp_free_i32(t);
}
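/*
 * Two classic bit tricks provide the expansion: (arg1 - 1) & ~arg1
 * turns exactly the trailing zero bits into ones, so a population
 * count yields ctz directly; alternatively arg1 & -arg1 isolates the
 * lowest set bit, whose clz is converted to a bit index by xor with
 * 31.  Either way, a movcond supplies arg2 for the arg1 == 0 case.
 */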
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
        tcg_temp_free_i32(z);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i32 t = tcg_const_i32(arg2);
        tcg_gen_ctz_i32(ret, arg1, t);
        tcg_temp_free_i32(t);
    }
}
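/*
 * Count leading redundant sign bits, i.e. the number of bits below
 * the sign bit that are equal to it.  XOR-ing with the broadcast
 * sign (arg >> 31) maps the problem onto clz; the sign bit itself
 * always matches, hence the final subtraction of 1.
 */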
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
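/*
 * Deposit the low LEN bits of arg2 into arg1 at position OFS, i.e.
 * with mask = (1u << len) - 1:
 *
 *     ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 *
 * which is also what the generic masked fallback at the end computes.
 */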
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_const_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
        tcg_temp_free_i32(zero);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
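/*
 * Without a native movcond, select branchlessly: setcond produces
 * 0/1, negation broadcasts that to an all-zeros/all-ones mask m, and
 * ret = (v1 & m) | (v2 & ~m) picks one operand without a branch.
 */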
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
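/*
 * With only an unsigned double-width multiply available, the signed
 * high part is recovered by correcting for negative inputs:
 *
 *     mulsh(a, b) = muluh(a, b) - (a < 0 ? b : 0) - (b < 0 ? a : 0)
 *
 * which is what the 32-bit-host path below computes with two sign
 * masks and two subtractions.
 */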
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
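/*
 * Signed-by-unsigned variant: only arg1 is signed, so only its sign
 * can perturb the unsigned high part, and a single correction term
 * (arg1 < 0 ? arg2 : 0) suffices.
 */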
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_const_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    }
}

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
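/*
 * Branchless absolute value: t is 0 for non-negative a and -1 for
 * negative a, so (a ^ t) - t either leaves a unchanged or yields
 * ~a + 1 = -a.
 */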
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
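/*
 * Constant 64-bit shift when the host is 32-bit.  Shifts of 32 or
 * more move (and extend) one half into the other; shorter shifts
 * shift both halves and funnel the c bits that cross the 32-bit
 * boundary, via extract2 when the host has it.
 */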
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_mul_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_const_i64(0x00ff00ff);

                                            /* arg = ....abcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .....abc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        tcg_gen_shri_i64(t1, t1, 32);       /*  t1 = ....dc.. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....dcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}
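/*
 * On a 32-bit host, chain two 32-bit clz ops through their "value if
 * the input is zero" operand: clz of the low half (defaulting to
 * arg2 - 32), plus 32, becomes the default result for clz of the
 * high half, so both halves and the all-zero case are covered without
 * a conditional.
 */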
1848 void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1850 if (TCG_TARGET_REG_BITS == 32
1851 && TCG_TARGET_HAS_clz_i32
1852 && arg2 <= 0xffffffffu) {
1853 TCGv_i32 t = tcg_const_i32((uint32_t)arg2 - 32);
1854 tcg_gen_clz_i32(t, TCGV_LOW(arg1), t);
1855 tcg_gen_addi_i32(t, t, 32);
1856 tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
1857 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1858 tcg_temp_free_i32(t);
1859 } else {
1860 TCGv_i64 t = tcg_const_i64(arg2);
1861 tcg_gen_clz_i64(ret, arg1, t);
1862 tcg_temp_free_i64(t);
1866 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1868 if (TCG_TARGET_HAS_ctz_i64) {
1869 tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
1870 } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
1871 TCGv_i64 z, t = tcg_temp_new_i64();
1873 if (TCG_TARGET_HAS_ctpop_i64) {
1874 tcg_gen_subi_i64(t, arg1, 1);
1875 tcg_gen_andc_i64(t, t, arg1);
1876 tcg_gen_ctpop_i64(t, t);
1877 } else {
1878 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
1879 tcg_gen_neg_i64(t, arg1);
1880 tcg_gen_and_i64(t, t, arg1);
1881 tcg_gen_clzi_i64(t, t, 64);
1882 tcg_gen_xori_i64(t, t, 63);
1884 z = tcg_const_i64(0);
1885 tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
1886 tcg_temp_free_i64(t);
1887 tcg_temp_free_i64(z);
1888 } else {
1889 gen_helper_ctz_i64(ret, arg1, arg2);
1893 void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1895 if (TCG_TARGET_REG_BITS == 32
1896 && TCG_TARGET_HAS_ctz_i32
1897 && arg2 <= 0xffffffffu) {
1898 TCGv_i32 t32 = tcg_const_i32((uint32_t)arg2 - 32);
1899 tcg_gen_ctz_i32(t32, TCGV_HIGH(arg1), t32);
1900 tcg_gen_addi_i32(t32, t32, 32);
1901 tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
1902 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1903 tcg_temp_free_i32(t32);
1904 } else if (!TCG_TARGET_HAS_ctz_i64
1905 && TCG_TARGET_HAS_ctpop_i64
1906 && arg2 == 64) {
1907 /* This equivalence has the advantage of not requiring a fixup. */
1908 TCGv_i64 t = tcg_temp_new_i64();
1909 tcg_gen_subi_i64(t, arg1, 1);
1910 tcg_gen_andc_i64(t, t, arg1);
1911 tcg_gen_ctpop_i64(ret, t);
1912 tcg_temp_free_i64(t);
1913 } else {
1914 TCGv_i64 t64 = tcg_const_i64(arg2);
1915 tcg_gen_ctz_i64(ret, arg1, t64);
1916 tcg_temp_free_i64(t64);
1920 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
1922 if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
1923 TCGv_i64 t = tcg_temp_new_i64();
1924 tcg_gen_sari_i64(t, arg, 63);
1925 tcg_gen_xor_i64(t, t, arg);
1926 tcg_gen_clzi_i64(t, t, 64);
1927 tcg_gen_subi_i64(ret, t, 1);
1928 tcg_temp_free_i64(t);
1929 } else {
1930 gen_helper_clrsb_i64(ret, arg);
1934 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
1936 if (TCG_TARGET_HAS_ctpop_i64) {
1937 tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
1938 } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
1939 tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
1940 tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
1941 tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
1942 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1943 } else {
1944 gen_helper_ctpop_i64(ret, arg1);
1948 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1950 if (TCG_TARGET_HAS_rot_i64) {
1951 tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1952 } else {
1953 TCGv_i64 t0, t1;
1954 t0 = tcg_temp_new_i64();
1955 t1 = tcg_temp_new_i64();
1956 tcg_gen_shl_i64(t0, arg1, arg2);
1957 tcg_gen_subfi_i64(t1, 64, arg2);
1958 tcg_gen_shr_i64(t1, arg1, t1);
1959 tcg_gen_or_i64(ret, t0, t1);
1960 tcg_temp_free_i64(t0);
1961 tcg_temp_free_i64(t1);
1965 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1967 tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1968 /* some cases can be optimized here */
1969 if (arg2 == 0) {
1970 tcg_gen_mov_i64(ret, arg1);
1971 } else if (TCG_TARGET_HAS_rot_i64) {
1972 TCGv_i64 t0 = tcg_const_i64(arg2);
1973 tcg_gen_rotl_i64(ret, arg1, t0);
1974 tcg_temp_free_i64(t0);
1975 } else {
1976 TCGv_i64 t0, t1;
1977 t0 = tcg_temp_new_i64();
1978 t1 = tcg_temp_new_i64();
1979 tcg_gen_shli_i64(t0, arg1, arg2);
1980 tcg_gen_shri_i64(t1, arg1, 64 - arg2);
1981 tcg_gen_or_i64(ret, t0, t1);
1982 tcg_temp_free_i64(t0);
1983 tcg_temp_free_i64(t1);
1987 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1989 if (TCG_TARGET_HAS_rot_i64) {
1990 tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
1991 } else {
1992 TCGv_i64 t0, t1;
1993 t0 = tcg_temp_new_i64();
1994 t1 = tcg_temp_new_i64();
1995 tcg_gen_shr_i64(t0, arg1, arg2);
1996 tcg_gen_subfi_i64(t1, 64, arg2);
1997 tcg_gen_shl_i64(t1, arg1, t1);
1998 tcg_gen_or_i64(ret, t0, t1);
1999 tcg_temp_free_i64(t0);
2000 tcg_temp_free_i64(t1);
2004 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2006 tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2007 /* some cases can be optimized here */
2008 if (arg2 == 0) {
2009 tcg_gen_mov_i64(ret, arg1);
2010 } else {
2011 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}

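/*
 * Editor's sketch of the generic mask-based fallback above: insert the
 * low LEN bits of ARG2 into ARG1 at offset OFS (names hypothetical):
 */
static inline uint64_t sketch_deposit64(uint64_t arg1, uint64_t arg2,
                                        unsigned ofs, unsigned len)
{
    uint64_t mask = (len == 64 ? ~0ull : (1ull << len) - 1);
    return (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs);
}
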
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_const_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
        tcg_temp_free_i64(zero);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}

void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}

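/*
 * Editor's note (illustrative): the two fallback shapes above are,
 * in plain C,
 *     shift-and:     (arg >> ofs) & ((1ull << len) - 1)
 *     double shift:  (arg << (64 - len - ofs)) >> (64 - len)
 * the first preferred when the AND mask is assumed cheap (8/16/32 bits).
 */
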
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_new_i32();
            /* Extract the bits for the high word normally.  Field bits
               [32, len) sit at bits [ofs, ofs + len - 32) of the high
               word, so the offset passed down must stay below 32.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}

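/*
 * Editor's sketch of the final fallback above: move the field up to the
 * MSB, then arithmetic-shift it back down, replicating the sign bit
 * (assumes arithmetic right shift of signed values, as QEMU does):
 */
static inline int64_t sketch_sextract64(int64_t arg, unsigned ofs, unsigned len)
{
    return (int64_t)((uint64_t)arg << (64 - len - ofs)) >> (64 - len);
}
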
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}

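/*
 * Editor's note (illustrative): for 0 < ofs < 64 this is a funnel shift
 * across the 128-bit pair ah:al,
 *     ret == (al >> ofs) | (ah << (64 - ofs))
 * which the fallback builds as a shift plus a deposit into the top bits.
 */
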
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

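/*
 * Editor's sketch of the no-movcond fallback: the setcond result is
 * negated into a 0/all-ones mask and the selection is done branchlessly
 * with and/andc/or:
 */
static inline uint64_t sketch_select64(bool cond, uint64_t v1, uint64_t v2)
{
    uint64_t mask = -(uint64_t)cond;        /* 0 or all-ones */
    return (v1 & mask) | (v2 & ~mask);
}
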
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

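/*
 * Editor's sketch: the fallback detects carry with an unsigned compare,
 * since al + bl wraps below al exactly when the low addition carried:
 */
static inline uint64_t sketch_add_carry64(uint64_t al, uint64_t bl,
                                          uint64_t *carry)
{
    uint64_t rl = al + bl;
    *carry = rl < al;           /* the TCG_COND_LTU setcond above */
    return rl;
}
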
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

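/*
 * Editor's note (illustrative): the mulu2-based branch derives the
 * signed high part from the unsigned one with
 *     hi_s = hi_u - (arg1 < 0 ? arg2 : 0) - (arg2 < 0 ? arg1 : 0)
 * the sign tests being the sari-by-63 masks AND-ed with the opposite
 * operand; the low 64 bits are identical for signed and unsigned.
 */
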
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}

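/*
 * Editor's sketch: with s = a >> 63 (0 or all-ones), (a ^ s) - s leaves
 * a unchanged when s == 0 and computes ~a + 1 == -a when s == -1, i.e.
 * a branchless absolute value:
 */
static inline uint64_t sketch_abs64(int64_t a)
{
    uint64_t s = (uint64_t)(a >> 63);   /* arithmetic shift assumed */
    return ((uint64_t)a ^ s) - s;
}
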
/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

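/*
 * Editor's note (illustrative): the value being assembled is simply
 *     dest = ((uint64_t)high << 32) | (uint32_t)low
 * either via a deposit into bits [63:32], or with shift + or given the
 * zero-extensions just emitted.
 */
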
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

/* QEMU specific operations.  */

void tcg_gen_exit_tb(TranslationBlock *tb, unsigned idx)
{
    uintptr_t val = (uintptr_t)tb + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
        /* When not chaining, exit without indicating a link.  */
        if (qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
            val = 0;
        }
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_exit_tb, val);
}

void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    /* When not chaining, we simply fall through to the "fallback" exit.  */
    if (!qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
        tcg_gen_op1i(INDEX_op_goto_tb, idx);
    }
}

void tcg_gen_lookup_and_goto_ptr(void)
{
    if (TCG_TARGET_HAS_goto_ptr && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
        TCGv_ptr ptr;

        plugin_gen_disable_mem_helpers();
        ptr = tcg_temp_new_ptr();
        gen_helper_lookup_tb_ptr(ptr, cpu_env);
        tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
        tcg_temp_free_ptr(ptr);
    } else {
        tcg_gen_exit_tb(NULL, 0);
    }
}

static inline MemOp tcg_canonicalize_memop(MemOp op, bool is64, bool st)
{
    /* Trigger the asserts within as early as possible.  */
    (void)get_alignment_bits(op);

    switch (op & MO_SIZE) {
    case MO_8:
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            tcg_abort();
        }
        break;
    }
    if (st) {
        op &= ~MO_SIGN;
    }
    return op;
}

static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         MemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_i64_arg(addr), oi);
    }
#endif
}

static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         MemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_i32_arg(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}

static void tcg_gen_req_mo(TCGBar type)
{
#ifdef TCG_GUEST_DEFAULT_MO
    type &= TCG_GUEST_DEFAULT_MO;
#endif
    type &= ~TCG_TARGET_DEFAULT_MO;
    if (type) {
        tcg_gen_mb(type | TCG_BAR_SC);
    }
}

static inline TCGv plugin_prep_mem_callbacks(TCGv vaddr)
{
#ifdef CONFIG_PLUGIN
    if (tcg_ctx->plugin_insn != NULL) {
        /* Save a copy of the vaddr for use after a load.  */
        TCGv temp = tcg_temp_new();
        tcg_gen_mov_tl(temp, vaddr);
        return temp;
    }
#endif
    return vaddr;
}

static inline void plugin_gen_mem_callbacks(TCGv vaddr, uint16_t info)
{
#ifdef CONFIG_PLUGIN
    if (tcg_ctx->plugin_insn != NULL) {
        plugin_gen_empty_mem_callback(vaddr, info);
        tcg_temp_free(vaddr);
    }
#endif
}

void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
{
    MemOp orig_memop;
    uint16_t info = trace_mem_get_info(memop, idx, 0);

    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    memop = tcg_canonicalize_memop(memop, 0, 0);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);

    orig_memop = memop;
    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        memop &= ~MO_BSWAP;
        /* The bswap primitive requires zero-extended input.  */
        if ((memop & MO_SSIZE) == MO_SW) {
            memop &= ~MO_SIGN;
        }
    }

    addr = plugin_prep_mem_callbacks(addr);
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
    plugin_gen_mem_callbacks(addr, info);

    if ((orig_memop ^ memop) & MO_BSWAP) {
        switch (orig_memop & MO_SIZE) {
        case MO_16:
            tcg_gen_bswap16_i32(val, val);
            if (orig_memop & MO_SIGN) {
                tcg_gen_ext16s_i32(val, val);
            }
            break;
        case MO_32:
            tcg_gen_bswap32_i32(val, val);
            break;
        default:
            g_assert_not_reached();
        }
    }
}

void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
{
    TCGv_i32 swap = NULL;
    uint16_t info = trace_mem_get_info(memop, idx, 1);

    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    memop = tcg_canonicalize_memop(memop, 0, 1);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);

    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        swap = tcg_temp_new_i32();
        switch (memop & MO_SIZE) {
        case MO_16:
            tcg_gen_ext16u_i32(swap, val);
            tcg_gen_bswap16_i32(swap, swap);
            break;
        case MO_32:
            tcg_gen_bswap32_i32(swap, val);
            break;
        default:
            g_assert_not_reached();
        }
        val = swap;
        memop &= ~MO_BSWAP;
    }

    addr = plugin_prep_mem_callbacks(addr);
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
    plugin_gen_mem_callbacks(addr, info);

    if (swap) {
        tcg_temp_free_i32(swap);
    }
}

void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
{
    MemOp orig_memop;
    uint16_t info;

    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    memop = tcg_canonicalize_memop(memop, 1, 0);
    info = trace_mem_get_info(memop, idx, 0);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);

    orig_memop = memop;
    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        memop &= ~MO_BSWAP;
        /* The bswap primitive requires zero-extended input.  */
        if ((memop & MO_SIGN) && (memop & MO_SIZE) < MO_64) {
            memop &= ~MO_SIGN;
        }
    }

    addr = plugin_prep_mem_callbacks(addr);
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
    plugin_gen_mem_callbacks(addr, info);

    if ((orig_memop ^ memop) & MO_BSWAP) {
        switch (orig_memop & MO_SIZE) {
        case MO_16:
            tcg_gen_bswap16_i64(val, val);
            if (orig_memop & MO_SIGN) {
                tcg_gen_ext16s_i64(val, val);
            }
            break;
        case MO_32:
            tcg_gen_bswap32_i64(val, val);
            if (orig_memop & MO_SIGN) {
                tcg_gen_ext32s_i64(val, val);
            }
            break;
        case MO_64:
            tcg_gen_bswap64_i64(val, val);
            break;
        default:
            g_assert_not_reached();
        }
    }
}

void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
{
    TCGv_i64 swap = NULL;
    uint16_t info;

    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    memop = tcg_canonicalize_memop(memop, 1, 1);
    info = trace_mem_get_info(memop, idx, 1);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);

    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        swap = tcg_temp_new_i64();
        switch (memop & MO_SIZE) {
        case MO_16:
            tcg_gen_ext16u_i64(swap, val);
            tcg_gen_bswap16_i64(swap, swap);
            break;
        case MO_32:
            tcg_gen_ext32u_i64(swap, val);
            tcg_gen_bswap32_i64(swap, swap);
            break;
        case MO_64:
            tcg_gen_bswap64_i64(swap, val);
            break;
        default:
            g_assert_not_reached();
        }
        val = swap;
        memop &= ~MO_BSWAP;
    }

    addr = plugin_prep_mem_callbacks(addr);
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
    plugin_gen_mem_callbacks(addr, info);

    if (swap) {
        tcg_temp_free_i64(swap);
    }
}

static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, MemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i32(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i32(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i32(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i32(ret, val);
        break;
    default:
        tcg_gen_mov_i32(ret, val);
        break;
    }
}

static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, MemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i64(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i64(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i64(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i64(ret, val);
        break;
    case MO_SL:
        tcg_gen_ext32s_i64(ret, val);
        break;
    case MO_UL:
        tcg_gen_ext32u_i64(ret, val);
        break;
    default:
        tcg_gen_mov_i64(ret, val);
        break;
    }
}

#ifdef CONFIG_SOFTMMU
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
#else
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
#endif

#ifdef CONFIG_ATOMIC64
# define WITH_ATOMIC64(X) X,
#else
# define WITH_ATOMIC64(X)
#endif

static void * const table_cmpxchg[16] = {
    [MO_8] = gen_helper_atomic_cmpxchgb,
    [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
    [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
    [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
    [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
};

void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
                                TCGv_i32 newv, TCGArg idx, MemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();

        tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i32(t2, addr, idx, memop);
        tcg_temp_free_i32(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, t1, memop);
        } else {
            tcg_gen_mov_i32(retv, t1);
        }
        tcg_temp_free_i32(t1);
    } else {
        gen_atomic_cx_i32 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(retv, cpu_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, cpu_env, addr, cmpv, newv);
#endif

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, retv, memop);
        }
    }
}

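/*
 * Editor's sketch of the non-parallel expansion above, as plain C over
 * a guest location *p (no atomicity is required without CF_PARALLEL):
 */
static inline uint32_t sketch_cmpxchg32(uint32_t *p, uint32_t cmp,
                                        uint32_t newv)
{
    uint32_t old = *p;
    *p = (old == cmp ? newv : old);     /* the movcond above */
    return old;                         /* retv receives the old value */
}
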
void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, MemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_cx_i64 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
            gen(retv, cpu_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, cpu_env, addr, cmpv, newv);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(retv, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}

static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop);
    tcg_gen_ext_i32(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

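/*
 * Editor's note (illustrative): new_val selects which value the
 * operation returns; the fetch_* helpers generated below return the
 * old value (t1) while the *_fetch helpers return the updated one (t2).
 * Both store the updated value back.
 */
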
static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
        gen(ret, cpu_env, addr, val, oi);
        tcg_temp_free_i32(oi);
    }
#else
    gen(ret, cpu_env, addr, val);
#endif

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}

static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop);
    tcg_gen_ext_i64(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(ret, cpu_env, addr, val, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(ret, cpu_env, addr, val);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(ret, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}

#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}

GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)
GEN_ATOMIC_HELPER(fetch_smin, smin, 0)
GEN_ATOMIC_HELPER(fetch_umin, umin, 0)
GEN_ATOMIC_HELPER(fetch_smax, smax, 0)
GEN_ATOMIC_HELPER(fetch_umax, umax, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)
GEN_ATOMIC_HELPER(smin_fetch, smin, 1)
GEN_ATOMIC_HELPER(umin_fetch, umin, 1)
GEN_ATOMIC_HELPER(smax_fetch, smax, 1)
GEN_ATOMIC_HELPER(umax_fetch, umax, 1)

static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER