/* [qemu-kvm/fedora.git] tcg/hppa/tcg-target.c */
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%r0",
    "%r1",
    "%rp",
    "%r3",
    "%r4",
    "%r5",
    "%r6",
    "%r7",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
    "%r16",
    "%r17",
    "%r18",
    "%r19",
    "%r20",
    "%r21",
    "%r22",
    "%r23",
    "%r24",
    "%r25",
    "%r26",
    "%dp",
    "%ret0",
    "%ret1",
    "%sp",
    "%r31",
};
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,

    TCG_REG_R17,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
};

static const int tcg_target_call_iarg_regs[4] = {
    TCG_REG_R26,
    TCG_REG_R25,
    TCG_REG_R24,
    TCG_REG_R23,
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RET0,
    TCG_REG_RET1,
};

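/* The two tables above follow the 32-bit PA-RISC calling convention:
   the first four integer arguments are passed in %r26..%r23 (arg0..arg3)
   and results come back in %ret0/%ret1 (%r28/%r29). */
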
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    switch (type) {
    case R_PARISC_PCREL17F:
        hppa_patch17f((uint32_t *)code_ptr, value, addend);
        break;
    default:
        tcg_abort();
    }
}

/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 4;
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R26);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R25);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R24);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R23);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;

    /* TODO */

    return 0;
}

#define INSN_OP(x)       ((x) << 26)
#define INSN_EXT3BR(x)   ((x) << 13)
#define INSN_EXT3SH(x)   ((x) << 10)
#define INSN_EXT4(x)     ((x) << 6)
#define INSN_EXT5(x)     (x)
#define INSN_EXT6(x)     ((x) << 6)
#define INSN_EXT7(x)     ((x) << 6)
#define INSN_EXT8A(x)    ((x) << 6)
#define INSN_EXT8B(x)    ((x) << 5)
#define INSN_T(x)        (x)
#define INSN_R1(x)       ((x) << 16)
#define INSN_R2(x)       ((x) << 21)
#define INSN_DEP_LEN(x)  (32 - (x))
#define INSN_SHDEP_CP(x) ((31 - (x)) << 5)
#define INSN_SHDEP_P(x)  ((x) << 5)
#define INSN_COND(x)     ((x) << 13)

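/* The INSN_* helpers place fields at their PA-RISC instruction positions
   for assembly with tcg_out32(): INSN_OP() is the 6-bit major opcode in the
   top bits, INSN_R1()/INSN_R2() are the 5-bit register fields, and INSN_T()
   is the target register in the low bits of the three-register formats.
   The deposit/shift helpers encode lengths and bit positions in the
   complemented form the hardware expects (e.g. a length field of 32 - len). */
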
#define COND_NEVER 0
#define COND_EQUAL 1
#define COND_LT    2
#define COND_LTEQ  3
#define COND_LTU   4
#define COND_LTUEQ 5
#define COND_SV    6
#define COND_OD    7

/* Logical ADD */
#define ARITH_ADD  (INSN_OP(0x02) | INSN_EXT6(0x28))
#define ARITH_AND  (INSN_OP(0x02) | INSN_EXT6(0x08))
#define ARITH_OR   (INSN_OP(0x02) | INSN_EXT6(0x09))
#define ARITH_XOR  (INSN_OP(0x02) | INSN_EXT6(0x0a))
#define ARITH_SUB  (INSN_OP(0x02) | INSN_EXT6(0x10))

#define SHD        (INSN_OP(0x34) | INSN_EXT3SH(2))
#define VSHD       (INSN_OP(0x34) | INSN_EXT3SH(0))
#define DEP        (INSN_OP(0x35) | INSN_EXT3SH(3))
#define ZDEP       (INSN_OP(0x35) | INSN_EXT3SH(2))
#define ZVDEP      (INSN_OP(0x35) | INSN_EXT3SH(0))
#define EXTRU      (INSN_OP(0x34) | INSN_EXT3SH(6))
#define EXTRS      (INSN_OP(0x34) | INSN_EXT3SH(7))
#define VEXTRS     (INSN_OP(0x34) | INSN_EXT3SH(5))

#define SUBI       (INSN_OP(0x25))
#define MTCTL      (INSN_OP(0x00) | INSN_EXT8B(0xc2))

#define BL         (INSN_OP(0x3a) | INSN_EXT3BR(0))
#define BLE_SR4    (INSN_OP(0x39) | (1 << 13))
#define BV         (INSN_OP(0x3a) | INSN_EXT3BR(6))
#define BV_N       (INSN_OP(0x3a) | INSN_EXT3BR(6) | 2)
#define LDIL       (INSN_OP(0x08))
#define LDO        (INSN_OP(0x0d))

#define LDB        (INSN_OP(0x10))
#define LDH        (INSN_OP(0x11))
#define LDW        (INSN_OP(0x12))
#define LDWM       (INSN_OP(0x13))

#define STB        (INSN_OP(0x18))
#define STH        (INSN_OP(0x19))
#define STW        (INSN_OP(0x1a))
#define STWM       (INSN_OP(0x1b))

#define COMBT      (INSN_OP(0x20))
#define COMBF      (INSN_OP(0x22))

static int lowsignext(uint32_t val, int start, int length)
{
    return (((val << 1) & ~(~0 << length)) |
            ((val >> (length - 1)) & 1)) << start;
}

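/* lowsignext() builds a PA-RISC "low sign extension" immediate: the sign
   bit of the constant is stored in the least significant bit of the field
   and the remaining bits sit above it.  SUBI, used for the shift cases in
   tcg_out_op() below, takes its 11-bit immediate in this form. */
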
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    /* PA1.1 defines COPY as OR r,0,t */
    tcg_out32(s, ARITH_OR | INSN_T(ret) | INSN_R1(arg) | INSN_R2(TCG_REG_R0));

    /* PA2.0 defines COPY as LDO 0(r),t
     * but hppa-dis.c is unaware of this definition */
    /* tcg_out32(s, LDO | INSN_R1(ret) | INSN_R2(arg) | reassemble_14(0)); */
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == (arg & 0x1fff)) {
        tcg_out32(s, LDO | INSN_R1(ret) | INSN_R2(TCG_REG_R0) |
                  reassemble_14(arg));
    } else {
        tcg_out32(s, LDIL | INSN_R2(ret) |
                  reassemble_21(lrsel((uint32_t)arg, 0)));
        if (arg & 0x7ff)
            tcg_out32(s, LDO | INSN_R1(ret) | INSN_R2(ret) |
                      reassemble_14(rrsel((uint32_t)arg, 0)));
    }
}

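/* Constants that do not fit the 14-bit LDO immediate are built the usual
   PA-RISC way: LDIL sets the high ("left", lrsel()) part of the register
   and a following LDO adds the low ("right", rrsel()) part, roughly
   LDIL L%const,reg ; LDO R%const(reg),reg. */
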
static inline void tcg_out_ld_raw(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    tcg_out32(s, LDIL | INSN_R2(ret) |
              reassemble_21(lrsel((uint32_t)arg, 0)));
    tcg_out32(s, LDW | INSN_R1(ret) | INSN_R2(ret) |
              reassemble_14(rrsel((uint32_t)arg, 0)));
}

static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    tcg_out_ld_raw(s, ret, arg);
}

static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset,
                                int op)
{
    if (offset == (offset & 0xfff))
        tcg_out32(s, op | INSN_R1(ret) | INSN_R2(addr) |
                  reassemble_14(offset));
    else {
        fprintf(stderr, "unimplemented %s with offset %d\n", __func__, offset);
        tcg_abort();
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    fprintf(stderr, "unimplemented %s\n", __func__);
    tcg_abort();
}

static inline void tcg_out_st(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    fprintf(stderr, "unimplemented %s\n", __func__);
    tcg_abort();
}

static inline void tcg_out_arith(TCGContext *s, int t, int r1, int r2, int op)
{
    tcg_out32(s, op | INSN_T(t) | INSN_R1(r1) | INSN_R2(r2));
}

static inline void tcg_out_arithi(TCGContext *s, int t, int r1,
                                  tcg_target_long val, int op)
{
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R20, val);
    tcg_out_arith(s, t, r1, TCG_REG_R20, op);
}

static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
}

static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out32(s, ARITH_OR | INSN_T(TCG_REG_R0) | INSN_R1(TCG_REG_R0) |
              INSN_R2(TCG_REG_R0));
}

static inline void tcg_out_ext8s(TCGContext *s, int ret, int arg) {
    tcg_out32(s, EXTRS | INSN_R1(ret) | INSN_R2(arg) |
              INSN_SHDEP_P(31) | INSN_DEP_LEN(8));
}

static inline void tcg_out_ext16s(TCGContext *s, int ret, int arg) {
    tcg_out32(s, EXTRS | INSN_R1(ret) | INSN_R2(arg) |
              INSN_SHDEP_P(31) | INSN_DEP_LEN(16));
}

static inline void tcg_out_bswap16(TCGContext *s, int ret, int arg) {
    if (ret != arg)
        tcg_out_mov(s, ret, arg);
    tcg_out32(s, DEP | INSN_R2(ret) | INSN_R1(ret) |
              INSN_SHDEP_CP(15) | INSN_DEP_LEN(8));
    tcg_out32(s, SHD | INSN_T(ret) | INSN_R1(TCG_REG_R0) |
              INSN_R2(ret) | INSN_SHDEP_CP(8));
}

static inline void tcg_out_bswap32(TCGContext *s, int ret, int arg, int temp) {
    tcg_out32(s, SHD | INSN_T(temp) | INSN_R1(arg) |
              INSN_R2(arg) | INSN_SHDEP_CP(16));
    tcg_out32(s, DEP | INSN_R2(temp) | INSN_R1(temp) |
              INSN_SHDEP_CP(15) | INSN_DEP_LEN(8));
    tcg_out32(s, SHD | INSN_T(ret) | INSN_R1(arg) |
              INSN_R2(temp) | INSN_SHDEP_CP(8));
}

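/* Lacking a dedicated byte-swap instruction, the two helpers above
   synthesize 16- and 32-bit swaps from SHD (shift double, a funnel shift
   across a register pair) and DEP (deposit a field into a register). */
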
static inline void tcg_out_call(TCGContext *s, void *func)
{
    uint32_t val = (uint32_t)__canonicalize_funcptr_for_compare(func);
    tcg_out32(s, LDIL | INSN_R2(TCG_REG_R20) |
              reassemble_21(lrsel(val, 0)));
    tcg_out32(s, BLE_SR4 | INSN_R2(TCG_REG_R20) |
              reassemble_17(rrsel(val, 0) >> 2));
    tcg_out_mov(s, TCG_REG_RP, TCG_REG_R31);
}

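/* tcg_out_call() loads the high part of the target address into %r20 with
   LDIL and branches with BLE through %sr4; BLE leaves the return address in
   %r31, which is copied into %rp so the callee can return normally.
   __canonicalize_funcptr_for_compare() strips any function-descriptor/PLT
   indirection from the C function pointer so a plain code address is used. */
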
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif

static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint32_t *label3_ptr;
#endif
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0; /* suppress warning */
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_R26;
    r1 = TCG_REG_R25;

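    /* Softmmu fast path: r1 becomes the offset of the TLB entry for this
       (address, mem_index) pair, r0 is the address masked down to its page
       (plus the low alignment bits, which force a miss for unaligned
       accesses), and the entry's addr_read field is compared against r0.
       On a hit, control reaches label1 below and the cached addend is added
       to form the host address; on a miss, the __ld*_mmu helper is called. */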
#if defined(CONFIG_SOFTMMU)
    tcg_out_mov(s, r1, addr_reg);

    tcg_out_mov(s, r0, addr_reg);

    tcg_out32(s, SHD | INSN_T(r1) | INSN_R1(TCG_REG_R0) | INSN_R2(r1) |
              INSN_SHDEP_CP(TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS));

    tcg_out_arithi(s, r0, r0, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    tcg_out_arithi(s, r1, r1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS,
                   ARITH_AND);

    tcg_out_arith(s, r1, r1, TCG_AREG0, ARITH_ADD);
    tcg_out_arithi(s, r1, r1,
                   offsetof(CPUState, tlb_table[mem_index][0].addr_read),
                   ARITH_ADD);

    tcg_out_ldst(s, TCG_REG_R20, r1, 0, LDW);

#if TARGET_LONG_BITS == 32
    /* if equal, jump to label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
              INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */
#else
    /* if not equal, jump to label3 */
    label3_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBF | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
              INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */

    tcg_out_ldst(s, TCG_REG_R20, r1, 4, LDW);

    /* if equal, jump to label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(addr_reg2) |
              INSN_COND(COND_EQUAL));
    tcg_out_nop(s); /* delay slot */

    /* label3: */
    *label3_ptr |= reassemble_12((uint32_t *)s->code_ptr - label3_ptr - 2);
#endif

#if TARGET_LONG_BITS == 32
    tcg_out_mov(s, TCG_REG_R26, addr_reg);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R25, mem_index);
#else
    tcg_out_mov(s, TCG_REG_R26, addr_reg);
    tcg_out_mov(s, TCG_REG_R25, addr_reg2);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R24, mem_index);
#endif

    tcg_out_call(s, qemu_ld_helpers[s_bits]);

    switch (opc) {
    case 0 | 4:
        tcg_out_ext8s(s, data_reg, TCG_REG_RET0);
        break;
    case 1 | 4:
        tcg_out_ext16s(s, data_reg, TCG_REG_RET0);
        break;
    case 0:
    case 1:
    case 2:
    default:
        tcg_out_mov(s, data_reg, TCG_REG_RET0);
        break;
    case 3:
        tcg_abort();
        tcg_out_mov(s, data_reg, TCG_REG_RET0);
        tcg_out_mov(s, data_reg2, TCG_REG_RET1);
        break;
    }

    /* jump to label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, BL | INSN_R2(TCG_REG_R0) | 2);

    /* label1: */
    *label1_ptr |= reassemble_12((uint32_t *)s->code_ptr - label1_ptr - 2);

    tcg_out_arithi(s, TCG_REG_R20, r1,
                   offsetof(CPUTLBEntry, addend) - offsetof(CPUTLBEntry, addr_read),
                   ARITH_ADD);
    tcg_out_ldst(s, TCG_REG_R20, TCG_REG_R20, 0, LDW);
    tcg_out_arith(s, r0, r0, TCG_REG_R20, ARITH_ADD);
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    case 0:
        tcg_out_ldst(s, data_reg, r0, 0, LDB);
        break;
    case 0 | 4:
        tcg_out_ldst(s, data_reg, r0, 0, LDB);
        tcg_out_ext8s(s, data_reg, data_reg);
        break;
    case 1:
        tcg_out_ldst(s, data_reg, r0, 0, LDH);
        if (bswap)
            tcg_out_bswap16(s, data_reg, data_reg);
        break;
    case 1 | 4:
        tcg_out_ldst(s, data_reg, r0, 0, LDH);
        if (bswap)
            tcg_out_bswap16(s, data_reg, data_reg);
        tcg_out_ext16s(s, data_reg, data_reg);
        break;
    case 2:
        tcg_out_ldst(s, data_reg, r0, 0, LDW);
        if (bswap)
            tcg_out_bswap32(s, data_reg, data_reg, TCG_REG_R20);
        break;
    case 3:
        tcg_abort();
        if (!bswap) {
            tcg_out_ldst(s, data_reg, r0, 0, LDW);
            tcg_out_ldst(s, data_reg2, r0, 4, LDW);
        } else {
            tcg_out_ldst(s, data_reg, r0, 4, LDW);
            tcg_out_bswap32(s, data_reg, data_reg, TCG_REG_R20);
            tcg_out_ldst(s, data_reg2, r0, 0, LDW);
            tcg_out_bswap32(s, data_reg2, data_reg2, TCG_REG_R20);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr |= reassemble_17((uint32_t *)s->code_ptr - label2_ptr - 2);
#endif
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint32_t *label3_ptr;
#endif
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0; /* suppress warning */
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;

    s_bits = opc;

    r0 = TCG_REG_R26;
    r1 = TCG_REG_R25;

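    /* The store path mirrors tcg_out_qemu_ld() above, except that the TLB
       entry's addr_write field is checked and the __st*_mmu helpers are
       called on a miss. */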
#if defined(CONFIG_SOFTMMU)
    tcg_out_mov(s, r1, addr_reg);

    tcg_out_mov(s, r0, addr_reg);

    tcg_out32(s, SHD | INSN_T(r1) | INSN_R1(TCG_REG_R0) | INSN_R2(r1) |
              INSN_SHDEP_CP(TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS));

    tcg_out_arithi(s, r0, r0, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    tcg_out_arithi(s, r1, r1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS,
                   ARITH_AND);

    tcg_out_arith(s, r1, r1, TCG_AREG0, ARITH_ADD);
    tcg_out_arithi(s, r1, r1,
                   offsetof(CPUState, tlb_table[mem_index][0].addr_write),
                   ARITH_ADD);

    tcg_out_ldst(s, TCG_REG_R20, r1, 0, LDW);

#if TARGET_LONG_BITS == 32
    /* if equal, jump to label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
              INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */
#else
    /* if not equal, jump to label3 */
    label3_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBF | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
              INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */

    tcg_out_ldst(s, TCG_REG_R20, r1, 4, LDW);

    /* if equal, jump to label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(addr_reg2) |
              INSN_COND(COND_EQUAL));
    tcg_out_nop(s); /* delay slot */

    /* label3: */
    *label3_ptr |= reassemble_12((uint32_t *)s->code_ptr - label3_ptr - 2);
#endif

    tcg_out_mov(s, TCG_REG_R26, addr_reg);
#if TARGET_LONG_BITS == 64
    tcg_out_mov(s, TCG_REG_R25, addr_reg2);
    if (opc == 3) {
        tcg_abort();
        tcg_out_mov(s, TCG_REG_R24, data_reg);
        tcg_out_mov(s, TCG_REG_R23, data_reg2);
        /* TODO: push mem_index */
        tcg_abort();
    } else {
        switch (opc) {
        case 0:
            tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R24) | INSN_R2(data_reg) |
                      INSN_SHDEP_P(31) | INSN_DEP_LEN(8));
            break;
        case 1:
            tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R24) | INSN_R2(data_reg) |
                      INSN_SHDEP_P(31) | INSN_DEP_LEN(16));
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_R24, data_reg);
            break;
        }
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R23, mem_index);
    }
#else
    if (opc == 3) {
        tcg_abort();
        tcg_out_mov(s, TCG_REG_R25, data_reg);
        tcg_out_mov(s, TCG_REG_R24, data_reg2);
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R23, mem_index);
    } else {
        switch (opc) {
        case 0:
            tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R25) | INSN_R2(data_reg) |
                      INSN_SHDEP_P(31) | INSN_DEP_LEN(8));
            break;
        case 1:
            tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R25) | INSN_R2(data_reg) |
                      INSN_SHDEP_P(31) | INSN_DEP_LEN(16));
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_R25, data_reg);
            break;
        }
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R24, mem_index);
    }
#endif
    tcg_out_call(s, qemu_st_helpers[s_bits]);

    /* jump to label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, BL | INSN_R2(TCG_REG_R0) | 2);

    /* label1: */
    *label1_ptr |= reassemble_12((uint32_t *)s->code_ptr - label1_ptr - 2);

    tcg_out_arithi(s, TCG_REG_R20, r1,
                   offsetof(CPUTLBEntry, addend) - offsetof(CPUTLBEntry, addr_write),
                   ARITH_ADD);
    tcg_out_ldst(s, TCG_REG_R20, TCG_REG_R20, 0, LDW);
    tcg_out_arith(s, r0, r0, TCG_REG_R20, ARITH_ADD);
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    case 0:
        tcg_out_ldst(s, data_reg, r0, 0, STB);
        break;
    case 1:
        if (bswap) {
            tcg_out_bswap16(s, TCG_REG_R20, data_reg);
            data_reg = TCG_REG_R20;
        }
        tcg_out_ldst(s, data_reg, r0, 0, STH);
        break;
    case 2:
        if (bswap) {
            tcg_out_bswap32(s, TCG_REG_R20, data_reg, TCG_REG_R20);
            data_reg = TCG_REG_R20;
        }
        tcg_out_ldst(s, data_reg, r0, 0, STW);
        break;
    case 3:
        tcg_abort();
        if (!bswap) {
            tcg_out_ldst(s, data_reg, r0, 0, STW);
            tcg_out_ldst(s, data_reg2, r0, 4, STW);
        } else {
            tcg_out_bswap32(s, TCG_REG_R20, data_reg, TCG_REG_R20);
            tcg_out_ldst(s, TCG_REG_R20, r0, 4, STW);
            tcg_out_bswap32(s, TCG_REG_R20, data_reg2, TCG_REG_R20);
            tcg_out_ldst(s, TCG_REG_R20, r0, 0, STW);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr |= reassemble_17((uint32_t *)s->code_ptr - label2_ptr - 2);
#endif
}

static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RET0, args[0]);
        tcg_out32(s, BV_N | INSN_R2(TCG_REG_R18));
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            fprintf(stderr, "goto_tb direct\n");
            tcg_abort();
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R20, args[0]);
            tcg_out32(s, BV_N | INSN_R2(TCG_REG_R20));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
        } else {
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_R20,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, BV_N | INSN_R2(TCG_REG_R20));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        tcg_out32(s, BLE_SR4 | INSN_R2(args[0]));
        tcg_out_mov(s, TCG_REG_RP, TCG_REG_R31);
        break;
    case INDEX_op_jmp:
        fprintf(stderr, "unimplemented jmp\n");
        tcg_abort();
        break;
    case INDEX_op_br:
        fprintf(stderr, "unimplemented br\n");
        tcg_abort();
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDB);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDB);
        tcg_out_ext8s(s, args[0], args[0]);
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDH);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDH);
        tcg_out_ext16s(s, args[0], args[0]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDW);
        break;

    case INDEX_op_st8_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
        break;

    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
        goto gen_arith;

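    /* Variable shifts have no three-register form on PA-RISC: the shift
       amount (or 31 minus it, depending on the direction) is first written
       to the SAR control register (cr11) with MTCTL, and the shift itself
       is done with the variable deposit/extract/shift-double instructions
       (ZVDEP, VEXTRS, VSHD), which use SAR implicitly. */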
    case INDEX_op_shl_i32:
        tcg_out32(s, SUBI | INSN_R1(TCG_REG_R20) | INSN_R2(args[2]) |
                  lowsignext(0x1f, 0, 11));
        tcg_out32(s, MTCTL | INSN_R2(11) | INSN_R1(TCG_REG_R20));
        tcg_out32(s, ZVDEP | INSN_R2(args[0]) | INSN_R1(args[1]) |
                  INSN_DEP_LEN(32));
        break;
    case INDEX_op_shr_i32:
        tcg_out32(s, MTCTL | INSN_R2(11) | INSN_R1(args[2]));
        tcg_out32(s, VSHD | INSN_T(args[0]) | INSN_R1(TCG_REG_R0) |
                  INSN_R2(args[1]));
        break;
    case INDEX_op_sar_i32:
        tcg_out32(s, SUBI | INSN_R1(TCG_REG_R20) | INSN_R2(args[2]) |
                  lowsignext(0x1f, 0, 11));
        tcg_out32(s, MTCTL | INSN_R2(11) | INSN_R1(TCG_REG_R20));
        tcg_out32(s, VEXTRS | INSN_R1(args[0]) | INSN_R2(args[1]) |
                  INSN_DEP_LEN(32));
        break;

    case INDEX_op_mul_i32:
        fprintf(stderr, "unimplemented mul\n");
        tcg_abort();
        break;
    case INDEX_op_mulu2_i32:
        fprintf(stderr, "unimplemented mulu2\n");
        tcg_abort();
        break;
    case INDEX_op_div2_i32:
        fprintf(stderr, "unimplemented div2\n");
        tcg_abort();
        break;
    case INDEX_op_divu2_i32:
        fprintf(stderr, "unimplemented divu2\n");
        tcg_abort();
        break;

    case INDEX_op_brcond_i32:
        fprintf(stderr, "unimplemented brcond\n");
        tcg_abort();
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
    return;

gen_arith:
    tcg_out_arith(s, args[0], args[1], args[2], c);
}

static const TCGTargetOpDef hppa_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },

    { INDEX_op_call, { "r" } },
    { INDEX_op_jmp, { "r" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "r" } },
    { INDEX_op_and_i32, { "r", "r", "r" } },
    { INDEX_op_or_i32, { "r", "r", "r" } },
    { INDEX_op_xor_i32, { "r", "r", "r" } },

    { INDEX_op_shl_i32, { "r", "r", "r" } },
    { INDEX_op_shr_i32, { "r", "r", "r" } },
    { INDEX_op_sar_i32, { "r", "r", "r" } },

    { INDEX_op_brcond_i32, { "r", "r" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },

    { INDEX_op_qemu_st8, { "L", "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
#endif
    { -1 },
};

void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_R20) |
                     (1 << TCG_REG_R21) |
                     (1 << TCG_REG_R22) |
                     (1 << TCG_REG_R23) |
                     (1 << TCG_REG_R24) |
                     (1 << TCG_REG_R25) |
                     (1 << TCG_REG_R26));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);  /* hardwired to zero */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);  /* addil target */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RP);  /* link register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);  /* frame pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R18); /* return pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R19); /* clobbered w/o pic */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R20); /* reserved */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_DP);  /* data pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);  /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R31); /* ble link reg */

    tcg_add_target_add_op_defs(hppa_op_defs);
}