/*** VSX extension ***/

static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

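/*
 * Note on the accessors above: the four get/set helpers move one
 * 64-bit half of a VSR between a TCG temporary and env, with the
 * boolean argument of vsr64_offset() selecting the high or low
 * doubleword; gen_vsr_ptr() instead materialises a host pointer to
 * the whole 128-bit register, for passing to out-of-line helpers.
 */
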
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

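/*
 * Each expansion above defines one indexed scalar load, e.g. gen_lxsdx:
 * EA = (RA|0) + RB, and the value (sign- or zero-extended to 64 bits by
 * the gen_qemu_* access) lands in the target VSR's high doubleword,
 * leaving the low doubleword undefined as the ISA permits.
 */
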
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

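/*
 * In the little-endian path above, each MO_LEQ load yields the two
 * 32-bit words of a doubleword in reversed order: memory words
 * {w0, w1} arrive as w1:w0. The shri/deposit pair swaps the halves
 * so the VSR ends up holding w0:w1, the same element order the
 * big-endian path produces directly.
 */
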
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

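/*
 * Example: each aligned 16-bit lane is byte-swapped independently, so
 * inh = 0x0011223344556677 produces outh = 0x1100332255447766. The
 * mask selects the low byte of every halfword; the two shifted AND
 * terms exchange it with the high byte.
 */
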
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

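/*
 * Example: bswap64 reverses all eight bytes, and the shri/deposit pair
 * then swaps the two 32-bit halves back, which nets out to a byte swap
 * within each word: inh = 0x0011223344556677 -> outh = 0x3322110077665544.
 */
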
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_VECTOR_LOAD(name, op, indexed)                    \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrh(xt, xth);                                \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrh(xt, xth);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

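/*
 * In LE mode the doubleword at the lower address holds the low half of
 * the VSR, so the macro loads xtl first; both paths leave the register
 * with the same element order, only the access order and the
 * MO_LEQ/MO_BEQ operands differ.
 */
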
VSX_VECTOR_LOAD(lxv, ld_i64, 0)
VSX_VECTOR_LOAD(lxvx, ld_i64, 1)

#define VSX_VECTOR_STORE(name, op, indexed)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, xt);                                    \
    get_cpu_vsrl(xtl, xt);                                    \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_STORE(stxv, st_i64, 0)
VSX_VECTOR_STORE(stxvx, st_i64, 1)

#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)

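/*
 * The load/store-with-length helpers do the byte-by-byte work at run
 * time; per ISA 3.0 the byte count for these instructions comes from
 * the high-order byte of RB, which is why the raw GPR value is passed
 * through to the helper.
 */
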
#define VSX_LOAD_SCALAR_DS(name, operation)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                  \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)
VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

#define VSX_STORE_SCALAR_DS(name, operation)                  \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
#endif

static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

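/*
 * DM selects which doubleword of each source feeds the result: bit 1
 * picks VSR[A] high/low for the target's high half, bit 0 picks VSR[B]
 * high/low for the target's low half. E.g. DM=0 gives A.hi:B.hi, so
 * xxpermdi with XA == XB and DM=0 duplicates that register's high
 * doubleword.
 */
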
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP  0x8000000080000000ull

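/*
 * SGN_MASK_DP covers the sign bit of one double (bit 63); SGN_MASK_SP
 * covers the sign bits of both 32-bit singles packed in a doubleword
 * (bits 63 and 31), so the same and/or/xor sequences below serve both
 * widths.
 */
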
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 xb, sgm;                                     \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        xb = tcg_temp_new_i64();                              \
        sgm = tcg_temp_new_i64();                             \
        get_cpu_vsrh(xb, xB(ctx->opcode));                    \
        tcg_gen_movi_i64(sgm, sgn_mask);                      \
        switch (op) {                                         \
        case OP_ABS: {                                        \
            tcg_gen_andc_i64(xb, xb, sgm);                    \
            break;                                            \
        }                                                     \
        case OP_NABS: {                                       \
            tcg_gen_or_i64(xb, xb, sgm);                      \
            break;                                            \
        }                                                     \
        case OP_NEG: {                                        \
            tcg_gen_xor_i64(xb, xb, sgm);                     \
            break;                                            \
        }                                                     \
        case OP_CPSGN: {                                      \
            TCGv_i64 xa = tcg_temp_new_i64();                 \
            get_cpu_vsrh(xa, xA(ctx->opcode));                \
            tcg_gen_and_i64(xa, xa, sgm);                     \
            tcg_gen_andc_i64(xb, xb, sgm);                    \
            tcg_gen_or_i64(xb, xb, xa);                       \
            tcg_temp_free_i64(xa);                            \
            break;                                            \
        }                                                     \
        }                                                     \
        set_cpu_vsrh(xT(ctx->opcode), xb);                    \
        tcg_temp_free_i64(xb);                                \
        tcg_temp_free_i64(sgm);                               \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    int xa;                                                   \
    int xt = rD(ctx->opcode) + 32;                            \
    int xb = rB(ctx->opcode) + 32;                            \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                         \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tmp = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xb);                                    \
    get_cpu_vsrl(xbl, xb);                                    \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        break;                                                \
    case OP_CPSGN:                                            \
        xah = tcg_temp_new_i64();                             \
        xa = rA(ctx->opcode) + 32;                            \
        get_cpu_vsrh(tmp, xa);                                \
        tcg_gen_and_i64(xah, tmp, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_temp_free_i64(xah);                               \
        break;                                                \
    }                                                         \
    set_cpu_vsrh(xt, xbh);                                    \
    set_cpu_vsrl(xt, xbl);                                    \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(sgm);                                   \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 xbh, xbl, sgm;                               \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        xbh = tcg_temp_new_i64();                             \
        xbl = tcg_temp_new_i64();                             \
        sgm = tcg_temp_new_i64();                             \
        get_cpu_vsrh(xbh, xB(ctx->opcode));                   \
        get_cpu_vsrl(xbl, xB(ctx->opcode));                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                      \
        switch (op) {                                         \
        case OP_ABS: {                                        \
            tcg_gen_andc_i64(xbh, xbh, sgm);                  \
            tcg_gen_andc_i64(xbl, xbl, sgm);                  \
            break;                                            \
        }                                                     \
        case OP_NABS: {                                       \
            tcg_gen_or_i64(xbh, xbh, sgm);                    \
            tcg_gen_or_i64(xbl, xbl, sgm);                    \
            break;                                            \
        }                                                     \
        case OP_NEG: {                                        \
            tcg_gen_xor_i64(xbh, xbh, sgm);                   \
            tcg_gen_xor_i64(xbl, xbl, sgm);                   \
            break;                                            \
        }                                                     \
        case OP_CPSGN: {                                      \
            TCGv_i64 xah = tcg_temp_new_i64();                \
            TCGv_i64 xal = tcg_temp_new_i64();                \
            get_cpu_vsrh(xah, xA(ctx->opcode));               \
            get_cpu_vsrl(xal, xA(ctx->opcode));               \
            tcg_gen_and_i64(xah, xah, sgm);                   \
            tcg_gen_and_i64(xal, xal, sgm);                   \
            tcg_gen_andc_i64(xbh, xbh, sgm);                  \
            tcg_gen_andc_i64(xbl, xbl, sgm);                  \
            tcg_gen_or_i64(xbh, xbh, xah);                    \
            tcg_gen_or_i64(xbl, xbl, xal);                    \
            tcg_temp_free_i64(xah);                           \
            tcg_temp_free_i64(xal);                           \
            break;                                            \
        }                                                     \
        }                                                     \
        set_cpu_vsrh(xT(ctx->opcode), xbh);                   \
        set_cpu_vsrl(xT(ctx->opcode), xbl);                   \
        tcg_temp_free_i64(xbh);                               \
        tcg_temp_free_i64(xbl);                               \
        tcg_temp_free_i64(sgm);                               \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define VSX_CMP(name, op1, op2, inval, type)                  \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 ignored;                                         \
    TCGv_ptr xt, xa, xb;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);   \
    } else {                                                  \
        ignored = tcg_temp_new_i32();                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);      \
        tcg_temp_free_i32(ignored);                           \
    }                                                         \
    gen_helper_float_check_status(cpu_env);                   \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

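/*
 * Bit 21 (IBM numbering) of the opcode is the record flag of these
 * vector compares: when set, the helper's summary result is written
 * directly to CR6; otherwise the helper runs only for its FPSCR side
 * effects and the return value is discarded via "ignored".
 */
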
VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

static void gen_xscvqpdp(DisasContext *ctx)
{
    TCGv_i32 opc;
    TCGv_ptr xt, xb;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    opc = tcg_const_i32(ctx->opcode);
    xt = gen_vsr_ptr(xT(ctx->opcode));
    xb = gen_vsr_ptr(xB(ctx->opcode));
    gen_helper_xscvqpdp(cpu_env, opc, xt, xb);
    tcg_temp_free_i32(opc);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);
}

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)        \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_ptr xt, xa, xb;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                   \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)        \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_ptr xt, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    gen_helper_##name(cpu_env, xt, xb);                       \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    TCGv_ptr xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                  \
    tcg_temp_free_i32(opc);                                   \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)        \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    TCGv_ptr xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    gen_helper_##name(cpu_env, opc, xb);                      \
    tcg_temp_free_i32(opc);                                   \
    tcg_temp_free_ptr(xb);                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)        \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    TCGv_ptr xt, xa, xb;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);              \
    tcg_temp_free_i32(opc);                                   \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)        \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    TCGv_ptr xt, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                  \
    tcg_temp_free_i32(opc);                                   \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    TCGv_ptr xa, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                  \
    tcg_temp_free_i32(opc);                                   \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsrh(t0, xB(ctx->opcode));                        \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsrh(xT(ctx->opcode), t1);                        \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_R3(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_ptr xt, xa, b, c;                                    \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                        \
    if (ctx->opcode & PPC_BIT(25)) {                          \
        /*                                                    \
         * AxT + B                                            \
         */                                                   \
        b = gen_vsr_ptr(xT(ctx->opcode));                     \
        c = gen_vsr_ptr(xB(ctx->opcode));                     \
    } else {                                                  \
        /*                                                    \
         * AxB + T                                            \
         */                                                   \
        b = gen_vsr_ptr(xB(ctx->opcode));                     \
        c = gen_vsr_ptr(xT(ctx->opcode));                     \
    }                                                         \
    gen_helper_##name(cpu_env, xt, xa, b, c);                 \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(b);                                     \
    tcg_temp_free_ptr(c);                                     \
}

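/*
 * PPC_BIT(25) distinguishes the two multiply-add forms that share one
 * generator here: with the bit set the addend is xB and the second
 * factor is xT (the "A" form, e.g. xsmaddadp); with it clear the
 * factors are xA and xB and the addend is xT (the "M" form, e.g.
 * xsmaddmdp). The b/c pointers are swapped accordingly before the
 * helper call.
 */
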
GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)

static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)                       \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),        \
               vsr_full_offset(xA(ctx->opcode)),              \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);     \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

#define VSX_XXMRG(name, high)                                 \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 a0, a1, b0, b1, tmp;                         \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        a0 = tcg_temp_new_i64();                              \
        a1 = tcg_temp_new_i64();                              \
        b0 = tcg_temp_new_i64();                              \
        b1 = tcg_temp_new_i64();                              \
        tmp = tcg_temp_new_i64();                             \
        if (high) {                                           \
            get_cpu_vsrh(a0, xA(ctx->opcode));                \
            get_cpu_vsrh(a1, xA(ctx->opcode));                \
            get_cpu_vsrh(b0, xB(ctx->opcode));                \
            get_cpu_vsrh(b1, xB(ctx->opcode));                \
        } else {                                              \
            get_cpu_vsrl(a0, xA(ctx->opcode));                \
            get_cpu_vsrl(a1, xA(ctx->opcode));                \
            get_cpu_vsrl(b0, xB(ctx->opcode));                \
            get_cpu_vsrl(b1, xB(ctx->opcode));                \
        }                                                     \
        tcg_gen_shri_i64(a0, a0, 32);                         \
        tcg_gen_shri_i64(b0, b0, 32);                         \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);             \
        set_cpu_vsrh(xT(ctx->opcode), tmp);                   \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);             \
        set_cpu_vsrl(xT(ctx->opcode), tmp);                   \
        tcg_temp_free_i64(a0);                                \
        tcg_temp_free_i64(a1);                                \
        tcg_temp_free_i64(b0);                                \
        tcg_temp_free_i64(b1);                                \
        tcg_temp_free_i64(tmp);                               \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}

static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsrh(xth, xA(ctx->opcode));
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrh(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrl(t0, xA(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsrl(xth, xA(ctx->opcode));
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrl(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrl(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

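/*
 * SHW is the word-granular shift count: the result is words SHW..SHW+3
 * of the 8-word concatenation VSR[XA] || VSR[XB]. Case 0 copies XA,
 * case 2 takes XA's low doubleword and XB's high one, and the odd
 * cases stitch neighbouring doublewords together with shifts and ORs.
 */
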
#define VSX_EXTRACT_INSERT(name)                              \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_ptr xt, xb;                                          \
    TCGv_i32 t0;                                              \
    TCGv_i64 t1;                                              \
    uint8_t uimm = UIMM4(ctx->opcode);                        \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    t0 = tcg_temp_new_i32();                                  \
    t1 = tcg_temp_new_i64();                                  \
    /*                                                        \
     * uimm > 15 out of bound and for                         \
     * uimm > 12 handle as per hardware in helper             \
     */                                                       \
    if (uimm > 15) {                                          \
        tcg_gen_movi_i64(t1, 0);                              \
        set_cpu_vsrh(xT(ctx->opcode), t1);                    \
        set_cpu_vsrl(xT(ctx->opcode), t1);                    \
        return;                                               \
    }                                                         \
    tcg_gen_movi_i32(t0, uimm);                               \
    gen_helper_##name(cpu_env, xt, xb, t0);                   \
    tcg_temp_free_ptr(xb);                                    \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_i32(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

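/*
 * Worked example: xsiexpdp splices the biased exponent from RB into
 * the sign and fraction taken from RA, i.e.
 *   xth = (RA & 0x800FFFFFFFFFFFFF) | ((RB & 0x7FF) << 52)
 * so RA = 0, RB = 0x3FF yields 0x3FF0000000000000, the encoding of 1.0.
 */
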
static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

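/*
 * t0 starts as the implicit integer bit of the significand
 * (0x0010000000000000) and the two movconds force it to zero when the
 * exponent field is 0 (zero/denormal) or 2047 (inf/NaN), i.e. whenever
 * the format has no hidden 1; the fraction bits are then deposited
 * below it.
 */
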
static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM