/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
21 #include "op_mem_access.h"
/*** Integer load ***/
/* Generate a load micro-op: T1 = accessor(T0), address truncated to 32 bits.
 * Restores the brace/RET structure dropped by the broken extraction.
 */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
/* Same generator with a full 64-bit effective address. */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RET;                                                                      \
}
#endif

/* Generate a store micro-op: accessor(T0, T1), address truncated to 32 bits. */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
/* Same generator with a full 64-bit effective address. */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RET;                                                                      \
}
#endif
/* Integer loads, 64-bit address forms. */
#if defined(TARGET_PPC64)
PPC_LD_OP_64(bz, ldu8);
PPC_LD_OP_64(ha, lds16);
PPC_LD_OP_64(hz, ldu16);
PPC_LD_OP_64(wz, ldu32);
PPC_LD_OP_64(wa, lds32);
PPC_LD_OP_64(d, ldu64);
#endif
/* Little-endian integer loads: use the byte-reversed accessors. */
PPC_LD_OP(ha_le, lds16r);
PPC_LD_OP(hz_le, ldu16r);
PPC_LD_OP(wz_le, ldu32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(wa_le, lds32r);
PPC_LD_OP(d_le, ldu64r);
PPC_LD_OP_64(ha_le, lds16r);
PPC_LD_OP_64(hz_le, ldu16r);
PPC_LD_OP_64(wz_le, ldu32r);
PPC_LD_OP_64(wa_le, lds32r);
PPC_LD_OP_64(d_le, ldu64r);
#endif
/*** Integer store ***/
#if defined(TARGET_PPC64)
PPC_ST_OP_64(h, st16);
PPC_ST_OP_64(w, st32);
PPC_ST_OP_64(d, st64);
#endif
/* Little-endian integer stores: byte-reversed accessors. */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
PPC_ST_OP_64(d_le, st64r);
#endif
/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ldu16r);
PPC_LD_OP(wbr, ldu32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ldu16r);
PPC_LD_OP_64(wbr, ldu32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif
/* In little-endian mode the byte-reverse ops use the native accessors. */
PPC_LD_OP(hbr_le, ldu16);
PPC_LD_OP(wbr_le, ldu32);
PPC_ST_OP(hbr_le, st16);
PPC_ST_OP(wbr_le, st32);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, ldu16);
PPC_LD_OP_64(wbr_le, ldu32);
PPC_ST_OP_64(hbr_le, st16);
PPC_ST_OP_64(wbr_le, st32);
#endif
128 /*** Integer load and store multiple ***/
129 void OPPROTO
glue(op_lmw
, MEMSUFFIX
) (void)
131 glue(do_lmw
, MEMSUFFIX
)(PARAM1
);
135 #if defined(TARGET_PPC64)
136 void OPPROTO
glue(op_lmw_64
, MEMSUFFIX
) (void)
138 glue(do_lmw_64
, MEMSUFFIX
)(PARAM1
);
143 void OPPROTO
glue(op_lmw_le
, MEMSUFFIX
) (void)
145 glue(do_lmw_le
, MEMSUFFIX
)(PARAM1
);
149 #if defined(TARGET_PPC64)
150 void OPPROTO
glue(op_lmw_le_64
, MEMSUFFIX
) (void)
152 glue(do_lmw_le_64
, MEMSUFFIX
)(PARAM1
);
157 void OPPROTO
glue(op_stmw
, MEMSUFFIX
) (void)
159 glue(do_stmw
, MEMSUFFIX
)(PARAM1
);
163 #if defined(TARGET_PPC64)
164 void OPPROTO
glue(op_stmw_64
, MEMSUFFIX
) (void)
166 glue(do_stmw_64
, MEMSUFFIX
)(PARAM1
);
171 void OPPROTO
glue(op_stmw_le
, MEMSUFFIX
) (void)
173 glue(do_stmw_le
, MEMSUFFIX
)(PARAM1
);
177 #if defined(TARGET_PPC64)
178 void OPPROTO
glue(op_stmw_le_64
, MEMSUFFIX
) (void)
180 glue(do_stmw_le_64
, MEMSUFFIX
)(PARAM1
);
185 /*** Integer load and store strings ***/
186 void OPPROTO
glue(op_lswi
, MEMSUFFIX
) (void)
188 glue(do_lsw
, MEMSUFFIX
)(PARAM1
);
192 #if defined(TARGET_PPC64)
193 void OPPROTO
glue(op_lswi_64
, MEMSUFFIX
) (void)
195 glue(do_lsw_64
, MEMSUFFIX
)(PARAM1
);
200 /* PPC32 specification says we must generate an exception if
201 * rA is in the range of registers to be loaded.
202 * In an other hand, IBM says this is valid, but rA won't be loaded.
203 * For now, I'll follow the spec...
205 void OPPROTO
glue(op_lswx
, MEMSUFFIX
) (void)
207 /* Note: T1 comes from xer_bc then no cast is needed */
208 if (likely(T1
!= 0)) {
209 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
210 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
211 do_raise_exception_err(POWERPC_EXCP_PROGRAM
,
213 POWERPC_EXCP_INVAL_LSWX
);
215 glue(do_lsw
, MEMSUFFIX
)(PARAM1
);
221 #if defined(TARGET_PPC64)
222 void OPPROTO
glue(op_lswx_64
, MEMSUFFIX
) (void)
224 /* Note: T1 comes from xer_bc then no cast is needed */
225 if (likely(T1
!= 0)) {
226 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
227 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
228 do_raise_exception_err(POWERPC_EXCP_PROGRAM
,
230 POWERPC_EXCP_INVAL_LSWX
);
232 glue(do_lsw_64
, MEMSUFFIX
)(PARAM1
);
239 void OPPROTO
glue(op_stsw
, MEMSUFFIX
) (void)
241 glue(do_stsw
, MEMSUFFIX
)(PARAM1
);
245 #if defined(TARGET_PPC64)
246 void OPPROTO
glue(op_stsw_64
, MEMSUFFIX
) (void)
248 glue(do_stsw_64
, MEMSUFFIX
)(PARAM1
);
/*** Floating-point store ***/
/* Generate a floating-point store micro-op: accessor(T0, FT0). */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RET;                                                                      \
}
#endif
270 static always_inline
void glue(stfs
, MEMSUFFIX
) (target_ulong EA
, double d
)
272 glue(stfl
, MEMSUFFIX
)(EA
, float64_to_float32(d
, &env
->fp_status
));
275 #if defined(WORDS_BIGENDIAN)
282 static always_inline
void glue(stfiw
, MEMSUFFIX
) (target_ulong EA
, double d
)
289 /* Store the low order 32 bits without any conversion */
291 glue(st32
, MEMSUFFIX
)(EA
, u
.u
[WORD0
]);
296 PPC_STF_OP(fd
, stfq
);
297 PPC_STF_OP(fs
, stfs
);
298 PPC_STF_OP(fiw
, stfiw
);
299 #if defined(TARGET_PPC64)
300 PPC_STF_OP_64(fd
, stfq
);
301 PPC_STF_OP_64(fs
, stfs
);
302 PPC_STF_OP_64(fiw
, stfiw
);
305 static always_inline
void glue(stfqr
, MEMSUFFIX
) (target_ulong EA
, double d
)
314 glue(stfq
, MEMSUFFIX
)(EA
, u
.d
);
317 static always_inline
void glue(stfsr
, MEMSUFFIX
) (target_ulong EA
, double d
)
324 u
.f
= float64_to_float32(d
, &env
->fp_status
);
326 glue(stfl
, MEMSUFFIX
)(EA
, u
.f
);
329 static always_inline
void glue(stfiwr
, MEMSUFFIX
) (target_ulong EA
, double d
)
336 /* Store the low order 32 bits without any conversion */
339 glue(st32
, MEMSUFFIX
)(EA
, u
.u
);
342 PPC_STF_OP(fd_le
, stfqr
);
343 PPC_STF_OP(fs_le
, stfsr
);
344 PPC_STF_OP(fiw_le
, stfiwr
);
345 #if defined(TARGET_PPC64)
346 PPC_STF_OP_64(fd_le
, stfqr
);
347 PPC_STF_OP_64(fs_le
, stfsr
);
348 PPC_STF_OP_64(fiw_le
, stfiwr
);
351 /*** Floating-point load ***/
352 #define PPC_LDF_OP(name, op) \
353 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
355 FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \
359 #if defined(TARGET_PPC64)
360 #define PPC_LDF_OP_64(name, op) \
361 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
363 FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \
368 static always_inline
double glue(ldfs
, MEMSUFFIX
) (target_ulong EA
)
370 return float32_to_float64(glue(ldfl
, MEMSUFFIX
)(EA
), &env
->fp_status
);
373 PPC_LDF_OP(fd
, ldfq
);
374 PPC_LDF_OP(fs
, ldfs
);
375 #if defined(TARGET_PPC64)
376 PPC_LDF_OP_64(fd
, ldfq
);
377 PPC_LDF_OP_64(fs
, ldfs
);
380 static always_inline
double glue(ldfqr
, MEMSUFFIX
) (target_ulong EA
)
387 u
.d
= glue(ldfq
, MEMSUFFIX
)(EA
);
393 static always_inline
double glue(ldfsr
, MEMSUFFIX
) (target_ulong EA
)
400 u
.f
= glue(ldfl
, MEMSUFFIX
)(EA
);
403 return float32_to_float64(u
.f
, &env
->fp_status
);
406 PPC_LDF_OP(fd_le
, ldfqr
);
407 PPC_LDF_OP(fs_le
, ldfsr
);
408 #if defined(TARGET_PPC64)
409 PPC_LDF_OP_64(fd_le
, ldfqr
);
410 PPC_LDF_OP_64(fs_le
, ldfsr
);
413 /* Load and set reservation */
414 void OPPROTO
glue(op_lwarx
, MEMSUFFIX
) (void)
416 if (unlikely(T0
& 0x03)) {
417 do_raise_exception(POWERPC_EXCP_ALIGN
);
419 T1
= glue(ldu32
, MEMSUFFIX
)((uint32_t)T0
);
420 env
->reserve
= (uint32_t)T0
;
425 #if defined(TARGET_PPC64)
426 void OPPROTO
glue(op_lwarx_64
, MEMSUFFIX
) (void)
428 if (unlikely(T0
& 0x03)) {
429 do_raise_exception(POWERPC_EXCP_ALIGN
);
431 T1
= glue(ldu32
, MEMSUFFIX
)((uint64_t)T0
);
432 env
->reserve
= (uint64_t)T0
;
437 void OPPROTO
glue(op_ldarx
, MEMSUFFIX
) (void)
439 if (unlikely(T0
& 0x03)) {
440 do_raise_exception(POWERPC_EXCP_ALIGN
);
442 T1
= glue(ldu64
, MEMSUFFIX
)((uint32_t)T0
);
443 env
->reserve
= (uint32_t)T0
;
448 void OPPROTO
glue(op_ldarx_64
, MEMSUFFIX
) (void)
450 if (unlikely(T0
& 0x03)) {
451 do_raise_exception(POWERPC_EXCP_ALIGN
);
453 T1
= glue(ldu64
, MEMSUFFIX
)((uint64_t)T0
);
454 env
->reserve
= (uint64_t)T0
;
460 void OPPROTO
glue(op_lwarx_le
, MEMSUFFIX
) (void)
462 if (unlikely(T0
& 0x03)) {
463 do_raise_exception(POWERPC_EXCP_ALIGN
);
465 T1
= glue(ldu32r
, MEMSUFFIX
)((uint32_t)T0
);
466 env
->reserve
= (uint32_t)T0
;
471 #if defined(TARGET_PPC64)
472 void OPPROTO
glue(op_lwarx_le_64
, MEMSUFFIX
) (void)
474 if (unlikely(T0
& 0x03)) {
475 do_raise_exception(POWERPC_EXCP_ALIGN
);
477 T1
= glue(ldu32r
, MEMSUFFIX
)((uint64_t)T0
);
478 env
->reserve
= (uint64_t)T0
;
483 void OPPROTO
glue(op_ldarx_le
, MEMSUFFIX
) (void)
485 if (unlikely(T0
& 0x03)) {
486 do_raise_exception(POWERPC_EXCP_ALIGN
);
488 T1
= glue(ldu64r
, MEMSUFFIX
)((uint32_t)T0
);
489 env
->reserve
= (uint32_t)T0
;
494 void OPPROTO
glue(op_ldarx_le_64
, MEMSUFFIX
) (void)
496 if (unlikely(T0
& 0x03)) {
497 do_raise_exception(POWERPC_EXCP_ALIGN
);
499 T1
= glue(ldu64r
, MEMSUFFIX
)((uint64_t)T0
);
500 env
->reserve
= (uint64_t)T0
;
506 /* Store with reservation */
507 void OPPROTO
glue(op_stwcx
, MEMSUFFIX
) (void)
509 if (unlikely(T0
& 0x03)) {
510 do_raise_exception(POWERPC_EXCP_ALIGN
);
512 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
513 env
->crf
[0] = xer_so
;
515 glue(st32
, MEMSUFFIX
)((uint32_t)T0
, T1
);
516 env
->crf
[0] = xer_so
| 0x02;
519 env
->reserve
= (target_ulong
)-1ULL;
523 #if defined(TARGET_PPC64)
524 void OPPROTO
glue(op_stwcx_64
, MEMSUFFIX
) (void)
526 if (unlikely(T0
& 0x03)) {
527 do_raise_exception(POWERPC_EXCP_ALIGN
);
529 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
530 env
->crf
[0] = xer_so
;
532 glue(st32
, MEMSUFFIX
)((uint64_t)T0
, T1
);
533 env
->crf
[0] = xer_so
| 0x02;
536 env
->reserve
= (target_ulong
)-1ULL;
540 void OPPROTO
glue(op_stdcx
, MEMSUFFIX
) (void)
542 if (unlikely(T0
& 0x03)) {
543 do_raise_exception(POWERPC_EXCP_ALIGN
);
545 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
546 env
->crf
[0] = xer_so
;
548 glue(st64
, MEMSUFFIX
)((uint32_t)T0
, T1
);
549 env
->crf
[0] = xer_so
| 0x02;
552 env
->reserve
= (target_ulong
)-1ULL;
556 void OPPROTO
glue(op_stdcx_64
, MEMSUFFIX
) (void)
558 if (unlikely(T0
& 0x03)) {
559 do_raise_exception(POWERPC_EXCP_ALIGN
);
561 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
562 env
->crf
[0] = xer_so
;
564 glue(st64
, MEMSUFFIX
)((uint64_t)T0
, T1
);
565 env
->crf
[0] = xer_so
| 0x02;
568 env
->reserve
= (target_ulong
)-1ULL;
573 void OPPROTO
glue(op_stwcx_le
, MEMSUFFIX
) (void)
575 if (unlikely(T0
& 0x03)) {
576 do_raise_exception(POWERPC_EXCP_ALIGN
);
578 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
579 env
->crf
[0] = xer_so
;
581 glue(st32r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
582 env
->crf
[0] = xer_so
| 0x02;
585 env
->reserve
= (target_ulong
)-1ULL;
589 #if defined(TARGET_PPC64)
590 void OPPROTO
glue(op_stwcx_le_64
, MEMSUFFIX
) (void)
592 if (unlikely(T0
& 0x03)) {
593 do_raise_exception(POWERPC_EXCP_ALIGN
);
595 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
596 env
->crf
[0] = xer_so
;
598 glue(st32r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
599 env
->crf
[0] = xer_so
| 0x02;
602 env
->reserve
= (target_ulong
)-1ULL;
606 void OPPROTO
glue(op_stdcx_le
, MEMSUFFIX
) (void)
608 if (unlikely(T0
& 0x03)) {
609 do_raise_exception(POWERPC_EXCP_ALIGN
);
611 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
612 env
->crf
[0] = xer_so
;
614 glue(st64r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
615 env
->crf
[0] = xer_so
| 0x02;
618 env
->reserve
= (target_ulong
)-1ULL;
622 void OPPROTO
glue(op_stdcx_le_64
, MEMSUFFIX
) (void)
624 if (unlikely(T0
& 0x03)) {
625 do_raise_exception(POWERPC_EXCP_ALIGN
);
627 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
628 env
->crf
[0] = xer_so
;
630 glue(st64r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
631 env
->crf
[0] = xer_so
| 0x02;
634 env
->reserve
= (target_ulong
)-1ULL;
639 void OPPROTO
glue(op_dcbz_l32
, MEMSUFFIX
) (void)
641 T0
&= ~((uint32_t)31);
642 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x00), 0);
643 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x04), 0);
644 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x08), 0);
645 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x0C), 0);
646 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x10), 0);
647 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x14), 0);
648 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x18), 0);
649 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x1C), 0);
653 void OPPROTO
glue(op_dcbz_l64
, MEMSUFFIX
) (void)
655 T0
&= ~((uint32_t)63);
656 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x00), 0);
657 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x04), 0);
658 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x08), 0);
659 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x0C), 0);
660 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x10), 0);
661 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x14), 0);
662 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x18), 0);
663 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x1C), 0);
664 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x20UL
), 0);
665 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x24UL
), 0);
666 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x28UL
), 0);
667 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x2CUL
), 0);
668 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x30UL
), 0);
669 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x34UL
), 0);
670 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x38UL
), 0);
671 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x3CUL
), 0);
675 void OPPROTO
glue(op_dcbz_l128
, MEMSUFFIX
) (void)
677 T0
&= ~((uint32_t)127);
678 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x00), 0);
679 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x04), 0);
680 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x08), 0);
681 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x0C), 0);
682 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x10), 0);
683 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x14), 0);
684 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x18), 0);
685 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x1C), 0);
686 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x20UL
), 0);
687 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x24UL
), 0);
688 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x28UL
), 0);
689 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x2CUL
), 0);
690 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x30UL
), 0);
691 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x34UL
), 0);
692 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x38UL
), 0);
693 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x3CUL
), 0);
694 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x40UL
), 0);
695 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x44UL
), 0);
696 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x48UL
), 0);
697 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x4CUL
), 0);
698 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x50UL
), 0);
699 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x54UL
), 0);
700 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x58UL
), 0);
701 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x5CUL
), 0);
702 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x60UL
), 0);
703 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x64UL
), 0);
704 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x68UL
), 0);
705 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x6CUL
), 0);
706 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x70UL
), 0);
707 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x74UL
), 0);
708 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x78UL
), 0);
709 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x7CUL
), 0);
713 void OPPROTO
glue(op_dcbz
, MEMSUFFIX
) (void)
715 glue(do_dcbz
, MEMSUFFIX
)();
#if defined(TARGET_PPC64)
/* dcbz, 64-bit effective address forms. */
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)31);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RET;
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)63);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RET;
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)127);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RET;
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RET;
}
#endif
801 /* Instruction cache block invalidate */
802 void OPPROTO
glue(op_icbi
, MEMSUFFIX
) (void)
804 glue(do_icbi
, MEMSUFFIX
)();
808 #if defined(TARGET_PPC64)
809 void OPPROTO
glue(op_icbi_64
, MEMSUFFIX
) (void)
811 glue(do_icbi_64
, MEMSUFFIX
)();
816 /* External access */
817 void OPPROTO
glue(op_eciwx
, MEMSUFFIX
) (void)
819 T1
= glue(ldu32
, MEMSUFFIX
)((uint32_t)T0
);
823 #if defined(TARGET_PPC64)
824 void OPPROTO
glue(op_eciwx_64
, MEMSUFFIX
) (void)
826 T1
= glue(ldu32
, MEMSUFFIX
)((uint64_t)T0
);
831 void OPPROTO
glue(op_ecowx
, MEMSUFFIX
) (void)
833 glue(st32
, MEMSUFFIX
)((uint32_t)T0
, T1
);
837 #if defined(TARGET_PPC64)
838 void OPPROTO
glue(op_ecowx_64
, MEMSUFFIX
) (void)
840 glue(st32
, MEMSUFFIX
)((uint64_t)T0
, T1
);
845 void OPPROTO
glue(op_eciwx_le
, MEMSUFFIX
) (void)
847 T1
= glue(ldu32r
, MEMSUFFIX
)((uint32_t)T0
);
851 #if defined(TARGET_PPC64)
852 void OPPROTO
glue(op_eciwx_le_64
, MEMSUFFIX
) (void)
854 T1
= glue(ldu32r
, MEMSUFFIX
)((uint64_t)T0
);
859 void OPPROTO
glue(op_ecowx_le
, MEMSUFFIX
) (void)
861 glue(st32r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
865 #if defined(TARGET_PPC64)
866 void OPPROTO
glue(op_ecowx_le_64
, MEMSUFFIX
) (void)
868 glue(st32r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
873 /* XXX: those micro-ops need tests ! */
874 /* PowerPC 601 specific instructions (POWER bridge) */
875 void OPPROTO
glue(op_POWER_lscbx
, MEMSUFFIX
) (void)
877 /* When byte count is 0, do nothing */
878 if (likely(T1
!= 0)) {
879 glue(do_POWER_lscbx
, MEMSUFFIX
)(PARAM1
, PARAM2
, PARAM3
);
884 /* POWER2 quad load and store */
885 /* XXX: TAGs are not managed */
886 void OPPROTO
glue(op_POWER2_lfq
, MEMSUFFIX
) (void)
888 glue(do_POWER2_lfq
, MEMSUFFIX
)();
892 void glue(op_POWER2_lfq_le
, MEMSUFFIX
) (void)
894 glue(do_POWER2_lfq_le
, MEMSUFFIX
)();
898 void OPPROTO
glue(op_POWER2_stfq
, MEMSUFFIX
) (void)
900 glue(do_POWER2_stfq
, MEMSUFFIX
)();
904 void OPPROTO
glue(op_POWER2_stfq_le
, MEMSUFFIX
) (void)
906 glue(do_POWER2_stfq_le
, MEMSUFFIX
)();
910 /* Altivec vector extension */
911 #if defined(WORDS_BIGENDIAN)
918 void OPPROTO
glue(op_vr_lvx
, MEMSUFFIX
) (void)
920 AVR0
.u64
[VR_DWORD0
] = glue(ldu64
, MEMSUFFIX
)((uint32_t)T0
);
921 AVR0
.u64
[VR_DWORD1
] = glue(ldu64
, MEMSUFFIX
)((uint32_t)T0
+ 8);
924 void OPPROTO
glue(op_vr_lvx_le
, MEMSUFFIX
) (void)
926 AVR0
.u64
[VR_DWORD1
] = glue(ldu64r
, MEMSUFFIX
)((uint32_t)T0
);
927 AVR0
.u64
[VR_DWORD0
] = glue(ldu64r
, MEMSUFFIX
)((uint32_t)T0
+ 8);
930 void OPPROTO
glue(op_vr_stvx
, MEMSUFFIX
) (void)
932 glue(st64
, MEMSUFFIX
)((uint32_t)T0
, AVR0
.u64
[VR_DWORD0
]);
933 glue(st64
, MEMSUFFIX
)((uint32_t)T0
+ 8, AVR0
.u64
[VR_DWORD1
]);
936 void OPPROTO
glue(op_vr_stvx_le
, MEMSUFFIX
) (void)
938 glue(st64r
, MEMSUFFIX
)((uint32_t)T0
, AVR0
.u64
[VR_DWORD1
]);
939 glue(st64r
, MEMSUFFIX
)((uint32_t)T0
+ 8, AVR0
.u64
[VR_DWORD0
]);
942 #if defined(TARGET_PPC64)
943 void OPPROTO
glue(op_vr_lvx_64
, MEMSUFFIX
) (void)
945 AVR0
.u64
[VR_DWORD0
] = glue(ldu64
, MEMSUFFIX
)((uint64_t)T0
);
946 AVR0
.u64
[VR_DWORD1
] = glue(ldu64
, MEMSUFFIX
)((uint64_t)T0
+ 8);
949 void OPPROTO
glue(op_vr_lvx_le_64
, MEMSUFFIX
) (void)
951 AVR0
.u64
[VR_DWORD1
] = glue(ldu64r
, MEMSUFFIX
)((uint64_t)T0
);
952 AVR0
.u64
[VR_DWORD0
] = glue(ldu64r
, MEMSUFFIX
)((uint64_t)T0
+ 8);
955 void OPPROTO
glue(op_vr_stvx_64
, MEMSUFFIX
) (void)
957 glue(st64
, MEMSUFFIX
)((uint64_t)T0
, AVR0
.u64
[VR_DWORD0
]);
958 glue(st64
, MEMSUFFIX
)((uint64_t)T0
+ 8, AVR0
.u64
[VR_DWORD1
]);
961 void OPPROTO
glue(op_vr_stvx_le_64
, MEMSUFFIX
) (void)
963 glue(st64r
, MEMSUFFIX
)((uint64_t)T0
, AVR0
.u64
[VR_DWORD1
]);
964 glue(st64r
, MEMSUFFIX
)((uint64_t)T0
+ 8, AVR0
.u64
[VR_DWORD0
]);
/* SPE extension: 64-bit GPR loads/stores through T1_64. */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RET;                                                                      \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RET;                                                                      \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
1015 #if !defined(TARGET_PPC64)
1016 PPC_SPE_LD_OP(dd
, ldu64
);
1017 PPC_SPE_ST_OP(dd
, st64
);
1018 PPC_SPE_LD_OP(dd_le
, ldu64r
);
1019 PPC_SPE_ST_OP(dd_le
, st64r
);
1021 static always_inline
uint64_t glue(spe_ldw
, MEMSUFFIX
) (target_ulong EA
)
1024 ret
= (uint64_t)glue(ldu32
, MEMSUFFIX
)(EA
) << 32;
1025 ret
|= (uint64_t)glue(ldu32
, MEMSUFFIX
)(EA
+ 4);
1028 PPC_SPE_LD_OP(dw
, spe_ldw
);
1029 static always_inline
void glue(spe_stdw
, MEMSUFFIX
) (target_ulong EA
,
1032 glue(st32
, MEMSUFFIX
)(EA
, data
>> 32);
1033 glue(st32
, MEMSUFFIX
)(EA
+ 4, data
);
1035 PPC_SPE_ST_OP(dw
, spe_stdw
);
1036 static always_inline
uint64_t glue(spe_ldw_le
, MEMSUFFIX
) (target_ulong EA
)
1039 ret
= (uint64_t)glue(ldu32r
, MEMSUFFIX
)(EA
) << 32;
1040 ret
|= (uint64_t)glue(ldu32r
, MEMSUFFIX
)(EA
+ 4);
1043 PPC_SPE_LD_OP(dw_le
, spe_ldw_le
);
1044 static always_inline
void glue(spe_stdw_le
, MEMSUFFIX
) (target_ulong EA
,
1047 glue(st32r
, MEMSUFFIX
)(EA
, data
>> 32);
1048 glue(st32r
, MEMSUFFIX
)(EA
+ 4, data
);
1050 PPC_SPE_ST_OP(dw_le
, spe_stdw_le
);
1051 static always_inline
uint64_t glue(spe_ldh
, MEMSUFFIX
) (target_ulong EA
)
1054 ret
= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
) << 48;
1055 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 2) << 32;
1056 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 4) << 16;
1057 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 6);
1060 PPC_SPE_LD_OP(dh
, spe_ldh
);
1061 static always_inline
void glue(spe_stdh
, MEMSUFFIX
) (target_ulong EA
,
1064 glue(st16
, MEMSUFFIX
)(EA
, data
>> 48);
1065 glue(st16
, MEMSUFFIX
)(EA
+ 2, data
>> 32);
1066 glue(st16
, MEMSUFFIX
)(EA
+ 4, data
>> 16);
1067 glue(st16
, MEMSUFFIX
)(EA
+ 6, data
);
1069 PPC_SPE_ST_OP(dh
, spe_stdh
);
1070 static always_inline
uint64_t glue(spe_ldh_le
, MEMSUFFIX
) (target_ulong EA
)
1073 ret
= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
) << 48;
1074 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 2) << 32;
1075 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 4) << 16;
1076 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 6);
1079 PPC_SPE_LD_OP(dh_le
, spe_ldh_le
);
1080 static always_inline
void glue(spe_stdh_le
, MEMSUFFIX
) (target_ulong EA
,
1083 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 48);
1084 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
>> 32);
1085 glue(st16r
, MEMSUFFIX
)(EA
+ 4, data
>> 16);
1086 glue(st16r
, MEMSUFFIX
)(EA
+ 6, data
);
1088 PPC_SPE_ST_OP(dh_le
, spe_stdh_le
);
1089 static always_inline
uint64_t glue(spe_lwhe
, MEMSUFFIX
) (target_ulong EA
)
1092 ret
= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
) << 48;
1093 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 2) << 16;
1096 PPC_SPE_LD_OP(whe
, spe_lwhe
);
1097 static always_inline
void glue(spe_stwhe
, MEMSUFFIX
) (target_ulong EA
,
1100 glue(st16
, MEMSUFFIX
)(EA
, data
>> 48);
1101 glue(st16
, MEMSUFFIX
)(EA
+ 2, data
>> 16);
1103 PPC_SPE_ST_OP(whe
, spe_stwhe
);
1104 static always_inline
uint64_t glue(spe_lwhe_le
, MEMSUFFIX
) (target_ulong EA
)
1107 ret
= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
) << 48;
1108 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 2) << 16;
1111 PPC_SPE_LD_OP(whe_le
, spe_lwhe_le
);
1112 static always_inline
void glue(spe_stwhe_le
, MEMSUFFIX
) (target_ulong EA
,
1115 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 48);
1116 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
>> 16);
1118 PPC_SPE_ST_OP(whe_le
, spe_stwhe_le
);
1119 static always_inline
uint64_t glue(spe_lwhou
, MEMSUFFIX
) (target_ulong EA
)
1122 ret
= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
) << 32;
1123 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 2);
1126 PPC_SPE_LD_OP(whou
, spe_lwhou
);
1127 static always_inline
uint64_t glue(spe_lwhos
, MEMSUFFIX
) (target_ulong EA
)
1130 ret
= ((uint64_t)((int32_t)glue(lds16
, MEMSUFFIX
)(EA
))) << 32;
1131 ret
|= (uint64_t)((int32_t)glue(lds16
, MEMSUFFIX
)(EA
+ 2));
1134 PPC_SPE_LD_OP(whos
, spe_lwhos
);
1135 static always_inline
void glue(spe_stwho
, MEMSUFFIX
) (target_ulong EA
,
1138 glue(st16
, MEMSUFFIX
)(EA
, data
>> 32);
1139 glue(st16
, MEMSUFFIX
)(EA
+ 2, data
);
1141 PPC_SPE_ST_OP(who
, spe_stwho
);
1142 static always_inline
uint64_t glue(spe_lwhou_le
, MEMSUFFIX
) (target_ulong EA
)
1145 ret
= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
) << 32;
1146 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 2);
1149 PPC_SPE_LD_OP(whou_le
, spe_lwhou_le
);
1150 static always_inline
uint64_t glue(spe_lwhos_le
, MEMSUFFIX
) (target_ulong EA
)
1153 ret
= ((uint64_t)((int32_t)glue(lds16r
, MEMSUFFIX
)(EA
))) << 32;
1154 ret
|= (uint64_t)((int32_t)glue(lds16r
, MEMSUFFIX
)(EA
+ 2));
1157 PPC_SPE_LD_OP(whos_le
, spe_lwhos_le
);
1158 static always_inline
void glue(spe_stwho_le
, MEMSUFFIX
) (target_ulong EA
,
1161 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 32);
1162 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
);
1164 PPC_SPE_ST_OP(who_le
, spe_stwho_le
);
1165 #if !defined(TARGET_PPC64)
1166 static always_inline
void glue(spe_stwwo
, MEMSUFFIX
) (target_ulong EA
,
1169 glue(st32
, MEMSUFFIX
)(EA
, data
);
1171 PPC_SPE_ST_OP(wwo
, spe_stwwo
);
1172 static always_inline
void glue(spe_stwwo_le
, MEMSUFFIX
) (target_ulong EA
,
1175 glue(st32r
, MEMSUFFIX
)(EA
, data
);
1177 PPC_SPE_ST_OP(wwo_le
, spe_stwwo_le
);
1179 static always_inline
uint64_t glue(spe_lh
, MEMSUFFIX
) (target_ulong EA
)
1182 tmp
= glue(ldu16
, MEMSUFFIX
)(EA
);
1183 return ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 16);
1185 PPC_SPE_LD_OP(h
, spe_lh
);
1186 static always_inline
uint64_t glue(spe_lh_le
, MEMSUFFIX
) (target_ulong EA
)
1189 tmp
= glue(ldu16r
, MEMSUFFIX
)(EA
);
1190 return ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 16);
1192 PPC_SPE_LD_OP(h_le
, spe_lh_le
);
1193 static always_inline
uint64_t glue(spe_lwwsplat
, MEMSUFFIX
) (target_ulong EA
)
1196 tmp
= glue(ldu32
, MEMSUFFIX
)(EA
);
1197 return ((uint64_t)tmp
<< 32) | (uint64_t)tmp
;
1199 PPC_SPE_LD_OP(wwsplat
, spe_lwwsplat
);
1200 static always_inline
1201 uint64_t glue(spe_lwwsplat_le
, MEMSUFFIX
) (target_ulong EA
)
1204 tmp
= glue(ldu32r
, MEMSUFFIX
)(EA
);
1205 return ((uint64_t)tmp
<< 32) | (uint64_t)tmp
;
1207 PPC_SPE_LD_OP(wwsplat_le
, spe_lwwsplat_le
);
1208 static always_inline
uint64_t glue(spe_lwhsplat
, MEMSUFFIX
) (target_ulong EA
)
1212 tmp
= glue(ldu16
, MEMSUFFIX
)(EA
);
1213 ret
= ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 32);
1214 tmp
= glue(ldu16
, MEMSUFFIX
)(EA
+ 2);
1215 ret
|= ((uint64_t)tmp
<< 16) | (uint64_t)tmp
;
1218 PPC_SPE_LD_OP(whsplat
, spe_lwhsplat
);
1219 static always_inline
1220 uint64_t glue(spe_lwhsplat_le
, MEMSUFFIX
) (target_ulong EA
)
1224 tmp
= glue(ldu16r
, MEMSUFFIX
)(EA
);
1225 ret
= ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 32);
1226 tmp
= glue(ldu16r
, MEMSUFFIX
)(EA
+ 2);
1227 ret
|= ((uint64_t)tmp
<< 16) | (uint64_t)tmp
;
1230 PPC_SPE_LD_OP(whsplat_le
, spe_lwhsplat_le
);