/*
 * mini-ppc.c: PowerPC backend for the Mono code generator
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Andreas Faerber <andreas.faerber@web.de>
 *
 * (C) 2003 Ximian, Inc.
 * (C) 2007-2008 Andreas Faerber
 */

#include <mono/metadata/appdomain.h>
#include <mono/metadata/debug-helpers.h>

#include "cpu-ppc64.h"

#include <sys/sysctl.h>

#define FORCE_INDIR_CALL 1

/* This mutex protects architecture specific caches */
#define mono_mini_arch_lock() EnterCriticalSection (&mini_arch_mutex)
#define mono_mini_arch_unlock() LeaveCriticalSection (&mini_arch_mutex)
static CRITICAL_SECTION mini_arch_mutex;

int mono_exc_esp_offset = 0;
static int tls_mode = TLS_MODE_DETECT;
static int lmf_pthread_key = -1;
static int monothread_key = -1;
static int monodomain_key = -1;
static int
offsets_from_pthread_key (guint32 key, int *offset2)
{
	int idx1 = key / 32;
	int idx2 = key % 32;
	*offset2 = idx2 * sizeof (gpointer);
	return 284 + idx1 * sizeof (gpointer);
}
#define emit_linuxthreads_tls(code,dreg,key) do {\
		int off1, off2;	\
		off1 = offsets_from_pthread_key ((key), &off2);	\
		ppc_load_reg ((code), (dreg), off1, ppc_r2);	\
		ppc_load_reg ((code), (dreg), off2, (dreg));	\
	} while (0)

#define emit_darwing5_tls(code,dreg,key) do {\
		int off1 = 0x48 + key * sizeof (gpointer);	\
		ppc_mfspr ((code), (dreg), 104);	\
		ppc_load_reg ((code), (dreg), off1, (dreg));	\
	} while (0)

/* FIXME: ensure the sc call preserves all but r3 */
#define emit_darwing4_tls(code,dreg,key) do {\
		int off1 = 0x48 + key * sizeof (gpointer);	\
		if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r11, ppc_r3);	\
		ppc_li ((code), ppc_r0, 0x7FF2);	\
		ppc_lwz ((code), (dreg), off1, ppc_r3);	\
		if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r3, ppc_r11);	\
	} while (0)
#ifdef PPC_THREAD_PTR_REG
#define emit_nptl_tls(code,dreg,key) do { \
		int off2 = key >> 15; \
		if ((off2 == 0) || (off2 == -1)) { \
			ppc_load_reg ((code), (dreg), off1, PPC_THREAD_PTR_REG); \
		} else { \
			int off3 = (off2 + 1) > 1; \
			ppc_addis ((code), ppc_r11, PPC_THREAD_PTR_REG, off3); \
			ppc_load_reg ((code), (dreg), off1, ppc_r11); \
		} \
	} while (0)
#else
#define emit_nptl_tls(code,dreg,key) do { \
		g_assert_not_reached (); \
	} while (0)
#endif

#define emit_tls_access(code,dreg,key) do { \
		switch (tls_mode) { \
		case TLS_MODE_LTHREADS: emit_linuxthreads_tls(code,dreg,key); break; \
		case TLS_MODE_NPTL: emit_nptl_tls(code,dreg,key); break; \
		case TLS_MODE_DARWIN_G5: emit_darwing5_tls(code,dreg,key); break; \
		case TLS_MODE_DARWIN_G4: emit_darwing4_tls(code,dreg,key); break; \
		default: g_assert_not_reached (); \
		} \
	} while (0)
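/* A hypothetical usage sketch (not from the original source): loading the
 * current thread's MonoLMF pointer through its pthread key would look like
 *
 *     emit_tls_access (code, ppc_r3, lmf_pthread_key);
 *
 * i.e. the macro expands to the handful of loads required by the detected
 * tls_mode and leaves the TLS value in the destination register. */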
#define MONO_EMIT_NEW_LOAD_R8(cfg,dr,addr) do { \
		MonoInst *inst; \
		MONO_INST_NEW ((cfg), (inst), OP_R8CONST); \
		inst->type = STACK_R8; \
		inst->dreg = (dr); \
		inst->inst_p0 = (void*)(addr); \
		mono_bblock_add_inst (cfg->cbb, inst); \
	} while (0)
const char*
mono_arch_regname (int reg) {
	static const char rnames[][4] = {
		"r0", "sp", "r2", "r3", "r4",
		"r5", "r6", "r7", "r8", "r9",
		"r10", "r11", "r12", "r13", "r14",
		"r15", "r16", "r17", "r18", "r19",
		"r20", "r21", "r22", "r23", "r24",
		"r25", "r26", "r27", "r28", "r29",
		"r30", "r31"
	};
	if (reg >= 0 && reg < 32)
		return rnames [reg];
	return "unknown";
}

const char*
mono_arch_fregname (int reg) {
	static const char rnames[][4] = {
		"f0", "f1", "f2", "f3", "f4",
		"f5", "f6", "f7", "f8", "f9",
		"f10", "f11", "f12", "f13", "f14",
		"f15", "f16", "f17", "f18", "f19",
		"f20", "f21", "f22", "f23", "f24",
		"f25", "f26", "f27", "f28", "f29",
		"f30", "f31"
	};
	if (reg >= 0 && reg < 32)
		return rnames [reg];
	return "unknown";
}
/* this function overwrites r0, r11, r12 */
static guint8*
emit_memcpy (guint8 *code, int size, int dreg, int doffset, int sreg, int soffset)
{
	/* unrolled, use the counter for big copies */
	if (size > sizeof (gpointer) * 5) {
		long shifted = size >> MONO_PPC_32_64_CASE (2, 3);
		guint8 *copy_loop_start, *copy_loop_jump;

		ppc_load (code, ppc_r0, shifted);
		ppc_mtctr (code, ppc_r0);
		g_assert (sreg == ppc_r11);
		ppc_addi (code, ppc_r12, dreg, (doffset - sizeof (gpointer)));
		ppc_addi (code, ppc_r11, sreg, (soffset - sizeof (gpointer)));
		copy_loop_start = code;
		ppc_load_reg_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r11);
		ppc_store_reg_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r12);
		copy_loop_jump = code;
		ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
		ppc_patch (copy_loop_jump, copy_loop_start);
		size -= shifted * sizeof (gpointer);
		doffset = soffset = 0;
		dreg = ppc_r12;
	}
#ifdef __mono_ppc64__
	/* copy remaining doubleword-sized chunks */
	while (size >= 8) {
		ppc_load_reg (code, ppc_r0, soffset, sreg);
		ppc_store_reg (code, ppc_r0, doffset, dreg);
		size -= 8;
		soffset += 8;
		doffset += 8;
	}
#endif
	/* copy remaining word-sized chunks */
	while (size >= 4) {
		ppc_lwz (code, ppc_r0, soffset, sreg);
		ppc_stw (code, ppc_r0, doffset, dreg);
		size -= 4;
		soffset += 4;
		doffset += 4;
	}
	/* copy remaining halfword-sized chunks */
	while (size >= 2) {
		ppc_lhz (code, ppc_r0, soffset, sreg);
		ppc_sth (code, ppc_r0, doffset, dreg);
		size -= 2;
		soffset += 2;
		doffset += 2;
	}
	/* copy remaining bytes */
	while (size >= 1) {
		ppc_lbz (code, ppc_r0, soffset, sreg);
		ppc_stb (code, ppc_r0, doffset, dreg);
		size -= 1;
		soffset += 1;
		doffset += 1;
	}
	return code;
}
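/* For reference, a plain C sketch (illustrative only, not used by the JIT) of
 * the copy strategy hard-coded by the emitter above: bulk-copy gpointer-sized
 * words first, then finish with a byte tail. */
static G_GNUC_UNUSED void
memcpy_like_emitted_code (guint8 *dst, guint8 *src, int size)
{
	/* word-sized bulk copy, like the CTR loop and word loops above */
	while (size >= (int) sizeof (gpointer)) {
		*(gpointer *) dst = *(gpointer *) src;
		dst += sizeof (gpointer);
		src += sizeof (gpointer);
		size -= sizeof (gpointer);
	}
	/* byte tail, like the lbz/stb loop above */
	while (size-- > 0)
		*dst++ = *src++;
}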
/*
 * mono_arch_get_argument_info:
 * @csig: a method signature
 * @param_count: the number of parameters to consider
 * @arg_info: an array to store the result infos
 *
 * Gathers information on parameters such as size, alignment and
 * padding. arg_info should be large enough to hold param_count + 1 entries.
 *
 * Returns the size of the activation frame.
 */
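/* A minimal usage sketch (hypothetical caller, not part of this file):
 *
 *     MonoJitArgumentInfo *ai = g_newa (MonoJitArgumentInfo, csig->param_count + 1);
 *     int frame_size = mono_arch_get_argument_info (csig, csig->param_count, ai);
 *     // ai [0] describes the implicit (return/this) area, ai [k + 1] describes parameter k
 *
 * where g_newa stack-allocates the param_count + 1 entries required above. */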
225 mono_arch_get_argument_info (MonoMethodSignature
*csig
, int param_count
, MonoJitArgumentInfo
*arg_info
)
227 #ifdef __mono_ppc64__
231 int k
, frame_size
= 0;
232 int size
, align
, pad
;
235 if (MONO_TYPE_ISSTRUCT (csig
->ret
)) {
236 frame_size
+= sizeof (gpointer
);
240 arg_info
[0].offset
= offset
;
243 frame_size
+= sizeof (gpointer
);
247 arg_info
[0].size
= frame_size
;
249 for (k
= 0; k
< param_count
; k
++) {
252 size
= mono_type_native_stack_size (csig
->params
[k
], (guint32
*)&align
);
254 size
= mini_type_stack_size (NULL
, csig
->params
[k
], &align
);
256 /* ignore alignment for now */
259 frame_size
+= pad
= (align
- (frame_size
& (align
- 1))) & (align
- 1);
260 arg_info
[k
].pad
= pad
;
262 arg_info
[k
+ 1].pad
= 0;
263 arg_info
[k
+ 1].size
= size
;
265 arg_info
[k
+ 1].offset
= offset
;
269 align
= MONO_ARCH_FRAME_ALIGNMENT
;
270 frame_size
+= pad
= (align
- (frame_size
& (align
- 1))) & (align
- 1);
271 arg_info
[k
].pad
= pad
;
#ifdef __mono_ppc64__
static gboolean
is_load_sequence (guint32 *seq)
{
	return ppc_opcode (seq [0]) == 15 && /* lis */
		ppc_opcode (seq [1]) == 24 && /* ori */
		ppc_opcode (seq [2]) == 30 && /* sldi */
		ppc_opcode (seq [3]) == 25 && /* oris */
		ppc_opcode (seq [4]) == 24;   /* ori */
}
288 #define ppc_load_get_dest(l) (((l)>>21) & 0x1f)
289 #define ppc_load_get_off(l) ((gint16)((l) & 0xffff))
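/* For context (illustrative, values are made up): a 64-bit immediate such as
 * 0x123456789abcdef0 is materialized with exactly the five-instruction shape
 * that is_load_sequence () matches:
 *
 *     lis   rD, 0x1234        ; top 16 bits
 *     ori   rD, rD, 0x5678
 *     sldi  rD, rD, 32        ; shift into the high word
 *     oris  rD, rD, 0x9abc
 *     ori   rD, rD, 0xdef0
 */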
/* code must point to the blrl */
gboolean
mono_ppc_is_direct_call_sequence (guint32 *code)
{
#ifdef __mono_ppc64__
	g_assert(*code == 0x4e800021 || *code == 0x4e800020 || *code == 0x4e800420);

	/* the thunk-less direct call sequence: lis/ori/sldi/oris/ori/mtlr/blrl */
	if (ppc_opcode (code [-1]) == 31) { /* mtlr */
		if (ppc_opcode (code [-2]) == 58 && ppc_opcode (code [-3]) == 58) { /* ld/ld */
			if (!is_load_sequence (&code [-8]))
				return FALSE;
			/* one of the loads must be "ld r2,8(rX)" */
			return (ppc_load_get_dest (code [-2]) == ppc_r2 && ppc_load_get_off (code [-2]) == 8) ||
				(ppc_load_get_dest (code [-3]) == ppc_r2 && ppc_load_get_off (code [-3]) == 8);
		}
		if (ppc_opcode (code [-2]) == 24 && ppc_opcode (code [-3]) == 31) /* mr/nop */
			return is_load_sequence (&code [-8]);

		return is_load_sequence (&code [-6]);
	}
	return FALSE;
#else
	g_assert(*code == 0x4e800021);

	/* the thunk-less direct call sequence: lis/ori/mtlr/blrl */
	return ppc_opcode (code [-1]) == 31 &&
		ppc_opcode (code [-2]) == 24 &&
		ppc_opcode (code [-3]) == 15;
#endif
}
325 mono_arch_get_vcall_slot (guint8
*code_ptr
, gpointer
*regs
, int *displacement
)
329 guint32
* code
= (guint32
*)code_ptr
;
333 /* This is the 'blrl' instruction */
336 /* Sanity check: instruction must be 'blrl' */
337 if (*code
!= 0x4e800021)
340 if (mono_ppc_is_direct_call_sequence (code
))
343 /* FIXME: more sanity checks here */
344 /* OK, we're now at the 'blrl' instruction. Now walk backwards
345 till we get to a 'mtlr rA' */
347 if((*code
& 0x7c0803a6) == 0x7c0803a6) {
349 /* Here we are: we reached the 'mtlr rA'.
350 Extract the register from the instruction */
351 reg
= (*code
& 0x03e00000) >> 21;
353 /* ok, this is a lwz reg, offset (vtreg)
354 * it is emitted with:
355 * ppc_emit32 (c, (32 << 26) | ((D) << 21) | ((a) << 16) | (guint16)(d))
357 soff
= (*code
& 0xffff);
359 reg
= (*code
>> 16) & 0x1f;
360 g_assert (reg
!= ppc_r1
);
361 /*g_print ("patching reg is %d\n", reg);*/
363 MonoLMF
*lmf
= (MonoLMF
*)((char*)regs
+ (14 * sizeof (double)) + (13 * sizeof (gpointer
)));
364 /* saved in the MonoLMF structure */
365 o
= (gpointer
)lmf
->iregs
[reg
- 13];
372 *displacement
= offset
;
377 mono_arch_get_vcall_slot_addr (guint8
*code
, gpointer
*regs
)
381 vt
= mono_arch_get_vcall_slot (code
, regs
, &displacement
);
384 return (gpointer
*)((char*)vt
+ displacement
);
387 #define MAX_ARCH_DELEGATE_PARAMS 7
390 mono_arch_get_delegate_invoke_impl (MonoMethodSignature
*sig
, gboolean has_target
)
392 guint8
*code
, *start
;
394 /* FIXME: Support more cases */
395 if (MONO_TYPE_ISSTRUCT (sig
->ret
))
399 static guint8
* cached
= NULL
;
400 int size
= MONO_PPC_32_64_CASE (16, 20) + PPC_FTNPTR_SIZE
;
401 mono_mini_arch_lock ();
403 mono_mini_arch_unlock ();
407 start
= code
= mono_global_codeman_reserve (size
);
408 code
= mono_ppc_create_pre_code_ftnptr (code
);
410 /* Replace the this argument with the target */
411 ppc_load_reg (code
, ppc_r0
, G_STRUCT_OFFSET (MonoDelegate
, method_ptr
), ppc_r3
);
412 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
413 /* it's a function descriptor */
414 ppc_ldx (code
, ppc_r0
, 0, ppc_r0
);
416 ppc_mtctr (code
, ppc_r0
);
417 ppc_load_reg (code
, ppc_r3
, G_STRUCT_OFFSET (MonoDelegate
, target
), ppc_r3
);
418 ppc_bcctr (code
, PPC_BR_ALWAYS
, 0);
420 g_assert ((code
- start
) <= size
);
422 mono_arch_flush_icache (start
, size
);
424 mono_mini_arch_unlock ();
427 static guint8
* cache
[MAX_ARCH_DELEGATE_PARAMS
+ 1] = {NULL
};
430 if (sig
->param_count
> MAX_ARCH_DELEGATE_PARAMS
)
432 for (i
= 0; i
< sig
->param_count
; ++i
)
433 if (!mono_is_regsize_var (sig
->params
[i
]))
436 mono_mini_arch_lock ();
437 code
= cache
[sig
->param_count
];
439 mono_mini_arch_unlock ();
443 size
= MONO_PPC_32_64_CASE (12, 16) + sig
->param_count
* 4 + PPC_FTNPTR_SIZE
;
444 start
= code
= mono_global_codeman_reserve (size
);
445 code
= mono_ppc_create_pre_code_ftnptr (code
);
447 ppc_load_reg (code
, ppc_r0
, G_STRUCT_OFFSET (MonoDelegate
, method_ptr
), ppc_r3
);
448 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
449 /* it's a function descriptor */
450 ppc_ldx (code
, ppc_r0
, 0, ppc_r0
);
452 ppc_mtctr (code
, ppc_r0
);
453 /* slide down the arguments */
454 for (i
= 0; i
< sig
->param_count
; ++i
) {
455 ppc_mr (code
, (ppc_r3
+ i
), (ppc_r3
+ i
+ 1));
457 ppc_bcctr (code
, PPC_BR_ALWAYS
, 0);
459 g_assert ((code
- start
) <= size
);
461 mono_arch_flush_icache (start
, size
);
462 cache
[sig
->param_count
] = start
;
463 mono_mini_arch_unlock ();
470 mono_arch_get_this_arg_from_call (MonoGenericSharingContext
*gsctx
, MonoMethodSignature
*sig
, gssize
*regs
, guint8
*code
)
472 /* FIXME: handle returning a struct */
473 if (MONO_TYPE_ISSTRUCT (sig
->ret
))
474 return (gpointer
)regs
[ppc_r4
];
475 return (gpointer
)regs
[ppc_r3
];
479 * Initialize the cpu to execute managed code.
482 mono_arch_cpu_init (void)
487 * Initialize architecture specific code.
490 mono_arch_init (void)
492 InitializeCriticalSection (&mini_arch_mutex
);
496 * Cleanup architecture specific code.
499 mono_arch_cleanup (void)
501 DeleteCriticalSection (&mini_arch_mutex
);
505 * This function returns the optimizations supported on this cpu.
508 mono_arch_cpu_optimizazions (guint32
*exclude_mask
)
512 /* no ppc-specific optimizations yet */
517 #ifdef __mono_ppc64__
518 #define CASE_PPC32(c)
519 #define CASE_PPC64(c) case c:
521 #define CASE_PPC32(c) case c:
522 #define CASE_PPC64(c)
526 is_regsize_var (MonoType
*t
) {
529 t
= mini_type_get_underlying_type (NULL
, t
);
533 CASE_PPC64 (MONO_TYPE_I8
)
534 CASE_PPC64 (MONO_TYPE_U8
)
538 case MONO_TYPE_FNPTR
:
540 case MONO_TYPE_OBJECT
:
541 case MONO_TYPE_STRING
:
542 case MONO_TYPE_CLASS
:
543 case MONO_TYPE_SZARRAY
:
544 case MONO_TYPE_ARRAY
:
546 case MONO_TYPE_GENERICINST
:
547 if (!mono_type_generic_inst_is_valuetype (t
))
550 case MONO_TYPE_VALUETYPE
:
557 mono_arch_get_allocatable_int_vars (MonoCompile
*cfg
)
562 for (i
= 0; i
< cfg
->num_varinfo
; i
++) {
563 MonoInst
*ins
= cfg
->varinfo
[i
];
564 MonoMethodVar
*vmv
= MONO_VARINFO (cfg
, i
);
567 if (vmv
->range
.first_use
.abs_pos
>= vmv
->range
.last_use
.abs_pos
)
570 if (ins
->flags
& (MONO_INST_VOLATILE
|MONO_INST_INDIRECT
) || (ins
->opcode
!= OP_LOCAL
&& ins
->opcode
!= OP_ARG
))
573 /* we can only allocate 32 bit values */
574 if (is_regsize_var (ins
->inst_vtype
)) {
575 g_assert (MONO_VARINFO (cfg
, i
)->reg
== -1);
576 g_assert (i
== vmv
->idx
);
577 vars
= mono_varlist_insert_sorted (cfg
, vars
, vmv
, FALSE
);
585 mono_arch_get_global_int_regs (MonoCompile
*cfg
)
589 if (cfg
->frame_reg
!= ppc_sp
)
591 /* ppc_r13 is used by the system on PPC EABI */
592 for (i
= 14; i
< top
; ++i
)
593 regs
= g_list_prepend (regs
, GUINT_TO_POINTER (i
));
/*
 * mono_arch_regalloc_cost:
 *
 * Return the cost, in number of memory references, of the action of
 * allocating the variable VMV into a register during global register
 * allocation.
 */
606 mono_arch_regalloc_cost (MonoCompile
*cfg
, MonoMethodVar
*vmv
)
618 mono_arch_flush_icache (guint8
*code
, gint size
)
621 guint8
*endp
, *start
;
622 static int cachelinesize
= 0;
623 static int cachelineinc
= 16;
625 if (!cachelinesize
) {
630 mib
[1] = HW_CACHELINE
;
631 len
= sizeof (cachelinesize
);
632 if (sysctl(mib
, 2, &cachelinesize
, (size_t*)&len
, NULL
, 0) == -1) {
636 cachelineinc
= cachelinesize
;
637 /*g_print ("setting cl size to %d\n", cachelinesize);*/
639 #elif defined(__linux__)
640 /* sadly this will work only with 2.6 kernels... */
641 FILE* f
= fopen ("/proc/self/auxv", "rb");
644 while (fread (&vec
, sizeof (vec
), 1, f
) == 1) {
645 if (vec
.type
== 19) {
646 cachelinesize
= vec
.value
;
654 #elif defined(G_COMPILER_CODEWARRIOR)
658 #warning Need a way to get cache line size
664 start
= (guint8
*)((gsize
)start
& ~(cachelinesize
- 1));
665 /* use dcbf for smp support, later optimize for UP, see pem._64bit.d20030611.pdf page 211 */
666 #if defined(G_COMPILER_CODEWARRIOR)
668 for (p
= start
; p
< endp
; p
+= cachelineinc
) {
672 for (p
= start
; p
< endp
; p
+= cachelineinc
) {
678 for (p
= start
; p
< endp
; p
+= cachelineinc
) {
690 for (p
= start
; p
< endp
; p
+= cachelineinc
) {
691 asm ("dcbf 0,%0;" : : "r"(p
) : "memory");
694 for (p
= start
; p
< endp
; p
+= cachelineinc
) {
695 asm ("dcbst 0,%0;" : : "r"(p
) : "memory");
700 for (p
= start
; p
< endp
; p
+= cachelineinc
) {
701 asm ("icbi 0,%0; sync;" : : "r"(p
) : "memory");
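/* Illustrative stand-alone version (not used by the JIT) of the Linux
 * /proc/self/auxv probe earlier in this function: the magic value 19 it
 * matches is AT_DCACHEBSIZE from <elf.h>. */
#if 0
#include <stdio.h>
#include <link.h>

static long
probe_dcache_line_size (void)
{
	ElfW(auxv_t) vec;
	long line = 0;
	FILE *f = fopen ("/proc/self/auxv", "rb");

	if (!f)
		return 0;
	while (fread (&vec, sizeof (vec), 1, f) == 1) {
		if (vec.a_type == AT_DCACHEBSIZE) {	/* == 19 */
			line = vec.a_un.a_val;
			break;
		}
	}
	fclose (f);
	return line;
}
#endif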
709 mono_arch_flush_register_windows (void)
714 #define ALWAYS_ON_STACK(s) s
715 #define FP_ALSO_IN_REG(s) s
717 #ifdef __mono_ppc64__
718 #define ALWAYS_ON_STACK(s) s
719 #define FP_ALSO_IN_REG(s) s
721 #define ALWAYS_ON_STACK(s)
722 #define FP_ALSO_IN_REG(s)
724 #define ALIGN_DOUBLES
	guint32 vtsize; /* in param area */
	guint8 regtype : 4; /* 0 general, 1 basereg, 2 floating point register, see RegType* */
	guint8 size : 4; /* 1, 2, 4, 8, or regs used by RegTypeStructByVal */
	guint8 bytes : 4; /* size in bytes - only valid for
			     RegTypeStructByVal if the struct fits
			     in one word, otherwise it's 0 */
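	/* Illustrative example (not from the original source): on ppc64, a 24 byte
	 * struct passed when only two argument registers remain is described as
	 * regtype = RegTypeStructByVal, size = 2 (words passed in registers) and
	 * vtsize = 1 (word passed in the caller's param area). */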
758 add_general (guint
*gr
, guint
*stack_size
, ArgInfo
*ainfo
, gboolean simple
)
760 #ifdef __mono_ppc64__
765 if (*gr
>= 3 + PPC_NUM_REG_ARGS
) {
766 ainfo
->offset
= PPC_STACK_PARAM_OFFSET
+ *stack_size
;
767 ainfo
->reg
= ppc_sp
; /* in the caller */
768 ainfo
->regtype
= RegTypeBase
;
769 *stack_size
+= sizeof (gpointer
);
771 ALWAYS_ON_STACK (*stack_size
+= sizeof (gpointer
));
775 if (*gr
>= 3 + PPC_NUM_REG_ARGS
- 1) {
777 //*stack_size += (*stack_size % 8);
779 ainfo
->offset
= PPC_STACK_PARAM_OFFSET
+ *stack_size
;
780 ainfo
->reg
= ppc_sp
; /* in the caller */
781 ainfo
->regtype
= RegTypeBase
;
788 ALWAYS_ON_STACK (*stack_size
+= 8);
796 #if defined(__APPLE__) || defined(__mono_ppc64__)
798 has_only_a_r48_field (MonoClass
*klass
)
802 gboolean have_field
= FALSE
;
804 while ((f
= mono_class_get_fields (klass
, &iter
))) {
805 if (!(f
->type
->attrs
& FIELD_ATTRIBUTE_STATIC
)) {
808 if (!f
->type
->byref
&& (f
->type
->type
== MONO_TYPE_R4
|| f
->type
->type
== MONO_TYPE_R8
))
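/* Illustrative example (not from the original source): a type such as
 *     struct { double value; };
 * has only an R4/R8 field and so may be passed in a floating point register,
 * while struct { double value; int tag; }; does not qualify. */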
819 calculate_sizes (MonoMethodSignature
*sig
, gboolean is_pinvoke
)
822 int n
= sig
->hasthis
+ sig
->param_count
;
824 guint32 stack_size
= 0;
825 CallInfo
*cinfo
= g_malloc0 (sizeof (CallInfo
) + sizeof (ArgInfo
) * n
);
827 fr
= PPC_FIRST_FPARG_REG
;
828 gr
= PPC_FIRST_ARG_REG
;
830 /* FIXME: handle returning a struct */
831 if (MONO_TYPE_ISSTRUCT (sig
->ret
)) {
832 add_general (&gr
, &stack_size
, &cinfo
->ret
, TRUE
);
833 cinfo
->struct_ret
= PPC_FIRST_ARG_REG
;
838 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
841 DEBUG(printf("params: %d\n", sig
->param_count
));
842 for (i
= 0; i
< sig
->param_count
; ++i
) {
843 if (!sig
->pinvoke
&& (sig
->call_convention
== MONO_CALL_VARARG
) && (i
== sig
->sentinelpos
)) {
844 /* Prevent implicit arguments and sig_cookie from
845 being passed in registers */
846 gr
= PPC_LAST_ARG_REG
+ 1;
847 /* FIXME: don't we have to set fr, too? */
848 /* Emit the signature cookie just before the implicit arguments */
849 add_general (&gr
, &stack_size
, &cinfo
->sig_cookie
, TRUE
);
851 DEBUG(printf("param %d: ", i
));
852 if (sig
->params
[i
]->byref
) {
853 DEBUG(printf("byref\n"));
854 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
858 simpletype
= mini_type_get_underlying_type (NULL
, sig
->params
[i
])->type
;
859 switch (simpletype
) {
860 case MONO_TYPE_BOOLEAN
:
863 cinfo
->args
[n
].size
= 1;
864 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
870 cinfo
->args
[n
].size
= 2;
871 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
876 cinfo
->args
[n
].size
= 4;
877 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
883 case MONO_TYPE_FNPTR
:
884 case MONO_TYPE_CLASS
:
885 case MONO_TYPE_OBJECT
:
886 case MONO_TYPE_STRING
:
887 case MONO_TYPE_SZARRAY
:
888 case MONO_TYPE_ARRAY
:
889 cinfo
->args
[n
].size
= sizeof (gpointer
);
890 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
893 case MONO_TYPE_GENERICINST
:
894 if (!mono_type_generic_inst_is_valuetype (sig
->params
[i
])) {
895 cinfo
->args
[n
].size
= sizeof (gpointer
);
896 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
901 case MONO_TYPE_VALUETYPE
: {
904 klass
= mono_class_from_mono_type (sig
->params
[i
]);
906 size
= mono_class_native_size (klass
, NULL
);
908 size
= mono_class_value_size (klass
, NULL
);
909 #if defined(__APPLE__) || defined(__mono_ppc64__)
910 if ((size
== 4 || size
== 8) && has_only_a_r48_field (klass
)) {
911 cinfo
->args
[n
].size
= size
;
913 /* It was 7, now it is 8 in LinuxPPC */
914 if (fr
<= PPC_LAST_FPARG_REG
) {
915 cinfo
->args
[n
].regtype
= RegTypeFP
;
916 cinfo
->args
[n
].reg
= fr
;
918 FP_ALSO_IN_REG (gr
++);
920 FP_ALSO_IN_REG (gr
++);
921 ALWAYS_ON_STACK (stack_size
+= size
);
923 cinfo
->args
[n
].offset
= PPC_STACK_PARAM_OFFSET
+ stack_size
;
924 cinfo
->args
[n
].regtype
= RegTypeBase
;
925 cinfo
->args
[n
].reg
= ppc_sp
; /* in the caller*/
932 DEBUG(printf ("load %d bytes struct\n",
933 mono_class_native_size (sig
->params
[i
]->data
.klass
, NULL
)));
934 #if PPC_PASS_STRUCTS_BY_VALUE
936 int align_size
= size
;
938 int rest
= PPC_LAST_ARG_REG
- gr
+ 1;
940 align_size
+= (sizeof (gpointer
) - 1);
941 align_size
&= ~(sizeof (gpointer
) - 1);
942 nwords
= (align_size
+ sizeof (gpointer
) -1 ) / sizeof (gpointer
);
943 n_in_regs
= MIN (rest
, nwords
);
944 cinfo
->args
[n
].regtype
= RegTypeStructByVal
;
945 if (gr
> PPC_LAST_ARG_REG
947 /* FIXME: check this */
948 || (size
>= 3 && size
% 4 != 0)
951 cinfo
->args
[n
].size
= 0;
952 cinfo
->args
[n
].vtsize
= nwords
;
954 cinfo
->args
[n
].size
= n_in_regs
;
955 cinfo
->args
[n
].vtsize
= nwords
- n_in_regs
;
956 cinfo
->args
[n
].reg
= gr
;
958 #ifdef __mono_ppc64__
959 if (nwords
== 1 && is_pinvoke
)
960 cinfo
->args
[n
].bytes
= size
;
963 cinfo
->args
[n
].bytes
= 0;
965 cinfo
->args
[n
].offset
= PPC_STACK_PARAM_OFFSET
+ stack_size
;
966 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
967 stack_size
+= nwords
* sizeof (gpointer
);
970 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
971 cinfo
->args
[n
].regtype
= RegTypeStructByAddr
;
972 cinfo
->args
[n
].vtsize
= size
;
977 case MONO_TYPE_TYPEDBYREF
: {
978 int size
= sizeof (MonoTypedRef
);
979 /* keep in sync or merge with the valuetype case */
980 #if PPC_PASS_STRUCTS_BY_VALUE
982 int nwords
= (size
+ sizeof (gpointer
) -1 ) / sizeof (gpointer
);
983 cinfo
->args
[n
].regtype
= RegTypeStructByVal
;
984 if (gr
<= PPC_LAST_ARG_REG
) {
985 int rest
= PPC_LAST_ARG_REG
- gr
+ 1;
986 int n_in_regs
= rest
>= nwords
? nwords
: rest
;
987 cinfo
->args
[n
].size
= n_in_regs
;
988 cinfo
->args
[n
].vtsize
= nwords
- n_in_regs
;
989 cinfo
->args
[n
].reg
= gr
;
992 cinfo
->args
[n
].size
= 0;
993 cinfo
->args
[n
].vtsize
= nwords
;
995 #ifdef __mono_ppc64__
996 if (nwords
== 1 && is_pinvoke
)
997 cinfo
->args
[n
].bytes
= size
;
1000 cinfo
->args
[n
].bytes
= 0;
1001 cinfo
->args
[n
].offset
= PPC_STACK_PARAM_OFFSET
+ stack_size
;
1002 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
1003 stack_size
+= nwords
* sizeof (gpointer
);
1006 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, TRUE
);
1007 cinfo
->args
[n
].regtype
= RegTypeStructByAddr
;
1008 cinfo
->args
[n
].vtsize
= size
;
1015 cinfo
->args
[n
].size
= 8;
1016 add_general (&gr
, &stack_size
, cinfo
->args
+ n
, sizeof (gpointer
) == 8);
1020 cinfo
->args
[n
].size
= 4;
1022 /* It was 7, now it is 8 in LinuxPPC */
1023 if (fr
<= PPC_LAST_FPARG_REG
) {
1024 cinfo
->args
[n
].regtype
= RegTypeFP
;
1025 cinfo
->args
[n
].reg
= fr
;
1027 FP_ALSO_IN_REG (gr
++);
1028 ALWAYS_ON_STACK (stack_size
+= sizeof (gpointer
));
1030 cinfo
->args
[n
].offset
= PPC_STACK_PARAM_OFFSET
+ stack_size
+ MONO_PPC_32_64_CASE (0, 4);
1031 cinfo
->args
[n
].regtype
= RegTypeBase
;
1032 cinfo
->args
[n
].reg
= ppc_sp
; /* in the caller*/
1033 stack_size
+= sizeof (gpointer
);
1038 cinfo
->args
[n
].size
= 8;
1039 /* It was 7, now it is 8 in LinuxPPC */
1040 if (fr
<= PPC_LAST_FPARG_REG
) {
1041 cinfo
->args
[n
].regtype
= RegTypeFP
;
1042 cinfo
->args
[n
].reg
= fr
;
1044 FP_ALSO_IN_REG (gr
+= sizeof (double) / sizeof (gpointer
));
1045 ALWAYS_ON_STACK (stack_size
+= 8);
1047 cinfo
->args
[n
].offset
= PPC_STACK_PARAM_OFFSET
+ stack_size
;
1048 cinfo
->args
[n
].regtype
= RegTypeBase
;
1049 cinfo
->args
[n
].reg
= ppc_sp
; /* in the caller*/
1055 g_error ("Can't trampoline 0x%x", sig
->params
[i
]->type
);
1059 if (!sig
->pinvoke
&& (sig
->call_convention
== MONO_CALL_VARARG
) && (i
== sig
->sentinelpos
)) {
1060 /* Prevent implicit arguments and sig_cookie from
1061 being passed in registers */
1062 gr
= PPC_LAST_ARG_REG
+ 1;
1063 /* Emit the signature cookie just before the implicit arguments */
1064 add_general (&gr
, &stack_size
, &cinfo
->sig_cookie
, TRUE
);
1068 simpletype
= mini_type_get_underlying_type (NULL
, sig
->ret
)->type
;
1069 switch (simpletype
) {
1070 case MONO_TYPE_BOOLEAN
:
1075 case MONO_TYPE_CHAR
:
1081 case MONO_TYPE_FNPTR
:
1082 case MONO_TYPE_CLASS
:
1083 case MONO_TYPE_OBJECT
:
1084 case MONO_TYPE_SZARRAY
:
1085 case MONO_TYPE_ARRAY
:
1086 case MONO_TYPE_STRING
:
1087 cinfo
->ret
.reg
= ppc_r3
;
1091 cinfo
->ret
.reg
= ppc_r3
;
1095 cinfo
->ret
.reg
= ppc_f1
;
1096 cinfo
->ret
.regtype
= RegTypeFP
;
1098 case MONO_TYPE_GENERICINST
:
1099 if (!mono_type_generic_inst_is_valuetype (sig
->ret
)) {
1100 cinfo
->ret
.reg
= ppc_r3
;
1104 case MONO_TYPE_VALUETYPE
:
1106 case MONO_TYPE_TYPEDBYREF
:
1107 case MONO_TYPE_VOID
:
1110 g_error ("Can't handle as return value 0x%x", sig
->ret
->type
);
	/* align stack size to 16 */
	DEBUG (printf (" stack size: %d (%d)\n", (stack_size + 15) & ~15, stack_size));
	stack_size = (stack_size + 15) & ~15;

	cinfo->stack_usage = stack_size;
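	/* e.g. (stack_size + 15) & ~15 rounds 52 up to 64 and leaves 48 unchanged,
	 * keeping the outgoing argument area 16-byte aligned. */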
1123 allocate_tailcall_valuetype_addrs (MonoCompile
*cfg
)
1125 #if !PPC_PASS_STRUCTS_BY_VALUE
1126 MonoMethodSignature
*sig
= mono_method_signature (cfg
->method
);
1127 int num_structs
= 0;
1130 if (!(cfg
->flags
& MONO_CFG_HAS_TAIL
))
1133 for (i
= 0; i
< sig
->param_count
; ++i
) {
1134 MonoType
*type
= mono_type_get_underlying_type (sig
->params
[i
]);
1135 if (type
->type
== MONO_TYPE_VALUETYPE
)
1140 cfg
->tailcall_valuetype_addrs
=
1141 mono_mempool_alloc0 (cfg
->mempool
, sizeof (MonoInst
*) * num_structs
);
1142 for (i
= 0; i
< num_structs
; ++i
) {
1143 cfg
->tailcall_valuetype_addrs
[i
] =
1144 mono_compile_create_var (cfg
, &mono_defaults
.int_class
->byval_arg
, OP_LOCAL
);
1145 cfg
->tailcall_valuetype_addrs
[i
]->flags
|= MONO_INST_INDIRECT
;
1152 * Set var information according to the calling convention. ppc version.
1153 * The locals var stuff should most likely be split in another method.
1156 mono_arch_allocate_vars (MonoCompile
*m
)
1158 MonoMethodSignature
*sig
;
1159 MonoMethodHeader
*header
;
1161 int i
, offset
, size
, align
, curinst
;
1162 int frame_reg
= ppc_sp
;
1164 guint32 locals_stack_size
, locals_stack_align
;
1166 allocate_tailcall_valuetype_addrs (m
);
1168 m
->flags
|= MONO_CFG_HAS_SPILLUP
;
1170 /* allow room for the vararg method args: void* and long/double */
1171 if (mono_jit_trace_calls
!= NULL
&& mono_trace_eval (m
->method
))
1172 m
->param_area
= MAX (m
->param_area
, sizeof (gpointer
)*8);
1173 /* this is bug #60332: remove when #59509 is fixed, so no weird vararg
1174 * call convs needs to be handled this way.
1176 if (m
->flags
& MONO_CFG_HAS_VARARGS
)
1177 m
->param_area
= MAX (m
->param_area
, sizeof (gpointer
)*8);
1178 /* gtk-sharp and other broken code will dllimport vararg functions even with
1179 * non-varargs signatures. Since there is little hope people will get this right
1180 * we assume they won't.
1182 if (m
->method
->wrapper_type
== MONO_WRAPPER_MANAGED_TO_NATIVE
)
1183 m
->param_area
= MAX (m
->param_area
, sizeof (gpointer
)*8);
1185 header
= mono_method_get_header (m
->method
);
1188 * We use the frame register also for any method that has
1189 * exception clauses. This way, when the handlers are called,
1190 * the code will reference local variables using the frame reg instead of
1191 * the stack pointer: if we had to restore the stack pointer, we'd
1192 * corrupt the method frames that are already on the stack (since
1193 * filters get called before stack unwinding happens) when the filter
1194 * code would call any method (this also applies to finally etc.).
1196 if ((m
->flags
& MONO_CFG_HAS_ALLOCA
) || header
->num_clauses
)
1197 frame_reg
= ppc_r31
;
1198 m
->frame_reg
= frame_reg
;
1199 if (frame_reg
!= ppc_sp
) {
1200 m
->used_int_regs
|= 1 << frame_reg
;
1203 sig
= mono_method_signature (m
->method
);
1207 if (MONO_TYPE_ISSTRUCT (sig
->ret
)) {
1208 m
->ret
->opcode
= OP_REGVAR
;
1209 m
->ret
->inst_c0
= m
->ret
->dreg
= ppc_r3
;
1211 /* FIXME: handle long values? */
1212 switch (mini_type_get_underlying_type (m
->generic_sharing_context
, sig
->ret
)->type
) {
1213 case MONO_TYPE_VOID
:
1217 m
->ret
->opcode
= OP_REGVAR
;
1218 m
->ret
->inst_c0
= m
->ret
->dreg
= ppc_f1
;
1221 m
->ret
->opcode
= OP_REGVAR
;
1222 m
->ret
->inst_c0
= m
->ret
->dreg
= ppc_r3
;
1226 /* local vars are at a positive offset from the stack pointer */
1228 * also note that if the function uses alloca, we use ppc_r31
1229 * to point at the local variables.
1231 offset
= PPC_MINIMAL_STACK_SIZE
; /* linkage area */
1232 /* align the offset to 16 bytes: not sure this is needed here */
1234 //offset &= ~(16 - 1);
1236 /* add parameter area size for called functions */
1237 offset
+= m
->param_area
;
1239 offset
&= ~(16 - 1);
1241 /* allow room to save the return value */
1242 if (mono_jit_trace_calls
!= NULL
&& mono_trace_eval (m
->method
))
1245 /* the MonoLMF structure is stored just below the stack pointer */
1248 /* this stuff should not be needed on ppc and the new jit,
1249 * because a call on ppc to the handlers doesn't change the
1250 * stack pointer and the jist doesn't manipulate the stack pointer
1251 * for operations involving valuetypes.
1253 /* reserve space to store the esp */
1254 offset
+= sizeof (gpointer
);
1256 /* this is a global constant */
1257 mono_exc_esp_offset
= offset
;
1260 if (MONO_TYPE_ISSTRUCT (sig
->ret
)) {
1261 offset
+= sizeof(gpointer
) - 1;
1262 offset
&= ~(sizeof(gpointer
) - 1);
1264 m
->vret_addr
->opcode
= OP_REGOFFSET
;
1265 m
->vret_addr
->inst_basereg
= frame_reg
;
1266 m
->vret_addr
->inst_offset
= offset
;
1268 if (G_UNLIKELY (m
->verbose_level
> 1)) {
1269 printf ("vret_addr =");
1270 mono_print_ins (m
->vret_addr
);
1273 offset
+= sizeof(gpointer
);
1276 offsets
= mono_allocate_stack_slots_full (m
, FALSE
, &locals_stack_size
, &locals_stack_align
);
1277 if (locals_stack_align
) {
1278 offset
+= (locals_stack_align
- 1);
1279 offset
&= ~(locals_stack_align
- 1);
1281 for (i
= m
->locals_start
; i
< m
->num_varinfo
; i
++) {
1282 if (offsets
[i
] != -1) {
1283 MonoInst
*inst
= m
->varinfo
[i
];
1284 inst
->opcode
= OP_REGOFFSET
;
1285 inst
->inst_basereg
= frame_reg
;
1286 inst
->inst_offset
= offset
+ offsets
[i
];
1288 g_print ("allocating local %d (%s) to %d\n",
1289 i, mono_type_get_name (inst->inst_vtype), inst->inst_offset);
1293 offset
+= locals_stack_size
;
1297 inst
= m
->args
[curinst
];
1298 if (inst
->opcode
!= OP_REGVAR
) {
1299 inst
->opcode
= OP_REGOFFSET
;
1300 inst
->inst_basereg
= frame_reg
;
1301 offset
+= sizeof (gpointer
) - 1;
1302 offset
&= ~(sizeof (gpointer
) - 1);
1303 inst
->inst_offset
= offset
;
1304 offset
+= sizeof (gpointer
);
1309 for (i
= 0; i
< sig
->param_count
; ++i
) {
1310 inst
= m
->args
[curinst
];
1311 if (inst
->opcode
!= OP_REGVAR
) {
1312 inst
->opcode
= OP_REGOFFSET
;
1313 inst
->inst_basereg
= frame_reg
;
1315 size
= mono_type_native_stack_size (sig
->params
[i
], (guint32
*)&align
);
1316 inst
->backend
.is_pinvoke
= 1;
1318 size
= mono_type_size (sig
->params
[i
], &align
);
1320 if (MONO_TYPE_ISSTRUCT (sig
->params
[i
]) && size
< sizeof (gpointer
))
1321 size
= align
= sizeof (gpointer
);
1322 offset
+= align
- 1;
1323 offset
&= ~(align
- 1);
1324 inst
->inst_offset
= offset
;
1330 /* some storage for fp conversions */
1333 m
->arch
.fp_conv_var_offset
= offset
;
1336 /* align the offset to 16 bytes */
1338 offset
&= ~(16 - 1);
1341 m
->stack_offset
= offset
;
1343 if (sig
->call_convention
== MONO_CALL_VARARG
) {
1344 CallInfo
*cinfo
= calculate_sizes (m
->method
->signature
, m
->method
->signature
->pinvoke
);
1346 m
->sig_cookie
= cinfo
->sig_cookie
.offset
;
1353 mono_arch_create_vars (MonoCompile
*cfg
)
1355 MonoMethodSignature
*sig
= mono_method_signature (cfg
->method
);
1357 if (MONO_TYPE_ISSTRUCT (sig
->ret
)) {
1358 cfg
->vret_addr
= mono_compile_create_var (cfg
, &mono_defaults
.int_class
->byval_arg
, OP_ARG
);
1362 /* Fixme: we need an alignment solution for enter_method and mono_arch_call_opcode,
1363 * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
1367 emit_sig_cookie (MonoCompile
*cfg
, MonoCallInst
*call
, CallInfo
*cinfo
)
1369 int sig_reg
= mono_alloc_ireg (cfg
);
1371 MONO_EMIT_NEW_ICONST (cfg
, sig_reg
, (gulong
)call
->signature
);
1372 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STORE_MEMBASE_REG
,
1373 ppc_r1
, cinfo
->sig_cookie
.offset
, sig_reg
);
1377 mono_arch_emit_call (MonoCompile
*cfg
, MonoCallInst
*call
)
1380 MonoMethodSignature
*sig
;
1384 sig
= call
->signature
;
1385 n
= sig
->param_count
+ sig
->hasthis
;
1387 cinfo
= calculate_sizes (sig
, sig
->pinvoke
);
1389 for (i
= 0; i
< n
; ++i
) {
1390 ArgInfo
*ainfo
= cinfo
->args
+ i
;
1393 if (i
>= sig
->hasthis
)
1394 t
= sig
->params
[i
- sig
->hasthis
];
1396 t
= &mono_defaults
.int_class
->byval_arg
;
1397 t
= mini_type_get_underlying_type (cfg
->generic_sharing_context
, t
);
1399 if (!sig
->pinvoke
&& (sig
->call_convention
== MONO_CALL_VARARG
) && (i
== sig
->sentinelpos
))
1400 emit_sig_cookie (cfg
, call
, cinfo
);
1402 in
= call
->args
[i
];
1404 if (ainfo
->regtype
== RegTypeGeneral
) {
1405 #ifndef __mono_ppc64__
1406 if (!t
->byref
&& ((t
->type
== MONO_TYPE_I8
) || (t
->type
== MONO_TYPE_U8
))) {
1407 MONO_INST_NEW (cfg
, ins
, OP_MOVE
);
1408 ins
->dreg
= mono_alloc_ireg (cfg
);
1409 ins
->sreg1
= in
->dreg
+ 1;
1410 MONO_ADD_INS (cfg
->cbb
, ins
);
1411 mono_call_inst_add_outarg_reg (cfg
, call
, ins
->dreg
, ainfo
->reg
+ 1, FALSE
);
1413 MONO_INST_NEW (cfg
, ins
, OP_MOVE
);
1414 ins
->dreg
= mono_alloc_ireg (cfg
);
1415 ins
->sreg1
= in
->dreg
+ 2;
1416 MONO_ADD_INS (cfg
->cbb
, ins
);
1417 mono_call_inst_add_outarg_reg (cfg
, call
, ins
->dreg
, ainfo
->reg
, FALSE
);
1421 MONO_INST_NEW (cfg
, ins
, OP_MOVE
);
1422 ins
->dreg
= mono_alloc_ireg (cfg
);
1423 ins
->sreg1
= in
->dreg
;
1424 MONO_ADD_INS (cfg
->cbb
, ins
);
1426 mono_call_inst_add_outarg_reg (cfg
, call
, ins
->dreg
, ainfo
->reg
, FALSE
);
1428 } else if (ainfo
->regtype
== RegTypeStructByAddr
) {
1429 MONO_INST_NEW (cfg
, ins
, OP_OUTARG_VT
);
1430 ins
->opcode
= OP_OUTARG_VT
;
1431 ins
->sreg1
= in
->dreg
;
1432 ins
->klass
= in
->klass
;
1433 ins
->inst_p0
= call
;
1434 ins
->inst_p1
= mono_mempool_alloc (cfg
->mempool
, sizeof (ArgInfo
));
1435 memcpy (ins
->inst_p1
, ainfo
, sizeof (ArgInfo
));
1436 MONO_ADD_INS (cfg
->cbb
, ins
);
1437 } else if (ainfo
->regtype
== RegTypeStructByVal
) {
1438 /* this is further handled in mono_arch_emit_outarg_vt () */
1439 MONO_INST_NEW (cfg
, ins
, OP_OUTARG_VT
);
1440 ins
->opcode
= OP_OUTARG_VT
;
1441 ins
->sreg1
= in
->dreg
;
1442 ins
->klass
= in
->klass
;
1443 ins
->inst_p0
= call
;
1444 ins
->inst_p1
= mono_mempool_alloc (cfg
->mempool
, sizeof (ArgInfo
));
1445 memcpy (ins
->inst_p1
, ainfo
, sizeof (ArgInfo
));
1446 MONO_ADD_INS (cfg
->cbb
, ins
);
1447 } else if (ainfo
->regtype
== RegTypeBase
) {
1448 if (!t
->byref
&& ((t
->type
== MONO_TYPE_I8
) || (t
->type
== MONO_TYPE_U8
))) {
1449 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STOREI8_MEMBASE_REG
, ppc_r1
, ainfo
->offset
, in
->dreg
);
1450 } else if (!t
->byref
&& ((t
->type
== MONO_TYPE_R4
) || (t
->type
== MONO_TYPE_R8
))) {
1451 if (t
->type
== MONO_TYPE_R8
)
1452 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STORER8_MEMBASE_REG
, ppc_r1
, ainfo
->offset
, in
->dreg
);
1454 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STORER4_MEMBASE_REG
, ppc_r1
, ainfo
->offset
, in
->dreg
);
1456 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STORE_MEMBASE_REG
, ppc_r1
, ainfo
->offset
, in
->dreg
);
1458 } else if (ainfo
->regtype
== RegTypeFP
) {
1459 if (t
->type
== MONO_TYPE_VALUETYPE
) {
1460 /* this is further handled in mono_arch_emit_outarg_vt () */
1461 MONO_INST_NEW (cfg
, ins
, OP_OUTARG_VT
);
1462 ins
->opcode
= OP_OUTARG_VT
;
1463 ins
->sreg1
= in
->dreg
;
1464 ins
->klass
= in
->klass
;
1465 ins
->inst_p0
= call
;
1466 ins
->inst_p1
= mono_mempool_alloc (cfg
->mempool
, sizeof (ArgInfo
));
1467 memcpy (ins
->inst_p1
, ainfo
, sizeof (ArgInfo
));
1468 MONO_ADD_INS (cfg
->cbb
, ins
);
1470 cfg
->flags
|= MONO_CFG_HAS_FPOUT
;
1472 int dreg
= mono_alloc_freg (cfg
);
1474 if (ainfo
->size
== 4) {
1475 MONO_EMIT_NEW_UNALU (cfg
, OP_FCONV_TO_R4
, dreg
, in
->dreg
);
1477 MONO_INST_NEW (cfg
, ins
, OP_FMOVE
);
1479 ins
->sreg1
= in
->dreg
;
1480 MONO_ADD_INS (cfg
->cbb
, ins
);
1483 mono_call_inst_add_outarg_reg (cfg
, call
, dreg
, ainfo
->reg
, TRUE
);
1484 cfg
->flags
|= MONO_CFG_HAS_FPOUT
;
1487 g_assert_not_reached ();
1491 /* Emit the signature cookie in the case that there is no
1492 additional argument */
1493 if (!sig
->pinvoke
&& (sig
->call_convention
== MONO_CALL_VARARG
) && (n
== sig
->sentinelpos
))
1494 emit_sig_cookie (cfg
, call
, cinfo
);
1496 if (cinfo
->struct_ret
) {
1499 MONO_INST_NEW (cfg
, vtarg
, OP_MOVE
);
1500 vtarg
->sreg1
= call
->vret_var
->dreg
;
1501 vtarg
->dreg
= mono_alloc_preg (cfg
);
1502 MONO_ADD_INS (cfg
->cbb
, vtarg
);
1504 mono_call_inst_add_outarg_reg (cfg
, call
, vtarg
->dreg
, cinfo
->struct_ret
, FALSE
);
1507 call
->stack_usage
= cinfo
->stack_usage
;
1508 cfg
->param_area
= MAX (PPC_MINIMAL_PARAM_AREA_SIZE
, MAX (cfg
->param_area
, cinfo
->stack_usage
));
1509 cfg
->flags
|= MONO_CFG_HAS_CALLS
;
1515 mono_arch_emit_outarg_vt (MonoCompile
*cfg
, MonoInst
*ins
, MonoInst
*src
)
1517 MonoCallInst
*call
= (MonoCallInst
*)ins
->inst_p0
;
1518 ArgInfo
*ainfo
= ins
->inst_p1
;
1519 int ovf_size
= ainfo
->vtsize
;
1520 int doffset
= ainfo
->offset
;
1521 int i
, soffset
, dreg
;
1523 if (ainfo
->regtype
== RegTypeStructByVal
) {
		/*
		 * Darwin pinvokes need some special handling for 1
		 * and 2 byte arguments
		 */
1533 g_assert (ins
->klass
);
1534 if (call
->signature
->pinvoke
)
1535 size
= mono_class_native_size (ins
->klass
, NULL
);
1536 if (size
== 2 || size
== 1) {
1537 int tmpr
= mono_alloc_ireg (cfg
);
1539 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADI1_MEMBASE
, tmpr
, src
->dreg
, soffset
);
1541 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADI2_MEMBASE
, tmpr
, src
->dreg
, soffset
);
1542 dreg
= mono_alloc_ireg (cfg
);
1543 MONO_EMIT_NEW_UNALU (cfg
, OP_MOVE
, dreg
, tmpr
);
1544 mono_call_inst_add_outarg_reg (cfg
, call
, dreg
, ainfo
->reg
, FALSE
);
1547 for (i
= 0; i
< ainfo
->size
; ++i
) {
1548 int antipadding
= 0;
1551 antipadding
= sizeof (gpointer
) - ainfo
->bytes
;
1553 dreg
= mono_alloc_ireg (cfg
);
1554 MONO_EMIT_NEW_LOAD_MEMBASE (cfg
, dreg
, src
->dreg
, soffset
);
1556 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_SHR_UN_IMM
, dreg
, dreg
, antipadding
* 8);
1557 mono_call_inst_add_outarg_reg (cfg
, call
, dreg
, ainfo
->reg
+ i
, FALSE
);
1558 soffset
+= sizeof (gpointer
);
1561 mini_emit_memcpy (cfg
, ppc_r1
, doffset
+ soffset
, src
->dreg
, soffset
, ovf_size
* sizeof (gpointer
), 0);
1562 } else if (ainfo
->regtype
== RegTypeFP
) {
1563 int tmpr
= mono_alloc_freg (cfg
);
1564 if (ainfo
->size
== 4)
1565 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADR4_MEMBASE
, tmpr
, src
->dreg
, 0);
1567 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADR8_MEMBASE
, tmpr
, src
->dreg
, 0);
1568 dreg
= mono_alloc_freg (cfg
);
1569 MONO_EMIT_NEW_UNALU (cfg
, OP_FMOVE
, dreg
, tmpr
);
1570 mono_call_inst_add_outarg_reg (cfg
, call
, dreg
, ainfo
->reg
, TRUE
);
1572 MonoInst
*vtcopy
= mono_compile_create_var (cfg
, &src
->klass
->byval_arg
, OP_LOCAL
);
1576 /* FIXME: alignment? */
1577 if (call
->signature
->pinvoke
) {
1578 size
= mono_type_native_stack_size (&src
->klass
->byval_arg
, NULL
);
1579 vtcopy
->backend
.is_pinvoke
= 1;
1581 size
= mini_type_stack_size (cfg
->generic_sharing_context
, &src
->klass
->byval_arg
, NULL
);
1584 g_assert (ovf_size
> 0);
1586 EMIT_NEW_VARLOADA (cfg
, load
, vtcopy
, vtcopy
->inst_vtype
);
1587 mini_emit_memcpy (cfg
, load
->dreg
, 0, src
->dreg
, 0, size
, 0);
1590 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STORE_MEMBASE_REG
, ppc_r1
, ainfo
->offset
, load
->dreg
);
1592 mono_call_inst_add_outarg_reg (cfg
, call
, load
->dreg
, ainfo
->reg
, FALSE
);
1597 mono_arch_emit_setret (MonoCompile
*cfg
, MonoMethod
*method
, MonoInst
*val
)
1599 MonoType
*ret
= mini_type_get_underlying_type (cfg
->generic_sharing_context
,
1600 mono_method_signature (method
)->ret
);
1603 #ifndef __mono_ppc64__
1604 if (ret
->type
== MONO_TYPE_I8
|| ret
->type
== MONO_TYPE_U8
) {
1607 MONO_INST_NEW (cfg
, ins
, OP_SETLRET
);
1608 ins
->sreg1
= val
->dreg
+ 1;
1609 ins
->sreg2
= val
->dreg
+ 2;
1610 MONO_ADD_INS (cfg
->cbb
, ins
);
1614 if (ret
->type
== MONO_TYPE_R8
|| ret
->type
== MONO_TYPE_R4
) {
1615 MONO_EMIT_NEW_UNALU (cfg
, OP_FMOVE
, cfg
->ret
->dreg
, val
->dreg
);
1619 MONO_EMIT_NEW_UNALU (cfg
, OP_MOVE
, cfg
->ret
->dreg
, val
->dreg
);
1622 /* FIXME: this is just a useless hint: fix the interface to include the opcode */
1624 mono_arch_is_inst_imm (gint64 imm
)
1630 * Allow tracing to work with this interface (with an optional argument)
1634 mono_arch_instrument_prolog (MonoCompile
*cfg
, void *func
, void *p
, gboolean enable_arguments
)
1638 ppc_load (code
, ppc_r3
, cfg
->method
);
1639 ppc_li (code
, ppc_r4
, 0); /* NULL ebp for now */
1640 ppc_load_func (code
, ppc_r0
, func
);
1641 ppc_mtlr (code
, ppc_r0
);
1655 mono_arch_instrument_epilog (MonoCompile
*cfg
, void *func
, void *p
, gboolean enable_arguments
)
1658 int save_mode
= SAVE_NONE
;
1660 MonoMethod
*method
= cfg
->method
;
1661 int rtype
= mini_type_get_underlying_type (cfg
->generic_sharing_context
,
1662 mono_method_signature (method
)->ret
)->type
;
1663 int save_offset
= PPC_STACK_PARAM_OFFSET
+ cfg
->param_area
;
1667 offset
= code
- cfg
->native_code
;
1668 /* we need about 16 instructions */
1669 if (offset
> (cfg
->code_size
- 16 * 4)) {
1670 cfg
->code_size
*= 2;
1671 cfg
->native_code
= g_realloc (cfg
->native_code
, cfg
->code_size
);
1672 code
= cfg
->native_code
+ offset
;
1676 case MONO_TYPE_VOID
:
1677 /* special case string .ctor icall */
1678 if (strcmp (".ctor", method
->name
) && method
->klass
== mono_defaults
.string_class
)
1679 save_mode
= SAVE_ONE
;
1681 save_mode
= SAVE_NONE
;
1683 #ifndef __mono_ppc64__
1686 save_mode
= SAVE_TWO
;
1691 save_mode
= SAVE_FP
;
1693 case MONO_TYPE_VALUETYPE
:
1694 save_mode
= SAVE_STRUCT
;
1697 save_mode
= SAVE_ONE
;
1701 switch (save_mode
) {
1703 ppc_stw (code
, ppc_r3
, save_offset
, cfg
->frame_reg
);
1704 ppc_stw (code
, ppc_r4
, save_offset
+ 4, cfg
->frame_reg
);
1705 if (enable_arguments
) {
1706 ppc_mr (code
, ppc_r5
, ppc_r4
);
1707 ppc_mr (code
, ppc_r4
, ppc_r3
);
1711 ppc_store_reg (code
, ppc_r3
, save_offset
, cfg
->frame_reg
);
1712 if (enable_arguments
) {
1713 ppc_mr (code
, ppc_r4
, ppc_r3
);
1717 ppc_stfd (code
, ppc_f1
, save_offset
, cfg
->frame_reg
);
1718 if (enable_arguments
) {
1719 /* FIXME: what reg? */
1720 ppc_fmr (code
, ppc_f3
, ppc_f1
);
1721 /* FIXME: use 8 byte load on PPC64 */
1722 ppc_lwz (code
, ppc_r4
, save_offset
, cfg
->frame_reg
);
1723 ppc_lwz (code
, ppc_r5
, save_offset
+ 4, cfg
->frame_reg
);
1727 if (enable_arguments
) {
1728 /* FIXME: get the actual address */
1729 ppc_mr (code
, ppc_r4
, ppc_r3
);
1737 ppc_load (code
, ppc_r3
, cfg
->method
);
1738 ppc_load_func (code
, ppc_r0
, func
);
1739 ppc_mtlr (code
, ppc_r0
);
1742 switch (save_mode
) {
1744 ppc_lwz (code
, ppc_r3
, save_offset
, cfg
->frame_reg
);
1745 ppc_lwz (code
, ppc_r4
, save_offset
+ 4, cfg
->frame_reg
);
1748 ppc_load_reg (code
, ppc_r3
, save_offset
, cfg
->frame_reg
);
1751 ppc_lfd (code
, ppc_f1
, save_offset
, cfg
->frame_reg
);
/*
 * Conditional branches have a small offset, so if it is likely overflowed,
 * we do a branch to the end of the method (uncond branches have much larger
 * offsets) where we perform the conditional and jump back unconditionally.
 * It's slightly slower, since we add two uncond branches, but it's very simple
 * with the current patch implementation and such large methods are likely not
 * going to be perf critical anyway.
 */
1771 const char *exception
;
1778 #define EMIT_COND_BRANCH_FLAGS(ins,b0,b1) \
1779 if (ins->flags & MONO_INST_BRLABEL) { \
1780 if (0 && ins->inst_i0->inst_c0) { \
1781 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_i0->inst_c0) & 0xffff); \
1783 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_LABEL, ins->inst_i0); \
1784 ppc_bc (code, (b0), (b1), 0); \
1787 if (0 && ins->inst_true_bb->native_offset) { \
1788 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_true_bb->native_offset) & 0xffff); \
1790 int br_disp = ins->inst_true_bb->max_offset - offset; \
1791 if (!ppc_is_imm16 (br_disp + 1024) || !ppc_is_imm16 (br_disp - 1024)) { \
1792 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1793 ovfj->data.bb = ins->inst_true_bb; \
1794 ovfj->ip_offset = 0; \
1795 ovfj->b0_cond = (b0); \
1796 ovfj->b1_cond = (b1); \
1797 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB_OVF, ovfj); \
1800 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
1801 ppc_bc (code, (b0), (b1), 0); \
1806 #define EMIT_COND_BRANCH(ins,cond) EMIT_COND_BRANCH_FLAGS(ins, branch_b0_table [(cond)], branch_b1_table [(cond)])
1808 /* emit an exception if condition is fail
1810 * We assign the extra code used to throw the implicit exceptions
1811 * to cfg->bb_exit as far as the big branch handling is concerned
1813 #define EMIT_COND_SYSTEM_EXCEPTION_FLAGS(b0,b1,exc_name) \
1815 int br_disp = cfg->bb_exit->max_offset - offset; \
1816 if (!ppc_is_imm16 (br_disp + 1024) || !ppc_is_imm16 (br_disp - 1024)) { \
1817 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1818 ovfj->data.exception = (exc_name); \
1819 ovfj->ip_offset = code - cfg->native_code; \
1820 ovfj->b0_cond = (b0); \
1821 ovfj->b1_cond = (b1); \
1822 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_OVF, ovfj); \
1824 cfg->bb_exit->max_offset += 24; \
1826 mono_add_patch_info (cfg, code - cfg->native_code, \
1827 MONO_PATCH_INFO_EXC, exc_name); \
1828 ppc_bcl (code, (b0), (b1), 0); \
1832 #define EMIT_COND_SYSTEM_EXCEPTION(cond,exc_name) EMIT_COND_SYSTEM_EXCEPTION_FLAGS(branch_b0_table [(cond)], branch_b1_table [(cond)], (exc_name))
1835 mono_arch_peephole_pass_1 (MonoCompile
*cfg
, MonoBasicBlock
*bb
)
1840 normalize_opcode (int opcode
)
1843 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMBASE
, OP_LOADI8_MEMBASE
):
1844 return OP_LOAD_MEMBASE
;
1845 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMINDEX
, OP_LOADI8_MEMINDEX
):
1846 return OP_LOAD_MEMINDEX
;
1847 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_REG
, OP_STOREI8_MEMBASE_REG
):
1848 return OP_STORE_MEMBASE_REG
;
1849 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_IMM
, OP_STOREI8_MEMBASE_IMM
):
1850 return OP_STORE_MEMBASE_IMM
;
1851 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMINDEX
, OP_STOREI8_MEMINDEX
):
1852 return OP_STORE_MEMINDEX
;
1853 case MONO_PPC_32_64_CASE (OP_ISHR_IMM
, OP_LSHR_IMM
):
1855 case MONO_PPC_32_64_CASE (OP_ISHR_UN_IMM
, OP_LSHR_UN_IMM
):
1856 return OP_SHR_UN_IMM
;
1863 mono_arch_peephole_pass_2 (MonoCompile
*cfg
, MonoBasicBlock
*bb
)
1865 MonoInst
*ins
, *n
, *last_ins
= NULL
;
1867 MONO_BB_FOR_EACH_INS_SAFE (bb
, n
, ins
) {
1868 switch (normalize_opcode (ins
->opcode
)) {
1870 /* remove unnecessary multiplication with 1 */
1871 if (ins
->inst_imm
== 1) {
1872 if (ins
->dreg
!= ins
->sreg1
) {
1873 ins
->opcode
= OP_MOVE
;
1875 MONO_DELETE_INS (bb
, ins
);
1879 int power2
= mono_is_power_of_two (ins
->inst_imm
);
1881 ins
->opcode
= OP_SHL_IMM
;
1882 ins
->inst_imm
= power2
;
1886 case OP_LOAD_MEMBASE
:
1888 * OP_STORE_MEMBASE_REG reg, offset(basereg)
1889 * OP_LOAD_MEMBASE offset(basereg), reg
1891 if (last_ins
&& normalize_opcode (last_ins
->opcode
) == OP_STORE_MEMBASE_REG
&&
1892 ins
->inst_basereg
== last_ins
->inst_destbasereg
&&
1893 ins
->inst_offset
== last_ins
->inst_offset
) {
1894 if (ins
->dreg
== last_ins
->sreg1
) {
1895 MONO_DELETE_INS (bb
, ins
);
1898 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1899 ins
->opcode
= OP_MOVE
;
1900 ins
->sreg1
= last_ins
->sreg1
;
1904 * Note: reg1 must be different from the basereg in the second load
1905 * OP_LOAD_MEMBASE offset(basereg), reg1
1906 * OP_LOAD_MEMBASE offset(basereg), reg2
1908 * OP_LOAD_MEMBASE offset(basereg), reg1
1909 * OP_MOVE reg1, reg2
1911 } else if (last_ins
&& normalize_opcode (last_ins
->opcode
) == OP_LOAD_MEMBASE
&&
1912 ins
->inst_basereg
!= last_ins
->dreg
&&
1913 ins
->inst_basereg
== last_ins
->inst_basereg
&&
1914 ins
->inst_offset
== last_ins
->inst_offset
) {
1916 if (ins
->dreg
== last_ins
->dreg
) {
1917 MONO_DELETE_INS (bb
, ins
);
1920 ins
->opcode
= OP_MOVE
;
1921 ins
->sreg1
= last_ins
->dreg
;
1924 //g_assert_not_reached ();
1928 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1929 * OP_LOAD_MEMBASE offset(basereg), reg
1931 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1932 * OP_ICONST reg, imm
1934 } else if (last_ins
&& normalize_opcode (last_ins
->opcode
) == OP_STORE_MEMBASE_IMM
&&
1935 ins
->inst_basereg
== last_ins
->inst_destbasereg
&&
1936 ins
->inst_offset
== last_ins
->inst_offset
) {
1937 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1938 ins
->opcode
= OP_ICONST
;
1939 ins
->inst_c0
= last_ins
->inst_imm
;
1940 g_assert_not_reached (); // check this rule
1944 case OP_LOADU1_MEMBASE
:
1945 case OP_LOADI1_MEMBASE
:
1946 if (last_ins
&& (last_ins
->opcode
== OP_STOREI1_MEMBASE_REG
) &&
1947 ins
->inst_basereg
== last_ins
->inst_destbasereg
&&
1948 ins
->inst_offset
== last_ins
->inst_offset
) {
1949 ins
->opcode
= (ins
->opcode
== OP_LOADI1_MEMBASE
) ? OP_ICONV_TO_I1
: OP_ICONV_TO_U1
;
1950 ins
->sreg1
= last_ins
->sreg1
;
1953 case OP_LOADU2_MEMBASE
:
1954 case OP_LOADI2_MEMBASE
:
1955 if (last_ins
&& (last_ins
->opcode
== OP_STOREI2_MEMBASE_REG
) &&
1956 ins
->inst_basereg
== last_ins
->inst_destbasereg
&&
1957 ins
->inst_offset
== last_ins
->inst_offset
) {
1958 ins
->opcode
= (ins
->opcode
== OP_LOADI2_MEMBASE
) ? OP_ICONV_TO_I2
: OP_ICONV_TO_U2
;
1959 ins
->sreg1
= last_ins
->sreg1
;
1962 #ifdef __mono_ppc64__
1963 case OP_LOADU4_MEMBASE
:
1964 case OP_LOADI4_MEMBASE
:
1965 if (last_ins
&& (last_ins
->opcode
== OP_STOREI4_MEMBASE_REG
) &&
1966 ins
->inst_basereg
== last_ins
->inst_destbasereg
&&
1967 ins
->inst_offset
== last_ins
->inst_offset
) {
1968 ins
->opcode
= (ins
->opcode
== OP_LOADI4_MEMBASE
) ? OP_ICONV_TO_I4
: OP_ICONV_TO_U4
;
1969 ins
->sreg1
= last_ins
->sreg1
;
1974 ins
->opcode
= OP_MOVE
;
1978 if (ins
->dreg
== ins
->sreg1
) {
1979 MONO_DELETE_INS (bb
, ins
);
1983 * OP_MOVE sreg, dreg
1984 * OP_MOVE dreg, sreg
1986 if (last_ins
&& last_ins
->opcode
== OP_MOVE
&&
1987 ins
->sreg1
== last_ins
->dreg
&&
1988 ins
->dreg
== last_ins
->sreg1
) {
1989 MONO_DELETE_INS (bb
, ins
);
1997 bb
->last_ins
= last_ins
;
2001 mono_arch_decompose_opts (MonoCompile
*cfg
, MonoInst
*ins
)
2003 switch (ins
->opcode
) {
2004 case OP_ICONV_TO_R_UN
: {
2005 static const guint64 adjust_val
= 0x4330000000000000ULL
;
2006 int msw_reg
= mono_alloc_ireg (cfg
);
2007 int adj_reg
= mono_alloc_freg (cfg
);
2008 int tmp_reg
= mono_alloc_freg (cfg
);
2009 int basereg
= ppc_sp
;
2011 MONO_EMIT_NEW_ICONST (cfg
, msw_reg
, 0x43300000);
2012 if (!ppc_is_imm16 (offset
+ 4)) {
2013 basereg
= mono_alloc_ireg (cfg
);
2014 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_IADD_IMM
, basereg
, cfg
->frame_reg
, offset
);
2016 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STOREI4_MEMBASE_REG
, basereg
, offset
, msw_reg
);
2017 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STOREI4_MEMBASE_REG
, basereg
, offset
+ 4, ins
->sreg1
);
2018 MONO_EMIT_NEW_LOAD_R8 (cfg
, adj_reg
, &adjust_val
);
2019 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADR8_MEMBASE
, tmp_reg
, basereg
, offset
);
2020 MONO_EMIT_NEW_BIALU (cfg
, OP_FSUB
, ins
->dreg
, tmp_reg
, adj_reg
);
2021 ins
->opcode
= OP_NOP
;
2024 #ifndef __mono_ppc64__
2025 case OP_ICONV_TO_R4
:
2026 case OP_ICONV_TO_R8
: {
2027 /* FIXME: change precision for CEE_CONV_R4 */
2028 static const guint64 adjust_val
= 0x4330000080000000ULL
;
2029 int msw_reg
= mono_alloc_ireg (cfg
);
2030 int xored
= mono_alloc_ireg (cfg
);
2031 int adj_reg
= mono_alloc_freg (cfg
);
2032 int tmp_reg
= mono_alloc_freg (cfg
);
2033 int basereg
= ppc_sp
;
2035 if (!ppc_is_imm16 (offset
+ 4)) {
2036 basereg
= mono_alloc_ireg (cfg
);
2037 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_IADD_IMM
, basereg
, cfg
->frame_reg
, offset
);
2039 MONO_EMIT_NEW_ICONST (cfg
, msw_reg
, 0x43300000);
2040 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STOREI4_MEMBASE_REG
, basereg
, offset
, msw_reg
);
2041 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_XOR_IMM
, xored
, ins
->sreg1
, 0x80000000);
2042 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STOREI4_MEMBASE_REG
, basereg
, offset
+ 4, xored
);
2043 MONO_EMIT_NEW_LOAD_R8 (cfg
, adj_reg
, (gpointer
)&adjust_val
);
2044 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADR8_MEMBASE
, tmp_reg
, basereg
, offset
);
2045 MONO_EMIT_NEW_BIALU (cfg
, OP_FSUB
, ins
->dreg
, tmp_reg
, adj_reg
);
2046 if (ins
->opcode
== OP_ICONV_TO_R4
)
2047 MONO_EMIT_NEW_UNALU (cfg
, OP_FCONV_TO_R4
, ins
->dreg
, ins
->dreg
);
2048 ins
->opcode
= OP_NOP
;
2053 int msw_reg
= mono_alloc_ireg (cfg
);
2054 int basereg
= ppc_sp
;
2056 if (!ppc_is_imm16 (offset
+ 4)) {
2057 basereg
= mono_alloc_ireg (cfg
);
2058 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_IADD_IMM
, basereg
, cfg
->frame_reg
, offset
);
2060 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STORER8_MEMBASE_REG
, basereg
, offset
, ins
->sreg1
);
2061 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADI4_MEMBASE
, msw_reg
, basereg
, offset
);
2062 MONO_EMIT_NEW_UNALU (cfg
, OP_CHECK_FINITE
, -1, msw_reg
);
2063 MONO_EMIT_NEW_UNALU (cfg
, OP_FMOVE
, ins
->dreg
, ins
->sreg1
);
2064 ins
->opcode
= OP_NOP
;
2067 #ifdef __mono_ppc64__
2069 case OP_IADD_OVF_UN
:
2071 int shifted1_reg
= mono_alloc_ireg (cfg
);
2072 int shifted2_reg
= mono_alloc_ireg (cfg
);
2073 int result_shifted_reg
= mono_alloc_ireg (cfg
);
2075 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_SHL_IMM
, shifted1_reg
, ins
->sreg1
, 32);
2076 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_SHL_IMM
, shifted2_reg
, ins
->sreg2
, 32);
2077 MONO_EMIT_NEW_BIALU (cfg
, ins
->opcode
, result_shifted_reg
, shifted1_reg
, shifted2_reg
);
2078 if (ins
->opcode
== OP_IADD_OVF_UN
)
2079 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_SHR_UN_IMM
, ins
->dreg
, result_shifted_reg
, 32);
2081 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_SHR_IMM
, ins
->dreg
, result_shifted_reg
, 32);
2082 ins
->opcode
= OP_NOP
;
2089 * the branch_b0_table should maintain the order of these
2103 branch_b0_table
[] = {
2118 branch_b1_table
[] = {
2132 #define NEW_INS(cfg,dest,op) do { \
2133 MONO_INST_NEW((cfg), (dest), (op)); \
2134 mono_bblock_insert_after_ins (bb, last_ins, (dest)); \
2138 map_to_reg_reg_op (int op
)
2147 case OP_COMPARE_IMM
:
2149 case OP_ICOMPARE_IMM
:
2151 case OP_LCOMPARE_IMM
:
2167 case OP_LOAD_MEMBASE
:
2168 return OP_LOAD_MEMINDEX
;
2169 case OP_LOADI4_MEMBASE
:
2170 return OP_LOADI4_MEMINDEX
;
2171 case OP_LOADU4_MEMBASE
:
2172 return OP_LOADU4_MEMINDEX
;
2173 case OP_LOADI8_MEMBASE
:
2174 return OP_LOADI8_MEMINDEX
;
2175 case OP_LOADU1_MEMBASE
:
2176 return OP_LOADU1_MEMINDEX
;
2177 case OP_LOADI2_MEMBASE
:
2178 return OP_LOADI2_MEMINDEX
;
2179 case OP_LOADU2_MEMBASE
:
2180 return OP_LOADU2_MEMINDEX
;
2181 case OP_LOADI1_MEMBASE
:
2182 return OP_LOADI1_MEMINDEX
;
2183 case OP_LOADR4_MEMBASE
:
2184 return OP_LOADR4_MEMINDEX
;
2185 case OP_LOADR8_MEMBASE
:
2186 return OP_LOADR8_MEMINDEX
;
2187 case OP_STOREI1_MEMBASE_REG
:
2188 return OP_STOREI1_MEMINDEX
;
2189 case OP_STOREI2_MEMBASE_REG
:
2190 return OP_STOREI2_MEMINDEX
;
2191 case OP_STOREI4_MEMBASE_REG
:
2192 return OP_STOREI4_MEMINDEX
;
2193 case OP_STOREI8_MEMBASE_REG
:
2194 return OP_STOREI8_MEMINDEX
;
2195 case OP_STORE_MEMBASE_REG
:
2196 return OP_STORE_MEMINDEX
;
2197 case OP_STORER4_MEMBASE_REG
:
2198 return OP_STORER4_MEMINDEX
;
2199 case OP_STORER8_MEMBASE_REG
:
2200 return OP_STORER8_MEMINDEX
;
2201 case OP_STORE_MEMBASE_IMM
:
2202 return OP_STORE_MEMBASE_REG
;
2203 case OP_STOREI1_MEMBASE_IMM
:
2204 return OP_STOREI1_MEMBASE_REG
;
2205 case OP_STOREI2_MEMBASE_IMM
:
2206 return OP_STOREI2_MEMBASE_REG
;
2207 case OP_STOREI4_MEMBASE_IMM
:
2208 return OP_STOREI4_MEMBASE_REG
;
2209 case OP_STOREI8_MEMBASE_IMM
:
2210 return OP_STOREI8_MEMBASE_REG
;
2212 return mono_op_imm_to_op (op
);
2215 //#define map_to_reg_reg_op(op) (cfg->new_ir? mono_op_imm_to_op (op): map_to_reg_reg_op (op))
2217 #define compare_opcode_is_unsigned(opcode) \
2218 (((opcode) >= CEE_BNE_UN && (opcode) <= CEE_BLT_UN) || \
2219 ((opcode) >= OP_IBNE_UN && (opcode) <= OP_IBLT_UN) || \
2220 ((opcode) >= OP_LBNE_UN && (opcode) <= OP_LBLT_UN) || \
2221 ((opcode) >= OP_COND_EXC_NE_UN && (opcode) <= OP_COND_EXC_LT_UN) || \
2222 ((opcode) >= OP_COND_EXC_INE_UN && (opcode) <= OP_COND_EXC_ILT_UN) || \
2223 ((opcode) == OP_CLT_UN || (opcode) == OP_CGT_UN || \
2224 (opcode) == OP_ICLT_UN || (opcode) == OP_ICGT_UN || \
2225 (opcode) == OP_LCLT_UN || (opcode) == OP_LCGT_UN))
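/* Illustrative example: for a "compare_imm; ibge_un" pair the macro reports
 * the consumer as unsigned, so mono_arch_lowering_pass () below validates the
 * immediate with ppc_is_uimm16 (); for the signed "ibge" case it uses
 * ppc_is_imm16 () instead. */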
/*
 * Remove from the instruction list the instructions that can't be
 * represented with very simple instructions with no register
 * requirements.
 */
void
mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
{
2235 MonoInst
*ins
, *next
, *temp
, *last_ins
= NULL
;
2238 MONO_BB_FOR_EACH_INS (bb
, ins
) {
2240 switch (ins
->opcode
) {
2241 case OP_IDIV_UN_IMM
:
2244 case OP_IREM_UN_IMM
:
2245 NEW_INS (cfg
, temp
, OP_ICONST
);
2246 temp
->inst_c0
= ins
->inst_imm
;
2247 temp
->dreg
= mono_alloc_ireg (cfg
);
2248 ins
->sreg2
= temp
->dreg
;
2249 if (ins
->opcode
== OP_IDIV_IMM
)
2250 ins
->opcode
= OP_IDIV
;
2251 else if (ins
->opcode
== OP_IREM_IMM
)
2252 ins
->opcode
= OP_IREM
;
2253 else if (ins
->opcode
== OP_IDIV_UN_IMM
)
2254 ins
->opcode
= OP_IDIV_UN
;
2255 else if (ins
->opcode
== OP_IREM_UN_IMM
)
2256 ins
->opcode
= OP_IREM_UN
;
2258 /* handle rem separately */
2262 CASE_PPC64 (OP_LREM
)
2263 CASE_PPC64 (OP_LREM_UN
) {
2265 /* we change a rem dest, src1, src2 to
2266 * div temp1, src1, src2
2267 * mul temp2, temp1, src2
2268 * sub dest, src1, temp2
2270 if (ins
->opcode
== OP_IREM
|| ins
->opcode
== OP_IREM_UN
) {
2271 NEW_INS (cfg
, mul
, OP_IMUL
);
2272 NEW_INS (cfg
, temp
, ins
->opcode
== OP_IREM
? OP_IDIV
: OP_IDIV_UN
);
2273 ins
->opcode
= OP_ISUB
;
2275 NEW_INS (cfg
, mul
, OP_LMUL
);
2276 NEW_INS (cfg
, temp
, ins
->opcode
== OP_LREM
? OP_LDIV
: OP_LDIV_UN
);
2277 ins
->opcode
= OP_LSUB
;
2279 temp
->sreg1
= ins
->sreg1
;
2280 temp
->sreg2
= ins
->sreg2
;
2281 temp
->dreg
= mono_alloc_ireg (cfg
);
2282 mul
->sreg1
= temp
->dreg
;
2283 mul
->sreg2
= ins
->sreg2
;
2284 mul
->dreg
= mono_alloc_ireg (cfg
);
2285 ins
->sreg2
= mul
->dreg
;
2289 CASE_PPC64 (OP_LADD_IMM
)
2292 if (!ppc_is_imm16 (ins
->inst_imm
)) {
2293 NEW_INS (cfg
, temp
, OP_ICONST
);
2294 temp
->inst_c0
= ins
->inst_imm
;
2295 temp
->dreg
= mono_alloc_ireg (cfg
);
2296 ins
->sreg2
= temp
->dreg
;
2297 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2301 CASE_PPC64 (OP_LSUB_IMM
)
2303 if (!ppc_is_imm16 (-ins
->inst_imm
)) {
2304 NEW_INS (cfg
, temp
, OP_ICONST
);
2305 temp
->inst_c0
= ins
->inst_imm
;
2306 temp
->dreg
= mono_alloc_ireg (cfg
);
2307 ins
->sreg2
= temp
->dreg
;
2308 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2320 gboolean is_imm
= ((ins
->inst_imm
& 0xffff0000) && (ins
->inst_imm
& 0xffff));
2321 #ifdef __mono_ppc64__
2322 if (ins
->inst_imm
& 0xffffffff00000000UL
)
2326 NEW_INS (cfg
, temp
, OP_ICONST
);
2327 temp
->inst_c0
= ins
->inst_imm
;
2328 temp
->dreg
= mono_alloc_ireg (cfg
);
2329 ins
->sreg2
= temp
->dreg
;
2330 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2339 NEW_INS (cfg
, temp
, OP_ICONST
);
2340 temp
->inst_c0
= ins
->inst_imm
;
2341 temp
->dreg
= mono_alloc_ireg (cfg
);
2342 ins
->sreg2
= temp
->dreg
;
2343 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2345 case OP_COMPARE_IMM
:
2346 case OP_ICOMPARE_IMM
:
2347 CASE_PPC64 (OP_LCOMPARE_IMM
)
2349 /* Branch opts can eliminate the branch */
2350 if (!next
|| (!(MONO_IS_COND_BRANCH_OP (next
) || MONO_IS_COND_EXC (next
) || MONO_IS_SETCC (next
)))) {
2351 ins
->opcode
= OP_NOP
;
2355 if (compare_opcode_is_unsigned (next
->opcode
)) {
2356 if (!ppc_is_uimm16 (ins
->inst_imm
)) {
2357 NEW_INS (cfg
, temp
, OP_ICONST
);
2358 temp
->inst_c0
= ins
->inst_imm
;
2359 temp
->dreg
= mono_alloc_ireg (cfg
);
2360 ins
->sreg2
= temp
->dreg
;
2361 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2364 if (!ppc_is_imm16 (ins
->inst_imm
)) {
2365 NEW_INS (cfg
, temp
, OP_ICONST
);
2366 temp
->inst_c0
= ins
->inst_imm
;
2367 temp
->dreg
= mono_alloc_ireg (cfg
);
2368 ins
->sreg2
= temp
->dreg
;
2369 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2375 if (ins
->inst_imm
== 1) {
2376 ins
->opcode
= OP_MOVE
;
2379 if (ins
->inst_imm
== 0) {
2380 ins
->opcode
= OP_ICONST
;
2384 imm
= mono_is_power_of_two (ins
->inst_imm
);
2386 ins
->opcode
= OP_SHL_IMM
;
2387 ins
->inst_imm
= imm
;
2390 if (!ppc_is_imm16 (ins
->inst_imm
)) {
2391 NEW_INS (cfg
, temp
, OP_ICONST
);
2392 temp
->inst_c0
= ins
->inst_imm
;
2393 temp
->dreg
= mono_alloc_ireg (cfg
);
2394 ins
->sreg2
= temp
->dreg
;
2395 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2398 case OP_LOCALLOC_IMM
:
2399 NEW_INS (cfg
, temp
, OP_ICONST
);
2400 temp
->inst_c0
= ins
->inst_imm
;
2401 temp
->dreg
= mono_alloc_ireg (cfg
);
2402 ins
->sreg1
= temp
->dreg
;
2403 ins
->opcode
= OP_LOCALLOC
;
2405 case OP_LOAD_MEMBASE
:
2406 case OP_LOADI4_MEMBASE
:
2407 CASE_PPC64 (OP_LOADI8_MEMBASE
)
2408 case OP_LOADU4_MEMBASE
:
2409 case OP_LOADI2_MEMBASE
:
2410 case OP_LOADU2_MEMBASE
:
2411 case OP_LOADI1_MEMBASE
:
2412 case OP_LOADU1_MEMBASE
:
2413 case OP_LOADR4_MEMBASE
:
2414 case OP_LOADR8_MEMBASE
:
2415 case OP_STORE_MEMBASE_REG
:
2416 CASE_PPC64 (OP_STOREI8_MEMBASE_REG
)
2417 case OP_STOREI4_MEMBASE_REG
:
2418 case OP_STOREI2_MEMBASE_REG
:
2419 case OP_STOREI1_MEMBASE_REG
:
2420 case OP_STORER4_MEMBASE_REG
:
2421 case OP_STORER8_MEMBASE_REG
:
2422 /* we can do two things: load the immed in a register
2423 * and use an indexed load, or see if the immed can be
2424 * represented as an ad_imm + a load with a smaller offset
2425 * that fits. We just do the first for now, optimize later.
2427 if (ppc_is_imm16 (ins
->inst_offset
))
2429 NEW_INS (cfg
, temp
, OP_ICONST
);
2430 temp
->inst_c0
= ins
->inst_offset
;
2431 temp
->dreg
= mono_alloc_ireg (cfg
);
2432 ins
->sreg2
= temp
->dreg
;
2433 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2435 case OP_STORE_MEMBASE_IMM
:
2436 case OP_STOREI1_MEMBASE_IMM
:
2437 case OP_STOREI2_MEMBASE_IMM
:
2438 case OP_STOREI4_MEMBASE_IMM
:
2439 CASE_PPC64 (OP_STOREI8_MEMBASE_IMM
)
2440 NEW_INS (cfg
, temp
, OP_ICONST
);
2441 temp
->inst_c0
= ins
->inst_imm
;
2442 temp
->dreg
= mono_alloc_ireg (cfg
);
2443 ins
->sreg1
= temp
->dreg
;
2444 ins
->opcode
= map_to_reg_reg_op (ins
->opcode
);
2446 goto loop_start
; /* make it handle the possibly big ins->inst_offset */
2449 NEW_INS (cfg
, temp
, OP_ICONST
);
2450 temp
->inst_c0
= (gulong
)ins
->inst_p0
;
2451 temp
->dreg
= mono_alloc_ireg (cfg
);
2452 ins
->inst_basereg
= temp
->dreg
;
2453 ins
->inst_offset
= 0;
2454 ins
->opcode
= ins
->opcode
== OP_R4CONST
? OP_LOADR4_MEMBASE
: OP_LOADR8_MEMBASE
;
2456 /* make it handle the possibly big ins->inst_offset
2457 * later optimize to use lis + load_membase
2463 bb
->last_ins
= last_ins
;
2464 bb
->max_vreg
= cfg
->next_vreg
;
2468 emit_float_to_int (MonoCompile
*cfg
, guchar
*code
, int dreg
, int sreg
, int size
, gboolean is_signed
)
2470 long offset
= cfg
->arch
.fp_conv_var_offset
;
2472 /* sreg is a float, dreg is an integer reg. ppc_f0 is used a scratch */
2473 #ifdef __mono_ppc64__
2475 ppc_fctidz (code
, ppc_f0
, sreg
);
2480 ppc_fctiwz (code
, ppc_f0
, sreg
);
2483 if (ppc_is_imm16 (offset
+ sub_offset
)) {
2484 ppc_stfd (code
, ppc_f0
, offset
, cfg
->frame_reg
);
2486 ppc_load_reg (code
, dreg
, offset
+ sub_offset
, cfg
->frame_reg
);
2488 ppc_lwz (code
, dreg
, offset
+ sub_offset
, cfg
->frame_reg
);
2490 ppc_load (code
, dreg
, offset
);
2491 ppc_add (code
, dreg
, dreg
, cfg
->frame_reg
);
2492 ppc_stfd (code
, ppc_f0
, 0, dreg
);
2494 ppc_load_reg (code
, dreg
, sub_offset
, dreg
);
2496 ppc_lwz (code
, dreg
, sub_offset
, dreg
);
2500 ppc_andid (code
, dreg
, dreg
, 0xff);
2502 ppc_andid (code
, dreg
, dreg
, 0xffff);
2503 #ifdef __mono_ppc64__
2505 ppc_clrldi (code
, dreg
, dreg
, 32);
2509 ppc_extsb (code
, dreg
, dreg
);
2511 ppc_extsh (code
, dreg
, dreg
);
2512 #ifdef __mono_ppc64__
2514 ppc_extsw (code
, dreg
, dreg
);
2522 const guchar
*target
;
2527 #define is_call_imm(diff) ((glong)(diff) >= -33554432 && (glong)(diff) <= 33554431)
2530 search_thunk_slot (void *data
, int csize
, int bsize
, void *user_data
) {
2531 #ifdef __mono_ppc64__
2532 g_assert_not_reached ();
2534 PatchData
*pdata
= (PatchData
*)user_data
;
2535 guchar
*code
= data
;
2536 guint32
*thunks
= data
;
2537 guint32
*endthunks
= (guint32
*)(code
+ bsize
);
2541 int difflow
, diffhigh
;
2543 /* always ensure a call from pdata->code can reach to the thunks without further thunks */
2544 difflow
= (char*)pdata
->code
- (char*)thunks
;
2545 diffhigh
= (char*)pdata
->code
- (char*)endthunks
;
2546 if (!((is_call_imm (thunks
) && is_call_imm (endthunks
)) || (is_call_imm (difflow
) && is_call_imm (diffhigh
))))
2549 templ
= (guchar
*)load
;
2550 ppc_load_sequence (templ
, ppc_r0
, pdata
->target
);
2552 //g_print ("thunk nentries: %d\n", ((char*)endthunks - (char*)thunks)/16);
2553 if ((pdata
->found
== 2) || (pdata
->code
>= code
&& pdata
->code
<= code
+ csize
)) {
2554 while (thunks
< endthunks
) {
2555 //g_print ("looking for target: %p at %p (%08x-%08x)\n", pdata->target, thunks, thunks [0], thunks [1]);
2556 if ((thunks
[0] == load
[0]) && (thunks
[1] == load
[1])) {
2557 ppc_patch (pdata
->code
, (guchar
*)thunks
);
2560 static int num_thunks = 0;
2562 if ((num_thunks % 20) == 0)
2563 g_print ("num_thunks lookup: %d\n", num_thunks);
2566 } else if ((thunks
[0] == 0) && (thunks
[1] == 0)) {
2567 /* found a free slot instead: emit thunk */
2568 code
= (guchar
*)thunks
;
2569 ppc_lis (code
, ppc_r0
, (gulong
)(pdata
->target
) >> 16);
2570 ppc_ori (code
, ppc_r0
, ppc_r0
, (gulong
)(pdata
->target
) & 0xffff);
2571 ppc_mtctr (code
, ppc_r0
);
2572 ppc_bcctr (code
, PPC_BR_ALWAYS
, 0);
2573 mono_arch_flush_icache ((guchar
*)thunks
, 16);
2575 ppc_patch (pdata
->code
, (guchar
*)thunks
);
2578 static int num_thunks = 0;
2580 if ((num_thunks % 20) == 0)
2581 g_print ("num_thunks: %d\n", num_thunks);
2585 /* skip 16 bytes, the size of the thunk */
2589 //g_print ("failed thunk lookup for %p from %p at %p (%d entries)\n", pdata->target, pdata->code, data, count);
2596 handle_thunk (int absolute
, guchar
*code
, const guchar
*target
) {
2597 MonoDomain
*domain
= mono_domain_get ();
2601 pdata
.target
= target
;
2602 pdata
.absolute
= absolute
;
2605 mono_domain_lock (domain
);
2606 mono_domain_code_foreach (domain
, search_thunk_slot
, &pdata
);
2609 /* this uses the first available slot */
2611 mono_domain_code_foreach (domain
, search_thunk_slot
, &pdata
);
2613 mono_domain_unlock (domain
);
2615 if (pdata
.found
!= 1)
2616 g_print ("thunk failed for %p from %p\n", target
, code
);
2617 g_assert (pdata
.found
== 1);
2621 patch_ins (guint8
*code
, guint32 ins
)
2623 *(guint32
*)code
= ins
;
2624 mono_arch_flush_icache (code
, 4);
2628 ppc_patch_full (guchar
*code
, const guchar
*target
, gboolean is_fd
)
2630 guint32 ins
= *(guint32
*)code
;
2631 guint32 prim
= ins
>> 26;
2634 //g_print ("patching 0x%08x (0x%08x) to point to 0x%08x\n", code, ins, target);
2636 // prefer relative branches, they are more position independent (e.g. for AOT compilation).
2637 gint diff
= target
- code
;
2640 if (diff
<= 33554431){
2641 ins
= (18 << 26) | (diff
) | (ins
& 1);
2642 patch_ins (code
, ins
);
2646 /* diff between 0 and -33554432 */
2647 if (diff
>= -33554432){
2648 ins
= (18 << 26) | (diff
& ~0xfc000000) | (ins
& 1);
2649 patch_ins (code
, ins
);
2654 if ((glong
)target
>= 0){
2655 if ((glong
)target
<= 33554431){
2656 ins
= (18 << 26) | ((gulong
) target
) | (ins
& 1) | 2;
2657 patch_ins (code
, ins
);
2661 if ((glong
)target
>= -33554432){
2662 ins
= (18 << 26) | (((gulong
)target
) & ~0xfc000000) | (ins
& 1) | 2;
2663 patch_ins (code
, ins
);
2668 handle_thunk (TRUE
, code
, target
);
2671 g_assert_not_reached ();
2679 guint32 li
= (gulong
)target
;
2680 ins
= (ins
& 0xffff0000) | (ins
& 3);
2681 ovf
= li
& 0xffff0000;
2682 if (ovf
!= 0 && ovf
!= 0xffff0000)
2683 g_assert_not_reached ();
2686 // FIXME: assert the top bits of li are 0
2688 gint diff
= target
- code
;
2689 ins
= (ins
& 0xffff0000) | (ins
& 3);
2690 ovf
= diff
& 0xffff0000;
2691 if (ovf
!= 0 && ovf
!= 0xffff0000)
2692 g_assert_not_reached ();
2696 patch_ins (code
, ins
);
2700 if (prim
== 15 || ins
== 0x4e800021 || ins
== 0x4e800020 || ins
== 0x4e800420) {
2701 #ifdef __mono_ppc64__
2702 guint32
*seq
= (guint32
*)code
;
2703 guint32
*branch_ins
;
2705 /* the trampoline code will try to patch the blrl, blr, bcctr */
2706 if (ins
== 0x4e800021 || ins
== 0x4e800020 || ins
== 0x4e800420) {
2708 if (ppc_opcode (seq
[-3]) == 58 || ppc_opcode (seq
[-3]) == 31) /* ld || mr */
2713 if (ppc_opcode (seq
[5]) == 58 || ppc_opcode (seq
[5]) == 31) /* ld || mr */
2714 branch_ins
= seq
+ 8;
2716 branch_ins
= seq
+ 6;
2719 seq
= (guint32
*)code
;
2720 /* this is the lis/ori/sldi/oris/ori/(ld/ld|mr/nop)/mtlr/blrl sequence */
2721 g_assert (mono_ppc_is_direct_call_sequence (branch_ins
));
2723 if (ppc_opcode (seq
[5]) == 58) { /* ld */
2724 g_assert (ppc_opcode (seq
[6]) == 58); /* ld */
2727 guint8
*buf
= (guint8
*)&seq
[5];
2728 ppc_mr (buf
, ppc_r0
, ppc_r11
);
2733 target
= mono_get_addr_from_ftnptr ((gpointer
)target
);
2736 /* FIXME: make this thread safe */
2737 /* FIXME: we're assuming we're using r11 here */
2738 ppc_load_sequence (code
, ppc_r11
, target
);
2739 mono_arch_flush_icache ((guint8
*)seq
, 28);
2742 /* the trampoline code will try to patch the blrl, blr, bcctr */
2743 if (ins
== 0x4e800021 || ins
== 0x4e800020 || ins
== 0x4e800420) {
2746 /* this is the lis/ori/mtlr/blrl sequence */
2747 seq
= (guint32
*)code
;
2748 g_assert ((seq
[0] >> 26) == 15);
2749 g_assert ((seq
[1] >> 26) == 24);
2750 g_assert ((seq
[2] >> 26) == 31);
2751 g_assert (seq
[3] == 0x4e800021 || seq
[3] == 0x4e800020 || seq
[3] == 0x4e800420);
2752 /* FIXME: make this thread safe */
2753 ppc_lis (code
, ppc_r0
, (guint32
)(target
) >> 16);
2754 ppc_ori (code
, ppc_r0
, ppc_r0
, (guint32
)(target
) & 0xffff);
2755 mono_arch_flush_icache (code
- 8, 8);
2758 g_assert_not_reached ();
2760 // g_print ("patched with 0x%08x\n", ins);
2764 ppc_patch (guchar
*code
, const guchar
*target
)
2766 ppc_patch_full (code
, target
, FALSE
);
2770 emit_move_return_value (MonoCompile
*cfg
, MonoInst
*ins
, guint8
*code
)
2772 switch (ins
->opcode
) {
2775 case OP_FCALL_MEMBASE
:
2776 if (ins
->dreg
!= ppc_f1
)
2777 ppc_fmr (code
, ins
->dreg
, ppc_f1
);
2785 * emit_load_volatile_arguments:
2787 * Load volatile arguments from the stack to the original input registers.
2788 * Required before a tail call.
2791 emit_load_volatile_arguments (MonoCompile
*cfg
, guint8
*code
)
2793 MonoMethod
*method
= cfg
->method
;
2794 MonoMethodSignature
*sig
;
2798 int struct_index
= 0;
2800 sig
= mono_method_signature (method
);
2802 /* This is the opposite of the code in emit_prolog */
2806 cinfo
= calculate_sizes (sig
, sig
->pinvoke
);
2808 if (MONO_TYPE_ISSTRUCT (sig
->ret
)) {
2809 ArgInfo
*ainfo
= &cinfo
->ret
;
2810 inst
= cfg
->vret_addr
;
2811 g_assert (ppc_is_imm16 (inst
->inst_offset
));
2812 ppc_load_reg (code
, ainfo
->reg
, inst
->inst_offset
, inst
->inst_basereg
);
2814 for (i
= 0; i
< sig
->param_count
+ sig
->hasthis
; ++i
) {
2815 ArgInfo
*ainfo
= cinfo
->args
+ i
;
2816 inst
= cfg
->args
[pos
];
2818 g_assert (inst
->opcode
!= OP_REGVAR
);
2819 g_assert (ppc_is_imm16 (inst
->inst_offset
));
2821 switch (ainfo
->regtype
) {
2822 case RegTypeGeneral
:
2823 switch (ainfo
->size
) {
2825 ppc_lbz (code
, ainfo
->reg
, inst
->inst_offset
, inst
->inst_basereg
);
2828 ppc_lhz (code
, ainfo
->reg
, inst
->inst_offset
, inst
->inst_basereg
);
2830 #ifdef __mono_ppc64__
2832 ppc_lwz (code
, ainfo
->reg
, inst
->inst_offset
, inst
->inst_basereg
);
2836 ppc_load_reg (code
, ainfo
->reg
, inst
->inst_offset
, inst
->inst_basereg
);
2842 switch (ainfo
->size
) {
2844 ppc_lfs (code
, ainfo
->reg
, inst
->inst_offset
, inst
->inst_basereg
);
2847 ppc_lfd (code
, ainfo
->reg
, inst
->inst_offset
, inst
->inst_basereg
);
2850 g_assert_not_reached ();
2855 MonoType
*type
= mini_type_get_underlying_type (cfg
->generic_sharing_context
,
2856 &inst
->klass
->byval_arg
);
2858 #ifndef __mono_ppc64__
2859 if (type
->type
== MONO_TYPE_I8
)
2863 if (MONO_TYPE_IS_REFERENCE (type
) || type
->type
== MONO_TYPE_I8
) {
2864 ppc_load_reg (code
, ppc_r0
, inst
->inst_offset
, inst
->inst_basereg
);
2865 ppc_store_reg (code
, ppc_r0
, ainfo
->offset
, ainfo
->reg
);
2866 } else if (type
->type
== MONO_TYPE_I4
) {
2867 ppc_lwz (code
, ppc_r0
, inst
->inst_offset
, inst
->inst_basereg
);
2868 ppc_stw (code
, ppc_r0
, ainfo
->offset
, ainfo
->reg
);
2876 case RegTypeStructByVal
: {
2887 * Darwin pinvokes needs some special handling
2888 * for 1 and 2 byte arguments
2890 if (method
->signature
->pinvoke
)
2891 size
= mono_class_native_size (inst
->klass
, NULL
);
2892 if (size
== 1 || size
== 2) {
2897 for (j
= 0; j
< ainfo
->size
; ++j
) {
2898 ppc_load_reg (code
, ainfo
->reg
+ j
,
2899 inst
->inst_offset
+ j
* sizeof (gpointer
),
2900 inst
->inst_basereg
);
2901 /* FIXME: shift to the right */
2908 case RegTypeStructByAddr
: {
2909 MonoInst
*addr
= cfg
->tailcall_valuetype_addrs
[struct_index
];
2911 g_assert (ppc_is_imm16 (addr
->inst_offset
));
2912 g_assert (!ainfo
->offset
);
2913 ppc_load_reg (code
, ainfo
->reg
, addr
->inst_offset
, addr
->inst_basereg
);
2920 g_assert_not_reached ();
2931 /* This must be kept in sync with emit_load_volatile_arguments(). */
2933 ins_native_length (MonoCompile
*cfg
, MonoInst
*ins
)
2935 int len
= ((guint8
*)ins_get_spec (ins
->opcode
))[MONO_INST_LEN
];
2936 MonoMethodSignature
*sig
;
2941 if (ins
->opcode
!= OP_JMP
)
2944 call
= (MonoCallInst
*)ins
;
2945 sig
= mono_method_signature (cfg
->method
);
2946 cinfo
= calculate_sizes (sig
, sig
->pinvoke
);
2948 if (MONO_TYPE_ISSTRUCT (sig
->ret
))
2950 for (i
= 0; i
< sig
->param_count
+ sig
->hasthis
; ++i
) {
2951 ArgInfo
*ainfo
= cinfo
->args
+ i
;
2953 switch (ainfo
->regtype
) {
2954 case RegTypeGeneral
:
2963 case RegTypeStructByVal
:
2964 len
+= 4 * ainfo
->size
;
2967 case RegTypeStructByAddr
:
2972 g_assert_not_reached ();
2982 emit_reserve_param_area (MonoCompile
*cfg
, guint8
*code
)
2984 long size
= cfg
->param_area
;
2986 size
+= MONO_ARCH_FRAME_ALIGNMENT
- 1;
2987 size
&= -MONO_ARCH_FRAME_ALIGNMENT
;
2992 ppc_load_reg (code
, ppc_r0
, 0, ppc_sp
);
2993 if (ppc_is_imm16 (-size
)) {
2994 ppc_store_reg_update (code
, ppc_r0
, -size
, ppc_sp
);
2996 ppc_load (code
, ppc_r11
, -size
);
2997 ppc_store_reg_update_indexed (code
, ppc_r0
, ppc_sp
, ppc_r11
);
3004 emit_unreserve_param_area (MonoCompile
*cfg
, guint8
*code
)
3006 long size
= cfg
->param_area
;
3008 size
+= MONO_ARCH_FRAME_ALIGNMENT
- 1;
3009 size
&= -MONO_ARCH_FRAME_ALIGNMENT
;
3014 ppc_load_reg (code
, ppc_r0
, 0, ppc_sp
);
3015 if (ppc_is_imm16 (size
)) {
3016 ppc_store_reg_update (code
, ppc_r0
, size
, ppc_sp
);
3018 ppc_load (code
, ppc_r11
, size
);
3019 ppc_store_reg_update_indexed (code
, ppc_r0
, ppc_sp
, ppc_r11
);
3025 #define MASK_SHIFT_IMM(i) ((i) & MONO_PPC_32_64_CASE (0x1f, 0x3f))
3028 mono_arch_output_basic_block (MonoCompile
*cfg
, MonoBasicBlock
*bb
)
3030 MonoInst
*ins
, *next
;
3033 guint8
*code
= cfg
->native_code
+ cfg
->code_len
;
3034 MonoInst
*last_ins
= NULL
;
3035 guint last_offset
= 0;
3039 /* we don't align basic blocks of loops on ppc */
3041 if (cfg
->verbose_level
> 2)
3042 g_print ("Basic block %d starting at offset 0x%x\n", bb
->block_num
, bb
->native_offset
);
3044 cpos
= bb
->max_offset
;
3046 if (cfg
->prof_options
& MONO_PROFILE_COVERAGE
) {
3047 //MonoCoverageInfo *cov = mono_get_coverage_info (cfg->method);
3048 //g_assert (!mono_compile_aot);
3051 // cov->data [bb->dfn].iloffset = bb->cil_code - cfg->cil_code;
3052 /* this is not thread save, but good enough */
3053 /* fixme: howto handle overflows? */
3054 //x86_inc_mem (code, &cov->data [bb->dfn].count);
3057 MONO_BB_FOR_EACH_INS (bb
, ins
) {
3058 offset
= code
- cfg
->native_code
;
3060 max_len
= ins_native_length (cfg
, ins
);
3062 if (offset
> (cfg
->code_size
- max_len
- 16)) {
3063 cfg
->code_size
*= 2;
3064 cfg
->native_code
= g_realloc (cfg
->native_code
, cfg
->code_size
);
3065 code
= cfg
->native_code
+ offset
;
3067 // if (ins->cil_code)
3068 // g_print ("cil code\n");
3069 mono_debug_record_line_number (cfg
, ins
, offset
);
3071 switch (normalize_opcode (ins
->opcode
)) {
3072 case OP_RELAXED_NOP
:
3075 case OP_DUMMY_STORE
:
3076 case OP_NOT_REACHED
:
3080 emit_tls_access (code
, ins
->dreg
, ins
->inst_offset
);
3083 ppc_mullw (code
, ppc_r0
, ins
->sreg1
, ins
->sreg2
);
3084 ppc_mulhw (code
, ppc_r3
, ins
->sreg1
, ins
->sreg2
);
3085 ppc_mr (code
, ppc_r4
, ppc_r0
);
3088 ppc_mullw (code
, ppc_r0
, ins
->sreg1
, ins
->sreg2
);
3089 ppc_mulhwu (code
, ppc_r3
, ins
->sreg1
, ins
->sreg2
);
3090 ppc_mr (code
, ppc_r4
, ppc_r0
);
3092 case OP_MEMORY_BARRIER
:
3095 case OP_STOREI1_MEMBASE_REG
:
3096 if (ppc_is_imm16 (ins
->inst_offset
)) {
3097 ppc_stb (code
, ins
->sreg1
, ins
->inst_offset
, ins
->inst_destbasereg
);
3099 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3100 ppc_stbx (code
, ins
->sreg1
, ins
->inst_destbasereg
, ppc_r0
);
3103 case OP_STOREI2_MEMBASE_REG
:
3104 if (ppc_is_imm16 (ins
->inst_offset
)) {
3105 ppc_sth (code
, ins
->sreg1
, ins
->inst_offset
, ins
->inst_destbasereg
);
3107 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3108 ppc_sthx (code
, ins
->sreg1
, ins
->inst_destbasereg
, ppc_r0
);
3111 case OP_STORE_MEMBASE_REG
:
3112 if (ppc_is_imm16 (ins
->inst_offset
)) {
3113 ppc_store_reg (code
, ins
->sreg1
, ins
->inst_offset
, ins
->inst_destbasereg
);
3115 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3116 ppc_store_reg_indexed (code
, ins
->sreg1
, ins
->inst_destbasereg
, ppc_r0
);
3119 case OP_STOREI1_MEMINDEX
:
3120 ppc_stbx (code
, ins
->sreg1
, ins
->sreg2
, ins
->inst_destbasereg
);
3122 case OP_STOREI2_MEMINDEX
:
3123 ppc_sthx (code
, ins
->sreg1
, ins
->sreg2
, ins
->inst_destbasereg
);
3125 case OP_STORE_MEMINDEX
:
3126 ppc_store_reg_indexed (code
, ins
->sreg1
, ins
->sreg2
, ins
->inst_destbasereg
);
3129 g_assert_not_reached ();
3131 case OP_LOAD_MEMBASE
:
3132 if (ppc_is_imm16 (ins
->inst_offset
)) {
3133 ppc_load_reg (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3135 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3136 ppc_load_reg_indexed (code
, ins
->dreg
, ins
->inst_basereg
, ppc_r0
);
3139 case OP_LOADI4_MEMBASE
:
3140 #ifdef __mono_ppc64__
3141 if (ppc_is_imm16 (ins
->inst_offset
)) {
3142 ppc_lwa (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3144 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3145 ppc_lwax (code
, ins
->dreg
, ins
->inst_basereg
, ppc_r0
);
3149 case OP_LOADU4_MEMBASE
:
3150 if (ppc_is_imm16 (ins
->inst_offset
)) {
3151 ppc_lwz (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3153 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3154 ppc_lwzx (code
, ins
->dreg
, ins
->inst_basereg
, ppc_r0
);
3157 case OP_LOADI1_MEMBASE
:
3158 case OP_LOADU1_MEMBASE
:
3159 if (ppc_is_imm16 (ins
->inst_offset
)) {
3160 ppc_lbz (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3162 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3163 ppc_lbzx (code
, ins
->dreg
, ins
->inst_basereg
, ppc_r0
);
3165 if (ins
->opcode
== OP_LOADI1_MEMBASE
)
3166 ppc_extsb (code
, ins
->dreg
, ins
->dreg
);
3168 case OP_LOADU2_MEMBASE
:
3169 if (ppc_is_imm16 (ins
->inst_offset
)) {
3170 ppc_lhz (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3172 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3173 ppc_lhzx (code
, ins
->dreg
, ins
->inst_basereg
, ppc_r0
);
3176 case OP_LOADI2_MEMBASE
:
3177 if (ppc_is_imm16 (ins
->inst_offset
)) {
3178 ppc_lha (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3180 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3181 ppc_lhax (code
, ins
->dreg
, ins
->inst_basereg
, ppc_r0
);
3184 case OP_LOAD_MEMINDEX
:
3185 ppc_load_reg_indexed (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3187 case OP_LOADI4_MEMINDEX
:
3188 #ifdef __mono_ppc64__
3189 ppc_lwax (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3192 case OP_LOADU4_MEMINDEX
:
3193 ppc_lwzx (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3195 case OP_LOADU2_MEMINDEX
:
3196 ppc_lhzx (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3198 case OP_LOADI2_MEMINDEX
:
3199 ppc_lhax (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3201 case OP_LOADU1_MEMINDEX
:
3202 ppc_lbzx (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3204 case OP_LOADI1_MEMINDEX
:
3205 ppc_lbzx (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3206 ppc_extsb (code
, ins
->dreg
, ins
->dreg
);
3208 case OP_ICONV_TO_I1
:
3209 CASE_PPC64 (OP_LCONV_TO_I1
)
3210 ppc_extsb (code
, ins
->dreg
, ins
->sreg1
);
3212 case OP_ICONV_TO_I2
:
3213 CASE_PPC64 (OP_LCONV_TO_I2
)
3214 ppc_extsh (code
, ins
->dreg
, ins
->sreg1
);
3216 case OP_ICONV_TO_U1
:
3217 CASE_PPC64 (OP_LCONV_TO_U1
)
3218 ppc_clrlwi (code
, ins
->dreg
, ins
->sreg1
, 24);
3220 case OP_ICONV_TO_U2
:
3221 CASE_PPC64 (OP_LCONV_TO_U2
)
3222 ppc_clrlwi (code
, ins
->dreg
, ins
->sreg1
, 16);
3226 CASE_PPC64 (OP_LCOMPARE
)
3227 L
= (sizeof (gpointer
) == 4 || ins
->opcode
== OP_ICOMPARE
) ? 0 : 1;
3229 if (next
&& compare_opcode_is_unsigned (next
->opcode
))
3230 ppc_cmpl (code
, 0, L
, ins
->sreg1
, ins
->sreg2
);
3232 ppc_cmp (code
, 0, L
, ins
->sreg1
, ins
->sreg2
);
3234 case OP_COMPARE_IMM
:
3235 case OP_ICOMPARE_IMM
:
3236 CASE_PPC64 (OP_LCOMPARE_IMM
)
3237 L
= (sizeof (gpointer
) == 4 || ins
->opcode
== OP_ICOMPARE_IMM
) ? 0 : 1;
3239 if (next
&& compare_opcode_is_unsigned (next
->opcode
)) {
3240 if (ppc_is_uimm16 (ins
->inst_imm
)) {
3241 ppc_cmpli (code
, 0, L
, ins
->sreg1
, (ins
->inst_imm
& 0xffff));
3243 g_assert_not_reached ();
3246 if (ppc_is_imm16 (ins
->inst_imm
)) {
3247 ppc_cmpi (code
, 0, L
, ins
->sreg1
, (ins
->inst_imm
& 0xffff));
3249 g_assert_not_reached ();
3258 ppc_addco (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3261 CASE_PPC64 (OP_LADD
)
3262 ppc_add (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3266 ppc_adde (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3269 if (ppc_is_imm16 (ins
->inst_imm
)) {
3270 ppc_addic (code
, ins
->dreg
, ins
->sreg1
, ins
->inst_imm
);
3272 g_assert_not_reached ();
3277 CASE_PPC64 (OP_LADD_IMM
)
3278 if (ppc_is_imm16 (ins
->inst_imm
)) {
3279 ppc_addi (code
, ins
->dreg
, ins
->sreg1
, ins
->inst_imm
);
3281 g_assert_not_reached ();
3285 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3287 ppc_addo (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3288 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3289 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<14));
3290 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "OverflowException");
3292 case OP_IADD_OVF_UN
:
3293 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3295 ppc_addco (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3296 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3297 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<13));
3298 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "OverflowException");
3301 CASE_PPC64 (OP_LSUB_OVF
)
3302 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3304 ppc_subfo (code
, ins
->dreg
, ins
->sreg2
, ins
->sreg1
);
3305 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3306 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<14));
3307 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "OverflowException");
3309 case OP_ISUB_OVF_UN
:
3310 CASE_PPC64 (OP_LSUB_OVF_UN
)
3311 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3313 ppc_subfc (code
, ins
->dreg
, ins
->sreg2
, ins
->sreg1
);
3314 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3315 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<13));
3316 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE
, PPC_BR_EQ
, "OverflowException");
3318 case OP_ADD_OVF_CARRY
:
3319 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3321 ppc_addeo (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3322 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3323 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<14));
3324 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "OverflowException");
3326 case OP_ADD_OVF_UN_CARRY
:
3327 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3329 ppc_addeo (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3330 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3331 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<13));
3332 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "OverflowException");
3334 case OP_SUB_OVF_CARRY
:
3335 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3337 ppc_subfeo (code
, ins
->dreg
, ins
->sreg2
, ins
->sreg1
);
3338 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3339 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<14));
3340 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "OverflowException");
3342 case OP_SUB_OVF_UN_CARRY
:
3343 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3345 ppc_subfeo (code
, ins
->dreg
, ins
->sreg2
, ins
->sreg1
);
3346 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3347 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<13));
3348 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE
, PPC_BR_EQ
, "OverflowException");
3352 ppc_subfco (code
, ins
->dreg
, ins
->sreg2
, ins
->sreg1
);
3355 CASE_PPC64 (OP_LSUB
)
3356 ppc_subf (code
, ins
->dreg
, ins
->sreg2
, ins
->sreg1
);
3360 ppc_subfe (code
, ins
->dreg
, ins
->sreg2
, ins
->sreg1
);
3364 CASE_PPC64 (OP_LSUB_IMM
)
3365 // we add the negated value
3366 if (ppc_is_imm16 (-ins
->inst_imm
))
3367 ppc_addi (code
, ins
->dreg
, ins
->sreg1
, -ins
->inst_imm
);
3369 g_assert_not_reached ();
3373 g_assert (ppc_is_imm16 (ins
->inst_imm
));
3374 ppc_subfic (code
, ins
->dreg
, ins
->sreg1
, ins
->inst_imm
);
3377 ppc_subfze (code
, ins
->dreg
, ins
->sreg1
);
3380 CASE_PPC64 (OP_LAND
)
3381 /* FIXME: the ppc macros as inconsistent here: put dest as the first arg! */
3382 ppc_and (code
, ins
->sreg1
, ins
->dreg
, ins
->sreg2
);
3386 CASE_PPC64 (OP_LAND_IMM
)
3387 if (!(ins
->inst_imm
& 0xffff0000)) {
3388 ppc_andid (code
, ins
->sreg1
, ins
->dreg
, ins
->inst_imm
);
3389 } else if (!(ins
->inst_imm
& 0xffff)) {
3390 ppc_andisd (code
, ins
->sreg1
, ins
->dreg
, ((guint32
)ins
->inst_imm
>> 16));
3392 g_assert_not_reached ();
3396 CASE_PPC64 (OP_LDIV
) {
3397 guint8
*divisor_is_m1
;
3398 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3400 ppc_compare_reg_imm (code
, 0, ins
->sreg2
, -1);
3401 divisor_is_m1
= code
;
3402 ppc_bc (code
, PPC_BR_FALSE
| PPC_BR_LIKELY
, PPC_BR_EQ
, 0);
3403 ppc_lis (code
, ppc_r0
, 0x8000);
3404 #ifdef __mono_ppc64__
3405 if (ins
->opcode
== OP_LDIV
)
3406 ppc_sldi (code
, ppc_r0
, ppc_r0
, 32);
3408 ppc_compare (code
, 0, ins
->sreg1
, ppc_r0
);
3409 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE
, PPC_BR_EQ
, "ArithmeticException");
3410 ppc_patch (divisor_is_m1
, code
);
3411 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3413 if (ins
->opcode
== OP_IDIV
)
3414 ppc_divwod (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3415 #ifdef __mono_ppc64__
3417 ppc_divdod (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3419 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3420 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<14));
3421 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "DivideByZeroException");
3425 CASE_PPC64 (OP_LDIV_UN
)
3426 if (ins
->opcode
== OP_IDIV_UN
)
3427 ppc_divwuod (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3428 #ifdef __mono_ppc64__
3430 ppc_divduod (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3432 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3433 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<14));
3434 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "DivideByZeroException");
3440 g_assert_not_reached ();
3443 ppc_or (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3447 CASE_PPC64 (OP_LOR_IMM
)
3448 if (!(ins
->inst_imm
& 0xffff0000)) {
3449 ppc_ori (code
, ins
->sreg1
, ins
->dreg
, ins
->inst_imm
);
3450 } else if (!(ins
->inst_imm
& 0xffff)) {
3451 ppc_oris (code
, ins
->dreg
, ins
->sreg1
, ((guint32
)(ins
->inst_imm
) >> 16));
3453 g_assert_not_reached ();
3457 CASE_PPC64 (OP_LXOR
)
3458 ppc_xor (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3462 CASE_PPC64 (OP_LXOR_IMM
)
3463 if (!(ins
->inst_imm
& 0xffff0000)) {
3464 ppc_xori (code
, ins
->sreg1
, ins
->dreg
, ins
->inst_imm
);
3465 } else if (!(ins
->inst_imm
& 0xffff)) {
3466 ppc_xoris (code
, ins
->sreg1
, ins
->dreg
, ((guint32
)(ins
->inst_imm
) >> 16));
3468 g_assert_not_reached ();
3472 CASE_PPC64 (OP_LSHL
)
3473 ppc_shift_left (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3477 CASE_PPC64 (OP_LSHL_IMM
)
3478 ppc_shift_left_imm (code
, ins
->dreg
, ins
->sreg1
, MASK_SHIFT_IMM (ins
->inst_imm
));
3481 ppc_sraw (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3484 ppc_shift_right_arith_imm (code
, ins
->dreg
, ins
->sreg1
, MASK_SHIFT_IMM (ins
->inst_imm
));
3487 if (MASK_SHIFT_IMM (ins
->inst_imm
))
3488 ppc_shift_right_imm (code
, ins
->dreg
, ins
->sreg1
, MASK_SHIFT_IMM (ins
->inst_imm
));
3490 ppc_mr (code
, ins
->dreg
, ins
->sreg1
);
3493 ppc_srw (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3496 CASE_PPC64 (OP_LNOT
)
3497 ppc_not (code
, ins
->dreg
, ins
->sreg1
);
3500 CASE_PPC64 (OP_LNEG
)
3501 ppc_neg (code
, ins
->dreg
, ins
->sreg1
);
3504 CASE_PPC64 (OP_LMUL
)
3505 ppc_multiply (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3509 CASE_PPC64 (OP_LMUL_IMM
)
3510 if (ppc_is_imm16 (ins
->inst_imm
)) {
3511 ppc_mulli (code
, ins
->dreg
, ins
->sreg1
, ins
->inst_imm
);
3513 g_assert_not_reached ();
3517 CASE_PPC64 (OP_LMUL_OVF
)
3518 /* we annot use mcrxr, since it's not implemented on some processors
3519 * XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3521 if (ins
->opcode
== OP_IMUL_OVF
)
3522 ppc_mullwo (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3523 #ifdef __mono_ppc64__
3525 ppc_mulldo (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3527 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
3528 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1<<14));
3529 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, "OverflowException");
3531 case OP_IMUL_OVF_UN
:
3532 CASE_PPC64 (OP_LMUL_OVF_UN
)
3533 /* we first multiply to get the high word and compare to 0
3534 * to set the flags, then the result is discarded and then
3535 * we multiply to get the lower * bits result
3537 if (ins
->opcode
== OP_IMUL_OVF_UN
)
3538 ppc_mulhwu (code
, ppc_r0
, ins
->sreg1
, ins
->sreg2
);
3539 #ifdef __mono_ppc64__
3541 ppc_mulhdu (code
, ppc_r0
, ins
->sreg1
, ins
->sreg2
);
3543 ppc_cmpi (code
, 0, 0, ppc_r0
, 0);
3544 EMIT_COND_SYSTEM_EXCEPTION (CEE_BNE_UN
- CEE_BEQ
, "OverflowException");
3545 ppc_multiply (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
3548 CASE_PPC64 (OP_I8CONST
)
3549 ppc_load (code
, ins
->dreg
, ins
->inst_c0
);
3552 mono_add_patch_info (cfg
, offset
, (MonoJumpInfoType
)ins
->inst_i1
, ins
->inst_p0
);
3553 ppc_load_sequence (code
, ins
->dreg
, 0);
3555 CASE_PPC32 (OP_ICONV_TO_I4
)
3556 CASE_PPC32 (OP_ICONV_TO_U4
)
3558 ppc_mr (code
, ins
->dreg
, ins
->sreg1
);
3561 int saved
= ins
->sreg1
;
3562 if (ins
->sreg1
== ppc_r3
) {
3563 ppc_mr (code
, ppc_r0
, ins
->sreg1
);
3566 if (ins
->sreg2
!= ppc_r3
)
3567 ppc_mr (code
, ppc_r3
, ins
->sreg2
);
3568 if (saved
!= ppc_r4
)
3569 ppc_mr (code
, ppc_r4
, saved
);
3573 ppc_fmr (code
, ins
->dreg
, ins
->sreg1
);
3575 case OP_FCONV_TO_R4
:
3576 ppc_frsp (code
, ins
->dreg
, ins
->sreg1
);
3582 * Keep in sync with mono_arch_emit_epilog
3584 g_assert (!cfg
->method
->save_lmf
);
3586 * Note: we can use ppc_r11 here because it is dead anyway:
3587 * we're leaving the method.
3589 if (1 || cfg
->flags
& MONO_CFG_HAS_CALLS
) {
3590 long ret_offset
= cfg
->stack_usage
+ PPC_RET_ADDR_OFFSET
;
3591 if (ppc_is_imm16 (ret_offset
)) {
3592 ppc_load_reg (code
, ppc_r0
, ret_offset
, cfg
->frame_reg
);
3594 ppc_load (code
, ppc_r11
, ret_offset
);
3595 ppc_load_reg_indexed (code
, ppc_r0
, cfg
->frame_reg
, ppc_r11
);
3597 ppc_mtlr (code
, ppc_r0
);
3600 code
= emit_load_volatile_arguments (cfg
, code
);
3602 if (ppc_is_imm16 (cfg
->stack_usage
)) {
3603 ppc_addi (code
, ppc_r11
, cfg
->frame_reg
, cfg
->stack_usage
);
3605 ppc_load (code
, ppc_r11
, cfg
->stack_usage
);
3606 ppc_add (code
, ppc_r11
, cfg
->frame_reg
, ppc_r11
);
3608 if (!cfg
->method
->save_lmf
) {
3609 /*for (i = 31; i >= 14; --i) {
3610 if (cfg->used_float_regs & (1 << i)) {
3611 pos += sizeof (double);
3612 ppc_lfd (code, i, -pos, cfg->frame_reg);
3616 for (i
= 31; i
>= 13; --i
) {
3617 if (cfg
->used_int_regs
& (1 << i
)) {
3618 pos
+= sizeof (gpointer
);
3619 ppc_load_reg (code
, i
, -pos
, ppc_r11
);
3623 /* FIXME restore from MonoLMF: though this can't happen yet */
3625 ppc_mr (code
, ppc_sp
, ppc_r11
);
3626 mono_add_patch_info (cfg
, (guint8
*) code
- cfg
->native_code
, MONO_PATCH_INFO_METHOD_JUMP
, ins
->inst_p0
);
3631 /* ensure ins->sreg1 is not NULL */
3632 ppc_load_reg (code
, ppc_r0
, 0, ins
->sreg1
);
3635 long cookie_offset
= cfg
->sig_cookie
+ cfg
->stack_usage
;
3636 if (ppc_is_imm16 (cookie_offset
)) {
3637 ppc_addi (code
, ppc_r0
, cfg
->frame_reg
, cookie_offset
);
3639 ppc_load (code
, ppc_r0
, cookie_offset
);
3640 ppc_add (code
, ppc_r0
, cfg
->frame_reg
, ppc_r0
);
3642 ppc_store_reg (code
, ppc_r0
, 0, ins
->sreg1
);
3651 call
= (MonoCallInst
*)ins
;
3652 if (ins
->flags
& MONO_INST_HAS_METHOD
)
3653 mono_add_patch_info (cfg
, offset
, MONO_PATCH_INFO_METHOD
, call
->method
);
3655 mono_add_patch_info (cfg
, offset
, MONO_PATCH_INFO_ABS
, call
->fptr
);
3656 if (FORCE_INDIR_CALL
|| cfg
->method
->dynamic
) {
3657 ppc_load_func (code
, ppc_r0
, 0);
3658 ppc_mtlr (code
, ppc_r0
);
3663 /* FIXME: this should be handled somewhere else in the new jit */
3664 code
= emit_move_return_value (cfg
, ins
, code
);
3670 case OP_VOIDCALL_REG
:
3672 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3673 ppc_load_reg (code
, ppc_r0
, 0, ins
->sreg1
);
3674 /* FIXME: if we know that this is a method, we
3675 can omit this load */
3676 ppc_load_reg (code
, ppc_r2
, 8, ins
->sreg1
);
3677 ppc_mtlr (code
, ppc_r0
);
3679 ppc_mtlr (code
, ins
->sreg1
);
3682 /* FIXME: this should be handled somewhere else in the new jit */
3683 code
= emit_move_return_value (cfg
, ins
, code
);
3685 case OP_FCALL_MEMBASE
:
3686 case OP_LCALL_MEMBASE
:
3687 case OP_VCALL_MEMBASE
:
3688 case OP_VCALL2_MEMBASE
:
3689 case OP_VOIDCALL_MEMBASE
:
3690 case OP_CALL_MEMBASE
:
3691 ppc_load_reg (code
, ppc_r0
, ins
->inst_offset
, ins
->sreg1
);
3692 ppc_mtlr (code
, ppc_r0
);
3694 /* FIXME: this should be handled somewhere else in the new jit */
3695 code
= emit_move_return_value (cfg
, ins
, code
);
3698 guint8
* zero_loop_jump
, * zero_loop_start
;
3699 /* keep alignment */
3700 int alloca_waste
= PPC_STACK_PARAM_OFFSET
+ cfg
->param_area
+ 31;
3701 int area_offset
= alloca_waste
;
3703 ppc_addi (code
, ppc_r11
, ins
->sreg1
, alloca_waste
+ 31);
3704 /* FIXME: should be calculated from MONO_ARCH_FRAME_ALIGNMENT */
3705 ppc_clear_right_imm (code
, ppc_r11
, ppc_r11
, 4);
3706 /* use ctr to store the number of words to 0 if needed */
3707 if (ins
->flags
& MONO_INST_INIT
) {
3708 /* we zero 4 bytes at a time:
3709 * we add 7 instead of 3 so that we set the counter to
3710 * at least 1, otherwise the bdnz instruction will make
3711 * it negative and iterate billions of times.
3713 ppc_addi (code
, ppc_r0
, ins
->sreg1
, 7);
3714 ppc_shift_right_arith_imm (code
, ppc_r0
, ppc_r0
, 2);
3715 ppc_mtctr (code
, ppc_r0
);
3717 ppc_load_reg (code
, ppc_r0
, 0, ppc_sp
);
3718 ppc_neg (code
, ppc_r11
, ppc_r11
);
3719 ppc_store_reg_update_indexed (code
, ppc_r0
, ppc_sp
, ppc_r11
);
3721 /* FIXME: make this loop work in 8 byte
3722 increments on PPC64 */
3723 if (ins
->flags
& MONO_INST_INIT
) {
3724 /* adjust the dest reg by -4 so we can use stwu */
3725 /* we actually adjust -8 because we let the loop
3728 ppc_addi (code
, ins
->dreg
, ppc_sp
, (area_offset
- 8));
3729 ppc_li (code
, ppc_r11
, 0);
3730 zero_loop_start
= code
;
3731 ppc_stwu (code
, ppc_r11
, 4, ins
->dreg
);
3732 zero_loop_jump
= code
;
3733 ppc_bc (code
, PPC_BR_DEC_CTR_NONZERO
, 0, 0);
3734 ppc_patch (zero_loop_jump
, zero_loop_start
);
3736 ppc_addi (code
, ins
->dreg
, ppc_sp
, area_offset
);
3741 ppc_mr (code
, ppc_r3
, ins
->sreg1
);
3742 mono_add_patch_info (cfg
, code
- cfg
->native_code
, MONO_PATCH_INFO_INTERNAL_METHOD
,
3743 (gpointer
)"mono_arch_throw_exception");
3744 if (FORCE_INDIR_CALL
|| cfg
->method
->dynamic
) {
3745 ppc_load_func (code
, ppc_r0
, 0);
3746 ppc_mtlr (code
, ppc_r0
);
3755 ppc_mr (code
, ppc_r3
, ins
->sreg1
);
3756 mono_add_patch_info (cfg
, code
- cfg
->native_code
, MONO_PATCH_INFO_INTERNAL_METHOD
,
3757 (gpointer
)"mono_arch_rethrow_exception");
3758 if (FORCE_INDIR_CALL
|| cfg
->method
->dynamic
) {
3759 ppc_load_func (code
, ppc_r0
, 0);
3760 ppc_mtlr (code
, ppc_r0
);
3767 case OP_START_HANDLER
: {
3768 MonoInst
*spvar
= mono_find_spvar_for_region (cfg
, bb
->region
);
3769 g_assert (spvar
->inst_basereg
!= ppc_sp
);
3770 code
= emit_reserve_param_area (cfg
, code
);
3771 ppc_mflr (code
, ppc_r0
);
3772 if (ppc_is_imm16 (spvar
->inst_offset
)) {
3773 ppc_store_reg (code
, ppc_r0
, spvar
->inst_offset
, spvar
->inst_basereg
);
3775 ppc_load (code
, ppc_r11
, spvar
->inst_offset
);
3776 ppc_store_reg_indexed (code
, ppc_r0
, ppc_r11
, spvar
->inst_basereg
);
3780 case OP_ENDFILTER
: {
3781 MonoInst
*spvar
= mono_find_spvar_for_region (cfg
, bb
->region
);
3782 g_assert (spvar
->inst_basereg
!= ppc_sp
);
3783 code
= emit_unreserve_param_area (cfg
, code
);
3784 if (ins
->sreg1
!= ppc_r3
)
3785 ppc_mr (code
, ppc_r3
, ins
->sreg1
);
3786 if (ppc_is_imm16 (spvar
->inst_offset
)) {
3787 ppc_load_reg (code
, ppc_r0
, spvar
->inst_offset
, spvar
->inst_basereg
);
3789 ppc_load (code
, ppc_r11
, spvar
->inst_offset
);
3790 ppc_load_reg_indexed (code
, ppc_r0
, spvar
->inst_basereg
, ppc_r11
);
3792 ppc_mtlr (code
, ppc_r0
);
3796 case OP_ENDFINALLY
: {
3797 MonoInst
*spvar
= mono_find_spvar_for_region (cfg
, bb
->region
);
3798 g_assert (spvar
->inst_basereg
!= ppc_sp
);
3799 code
= emit_unreserve_param_area (cfg
, code
);
3800 ppc_load_reg (code
, ppc_r0
, spvar
->inst_offset
, spvar
->inst_basereg
);
3801 ppc_mtlr (code
, ppc_r0
);
3805 case OP_CALL_HANDLER
:
3806 mono_add_patch_info (cfg
, code
- cfg
->native_code
, MONO_PATCH_INFO_BB
, ins
->inst_target_bb
);
3810 ins
->inst_c0
= code
- cfg
->native_code
;
3813 if (ins
->flags
& MONO_INST_BRLABEL
) {
3814 /*if (ins->inst_i0->inst_c0) {
3816 //x86_jump_code (code, cfg->native_code + ins->inst_i0->inst_c0);
3818 mono_add_patch_info (cfg
, offset
, MONO_PATCH_INFO_LABEL
, ins
->inst_i0
);
3822 /*if (ins->inst_target_bb->native_offset) {
3824 //x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset);
3826 mono_add_patch_info (cfg
, offset
, MONO_PATCH_INFO_BB
, ins
->inst_target_bb
);
3832 ppc_mtctr (code
, ins
->sreg1
);
3833 ppc_bcctr (code
, PPC_BR_ALWAYS
, 0);
3837 CASE_PPC64 (OP_LCEQ
)
3838 ppc_li (code
, ins
->dreg
, 0);
3839 ppc_bc (code
, PPC_BR_FALSE
, PPC_BR_EQ
, 2);
3840 ppc_li (code
, ins
->dreg
, 1);
3846 CASE_PPC64 (OP_LCLT
)
3847 CASE_PPC64 (OP_LCLT_UN
)
3848 ppc_li (code
, ins
->dreg
, 1);
3849 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_LT
, 2);
3850 ppc_li (code
, ins
->dreg
, 0);
3856 CASE_PPC64 (OP_LCGT
)
3857 CASE_PPC64 (OP_LCGT_UN
)
3858 ppc_li (code
, ins
->dreg
, 1);
3859 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_GT
, 2);
3860 ppc_li (code
, ins
->dreg
, 0);
3862 case OP_COND_EXC_EQ
:
3863 case OP_COND_EXC_NE_UN
:
3864 case OP_COND_EXC_LT
:
3865 case OP_COND_EXC_LT_UN
:
3866 case OP_COND_EXC_GT
:
3867 case OP_COND_EXC_GT_UN
:
3868 case OP_COND_EXC_GE
:
3869 case OP_COND_EXC_GE_UN
:
3870 case OP_COND_EXC_LE
:
3871 case OP_COND_EXC_LE_UN
:
3872 EMIT_COND_SYSTEM_EXCEPTION (ins
->opcode
- OP_COND_EXC_EQ
, ins
->inst_p1
);
3874 case OP_COND_EXC_IEQ
:
3875 case OP_COND_EXC_INE_UN
:
3876 case OP_COND_EXC_ILT
:
3877 case OP_COND_EXC_ILT_UN
:
3878 case OP_COND_EXC_IGT
:
3879 case OP_COND_EXC_IGT_UN
:
3880 case OP_COND_EXC_IGE
:
3881 case OP_COND_EXC_IGE_UN
:
3882 case OP_COND_EXC_ILE
:
3883 case OP_COND_EXC_ILE_UN
:
3884 EMIT_COND_SYSTEM_EXCEPTION (ins
->opcode
- OP_COND_EXC_IEQ
, ins
->inst_p1
);
3896 EMIT_COND_BRANCH (ins
, ins
->opcode
- OP_IBEQ
);
3899 /* floating point opcodes */
3902 g_assert_not_reached ();
3903 case OP_STORER8_MEMBASE_REG
:
3904 if (ppc_is_imm16 (ins
->inst_offset
)) {
3905 ppc_stfd (code
, ins
->sreg1
, ins
->inst_offset
, ins
->inst_destbasereg
);
3907 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3908 ppc_stfdx (code
, ins
->sreg1
, ins
->inst_destbasereg
, ppc_r0
);
3911 case OP_LOADR8_MEMBASE
:
3912 if (ppc_is_imm16 (ins
->inst_offset
)) {
3913 ppc_lfd (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3915 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3916 ppc_lfdx (code
, ins
->dreg
, ins
->inst_destbasereg
, ppc_r0
);
3919 case OP_STORER4_MEMBASE_REG
:
3920 ppc_frsp (code
, ins
->sreg1
, ins
->sreg1
);
3921 if (ppc_is_imm16 (ins
->inst_offset
)) {
3922 ppc_stfs (code
, ins
->sreg1
, ins
->inst_offset
, ins
->inst_destbasereg
);
3924 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3925 ppc_stfsx (code
, ins
->sreg1
, ins
->inst_destbasereg
, ppc_r0
);
3928 case OP_LOADR4_MEMBASE
:
3929 if (ppc_is_imm16 (ins
->inst_offset
)) {
3930 ppc_lfs (code
, ins
->dreg
, ins
->inst_offset
, ins
->inst_basereg
);
3932 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
3933 ppc_lfsx (code
, ins
->dreg
, ins
->inst_destbasereg
, ppc_r0
);
3936 case OP_LOADR4_MEMINDEX
:
3937 ppc_lfsx (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3939 case OP_LOADR8_MEMINDEX
:
3940 ppc_lfdx (code
, ins
->dreg
, ins
->sreg2
, ins
->inst_basereg
);
3942 case OP_STORER4_MEMINDEX
:
3943 ppc_frsp (code
, ins
->sreg1
, ins
->sreg1
);
3944 ppc_stfsx (code
, ins
->sreg1
, ins
->sreg2
, ins
->inst_destbasereg
);
3946 case OP_STORER8_MEMINDEX
:
3947 ppc_stfdx (code
, ins
->sreg1
, ins
->sreg2
, ins
->inst_destbasereg
);
3950 case CEE_CONV_R4
: /* FIXME: change precision */
3952 g_assert_not_reached ();
3953 case OP_FCONV_TO_I1
:
3954 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 1, TRUE
);
3956 case OP_FCONV_TO_U1
:
3957 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 1, FALSE
);
3959 case OP_FCONV_TO_I2
:
3960 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 2, TRUE
);
3962 case OP_FCONV_TO_U2
:
3963 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 2, FALSE
);
3965 case OP_FCONV_TO_I4
:
3967 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 4, TRUE
);
3969 case OP_FCONV_TO_U4
:
3971 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 4, FALSE
);
3973 case OP_LCONV_TO_R_UN
:
3974 g_assert_not_reached ();
3975 /* Implemented as helper calls */
3977 case OP_LCONV_TO_OVF_I4_2
:
3978 case OP_LCONV_TO_OVF_I
: {
3979 #ifdef __mono_ppc64__
3982 guint8
*negative_branch
, *msword_positive_branch
, *msword_negative_branch
, *ovf_ex_target
;
3983 // Check if its negative
3984 ppc_cmpi (code
, 0, 0, ins
->sreg1
, 0);
3985 negative_branch
= code
;
3986 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_LT
, 0);
3987 // Its positive msword == 0
3988 ppc_cmpi (code
, 0, 0, ins
->sreg2
, 0);
3989 msword_positive_branch
= code
;
3990 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_EQ
, 0);
3992 ovf_ex_target
= code
;
3993 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_ALWAYS
, 0, "OverflowException");
3995 ppc_patch (negative_branch
, code
);
3996 ppc_cmpi (code
, 0, 0, ins
->sreg2
, -1);
3997 msword_negative_branch
= code
;
3998 ppc_bc (code
, PPC_BR_FALSE
, PPC_BR_EQ
, 0);
3999 ppc_patch (msword_negative_branch
, ovf_ex_target
);
4001 ppc_patch (msword_positive_branch
, code
);
4002 if (ins
->dreg
!= ins
->sreg1
)
4003 ppc_mr (code
, ins
->dreg
, ins
->sreg1
);
4008 ppc_fsqrtd (code
, ins
->dreg
, ins
->sreg1
);
4011 ppc_fadd (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
4014 ppc_fsub (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
4017 ppc_fmul (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
4020 ppc_fdiv (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
4023 ppc_fneg (code
, ins
->dreg
, ins
->sreg1
);
4027 g_assert_not_reached ();
4030 ppc_fcmpu (code
, 0, ins
->sreg1
, ins
->sreg2
);
4033 ppc_fcmpo (code
, 0, ins
->sreg1
, ins
->sreg2
);
4034 ppc_li (code
, ins
->dreg
, 0);
4035 ppc_bc (code
, PPC_BR_FALSE
, PPC_BR_EQ
, 2);
4036 ppc_li (code
, ins
->dreg
, 1);
4039 ppc_fcmpo (code
, 0, ins
->sreg1
, ins
->sreg2
);
4040 ppc_li (code
, ins
->dreg
, 1);
4041 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_LT
, 2);
4042 ppc_li (code
, ins
->dreg
, 0);
4045 ppc_fcmpu (code
, 0, ins
->sreg1
, ins
->sreg2
);
4046 ppc_li (code
, ins
->dreg
, 1);
4047 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_SO
, 3);
4048 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_LT
, 2);
4049 ppc_li (code
, ins
->dreg
, 0);
4052 ppc_fcmpo (code
, 0, ins
->sreg1
, ins
->sreg2
);
4053 ppc_li (code
, ins
->dreg
, 1);
4054 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_GT
, 2);
4055 ppc_li (code
, ins
->dreg
, 0);
4058 ppc_fcmpu (code
, 0, ins
->sreg1
, ins
->sreg2
);
4059 ppc_li (code
, ins
->dreg
, 1);
4060 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_SO
, 3);
4061 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_GT
, 2);
4062 ppc_li (code
, ins
->dreg
, 0);
4065 EMIT_COND_BRANCH (ins
, CEE_BEQ
- CEE_BEQ
);
4068 EMIT_COND_BRANCH (ins
, CEE_BNE_UN
- CEE_BEQ
);
4071 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_SO
, 2);
4072 EMIT_COND_BRANCH (ins
, CEE_BLT
- CEE_BEQ
);
4075 EMIT_COND_BRANCH_FLAGS (ins
, PPC_BR_TRUE
, PPC_BR_SO
);
4076 EMIT_COND_BRANCH (ins
, CEE_BLT_UN
- CEE_BEQ
);
4079 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_SO
, 2);
4080 EMIT_COND_BRANCH (ins
, CEE_BGT
- CEE_BEQ
);
4083 EMIT_COND_BRANCH_FLAGS (ins
, PPC_BR_TRUE
, PPC_BR_SO
);
4084 EMIT_COND_BRANCH (ins
, CEE_BGT_UN
- CEE_BEQ
);
4087 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_SO
, 2);
4088 EMIT_COND_BRANCH (ins
, CEE_BGE
- CEE_BEQ
);
4091 EMIT_COND_BRANCH (ins
, CEE_BGE_UN
- CEE_BEQ
);
4094 ppc_bc (code
, PPC_BR_TRUE
, PPC_BR_SO
, 2);
4095 EMIT_COND_BRANCH (ins
, CEE_BLE
- CEE_BEQ
);
4098 EMIT_COND_BRANCH (ins
, CEE_BLE_UN
- CEE_BEQ
);
4101 g_assert_not_reached ();
4102 case OP_CHECK_FINITE
: {
4103 ppc_rlwinm (code
, ins
->sreg1
, ins
->sreg1
, 0, 1, 31);
4104 ppc_addis (code
, ins
->sreg1
, ins
->sreg1
, -32752);
4105 ppc_rlwinmd (code
, ins
->sreg1
, ins
->sreg1
, 1, 31, 31);
4106 EMIT_COND_SYSTEM_EXCEPTION (CEE_BEQ
- CEE_BEQ
, "ArithmeticException");
4109 mono_add_patch_info (cfg
, offset
, (MonoJumpInfoType
)ins
->inst_i1
, ins
->inst_p0
);
4110 #ifdef __mono_ppc64__
4111 ppc_load_sequence (code
, ins
->dreg
, (gulong
)0x0f0f0f0f0f0f0f0fL
);
4113 ppc_load_sequence (code
, ins
->dreg
, (gulong
)0x0f0f0f0fL
);
4118 #ifdef __mono_ppc64__
4119 case OP_ICONV_TO_I4
:
4121 ppc_extsw (code
, ins
->dreg
, ins
->sreg1
);
4123 case OP_ICONV_TO_U4
:
4125 ppc_clrldi (code
, ins
->dreg
, ins
->sreg1
, 32);
4127 case OP_ICONV_TO_R4
:
4128 case OP_ICONV_TO_R8
:
4129 case OP_LCONV_TO_R4
:
4130 case OP_LCONV_TO_R8
: {
4132 if (ins
->opcode
== OP_ICONV_TO_R4
|| ins
->opcode
== OP_ICONV_TO_R8
) {
4133 ppc_extsw (code
, ppc_r0
, ins
->sreg1
);
4138 ppc_store_reg (code
, tmp
, -8, ppc_r1
);
4139 ppc_lfd (code
, ins
->dreg
, -8, ppc_r1
);
4140 ppc_fcfid (code
, ins
->dreg
, ins
->dreg
);
4141 if (ins
->opcode
== OP_ICONV_TO_R4
|| ins
->opcode
== OP_LCONV_TO_R4
)
4142 ppc_frsp (code
, ins
->dreg
, ins
->dreg
);
4146 ppc_srad (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
4149 ppc_srd (code
, ins
->dreg
, ins
->sreg1
, ins
->sreg2
);
4152 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
4154 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
4155 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1 << 13)); /* CA */
4156 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, ins
->inst_p1
);
4158 case OP_COND_EXC_OV
:
4159 ppc_mfspr (code
, ppc_r0
, ppc_xer
);
4160 ppc_andisd (code
, ppc_r0
, ppc_r0
, (1 << 14)); /* OV */
4161 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE
, PPC_BR_EQ
, ins
->inst_p1
);
4173 EMIT_COND_BRANCH (ins
, ins
->opcode
- OP_LBEQ
);
4175 case OP_FCONV_TO_I8
:
4176 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 8, TRUE
);
4178 case OP_FCONV_TO_U8
:
4179 code
= emit_float_to_int (cfg
, code
, ins
->dreg
, ins
->sreg1
, 8, FALSE
);
4181 case OP_STOREI4_MEMBASE_REG
:
4182 if (ppc_is_imm16 (ins
->inst_offset
)) {
4183 ppc_stw (code
, ins
->sreg1
, ins
->inst_offset
, ins
->inst_destbasereg
);
4185 ppc_load (code
, ppc_r0
, ins
->inst_offset
);
4186 ppc_stwx (code
, ins
->sreg1
, ins
->inst_destbasereg
, ppc_r0
);
4189 case OP_STOREI4_MEMINDEX
:
4190 ppc_stwx (code
, ins
->sreg1
, ins
->sreg2
, ins
->inst_destbasereg
);
4193 ppc_srawi (code
, ins
->dreg
, ins
->sreg1
, (ins
->inst_imm
& 0x1f));
4195 case OP_ISHR_UN_IMM
:
4196 if (ins
->inst_imm
& 0x1f)
4197 ppc_srwi (code
, ins
->dreg
, ins
->sreg1
, (ins
->inst_imm
& 0x1f));
4199 ppc_mr (code
, ins
->dreg
, ins
->sreg1
);
4201 case OP_ATOMIC_ADD_NEW_I4
:
4202 case OP_ATOMIC_ADD_NEW_I8
: {
4203 guint8
*loop
= code
, *branch
;
4204 g_assert (ins
->inst_offset
== 0);
4205 if (ins
->opcode
== OP_ATOMIC_ADD_NEW_I4
)
4206 ppc_lwarx (code
, ppc_r0
, 0, ins
->inst_basereg
);
4208 ppc_ldarx (code
, ppc_r0
, 0, ins
->inst_basereg
);
4209 ppc_add (code
, ppc_r0
, ppc_r0
, ins
->sreg2
);
4210 if (ins
->opcode
== OP_ATOMIC_ADD_NEW_I4
)
4211 ppc_stwcxd (code
, ppc_r0
, 0, ins
->inst_basereg
);
4213 ppc_stdcxd (code
, ppc_r0
, 0, ins
->inst_basereg
);
4215 ppc_bc (code
, PPC_BR_FALSE
, PPC_BR_EQ
, 0);
4216 ppc_patch (branch
, loop
);
4217 ppc_mr (code
, ins
->dreg
, ppc_r0
);
4223 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins
->opcode
), __FUNCTION__
);
4224 g_assert_not_reached ();
4227 if ((cfg
->opt
& MONO_OPT_BRANCH
) && ((code
- cfg
->native_code
- offset
) > max_len
)) {
4228 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
4229 mono_inst_name (ins
->opcode
), max_len
, (glong
)(code
- cfg
->native_code
- offset
));
4230 g_assert_not_reached ();
4236 last_offset
= offset
;
4239 cfg
->code_len
= code
- cfg
->native_code
;
4243 mono_arch_register_lowlevel_calls (void)
4247 #ifdef __mono_ppc64__
4248 #define patch_load_sequence(ip,val) do {\
4249 guint16 *__load = (guint16*)(ip); \
4250 __load [1] = (((guint64)(val)) >> 48) & 0xffff; \
4251 __load [3] = (((guint64)(val)) >> 32) & 0xffff; \
4252 __load [7] = (((guint64)(val)) >> 16) & 0xffff; \
4253 __load [9] = ((guint64)(val)) & 0xffff; \
4256 #define patch_load_sequence(ip,val) do {\
4257 guint16 *__lis_ori = (guint16*)(ip); \
4258 __lis_ori [1] = (((gulong)(val)) >> 16) & 0xffff; \
4259 __lis_ori [3] = ((gulong)(val)) & 0xffff; \
4264 mono_arch_patch_code (MonoMethod
*method
, MonoDomain
*domain
, guint8
*code
, MonoJumpInfo
*ji
, gboolean run_cctors
)
4266 MonoJumpInfo
*patch_info
;
4268 for (patch_info
= ji
; patch_info
; patch_info
= patch_info
->next
) {
4269 unsigned char *ip
= patch_info
->ip
.i
+ code
;
4270 unsigned char *target
;
4271 gboolean is_fd
= FALSE
;
4273 target
= mono_resolve_patch_target (method
, domain
, code
, patch_info
, run_cctors
);
4275 switch (patch_info
->type
) {
4276 case MONO_PATCH_INFO_IP
:
4277 patch_load_sequence (ip
, ip
);
4279 case MONO_PATCH_INFO_METHOD_REL
:
4280 g_assert_not_reached ();
4281 *((gpointer
*)(ip
)) = code
+ patch_info
->data
.offset
;
4283 case MONO_PATCH_INFO_SWITCH
: {
4284 gpointer
*table
= (gpointer
*)patch_info
->data
.table
->table
;
4287 patch_load_sequence (ip
, table
);
4289 for (i
= 0; i
< patch_info
->data
.table
->table_size
; i
++) {
4290 table
[i
] = (glong
)patch_info
->data
.table
->table
[i
] + code
;
4292 /* we put into the table the absolute address, no need for ppc_patch in this case */
4295 case MONO_PATCH_INFO_METHODCONST
:
4296 case MONO_PATCH_INFO_CLASS
:
4297 case MONO_PATCH_INFO_IMAGE
:
4298 case MONO_PATCH_INFO_FIELD
:
4299 case MONO_PATCH_INFO_VTABLE
:
4300 case MONO_PATCH_INFO_IID
:
4301 case MONO_PATCH_INFO_SFLDA
:
4302 case MONO_PATCH_INFO_LDSTR
:
4303 case MONO_PATCH_INFO_TYPE_FROM_HANDLE
:
4304 case MONO_PATCH_INFO_LDTOKEN
:
4305 /* from OP_AOTCONST : lis + ori */
4306 patch_load_sequence (ip
, target
);
4308 case MONO_PATCH_INFO_R4
:
4309 case MONO_PATCH_INFO_R8
:
4310 g_assert_not_reached ();
4311 *((gconstpointer
*)(ip
+ 2)) = patch_info
->data
.target
;
4313 case MONO_PATCH_INFO_EXC_NAME
:
4314 g_assert_not_reached ();
4315 *((gconstpointer
*)(ip
+ 1)) = patch_info
->data
.name
;
4317 case MONO_PATCH_INFO_NONE
:
4318 case MONO_PATCH_INFO_BB_OVF
:
4319 case MONO_PATCH_INFO_EXC_OVF
:
4320 /* everything is dealt with at epilog output time */
4322 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
4323 case MONO_PATCH_INFO_INTERNAL_METHOD
:
4324 case MONO_PATCH_INFO_ABS
:
4325 case MONO_PATCH_INFO_CLASS_INIT
:
4326 case MONO_PATCH_INFO_RGCTX_FETCH
:
4333 ppc_patch_full (ip
, target
, is_fd
);
4338 * Emit code to save the registers in used_int_regs or the registers in the MonoLMF
4339 * structure at positive offset pos from register base_reg. pos is guaranteed to fit into
4340 * the instruction offset immediate for all the registers.
4343 save_registers (guint8
* code
, int pos
, int base_reg
, gboolean save_lmf
, guint32 used_int_regs
)
4347 for (i
= 13; i
<= 31; i
++) {
4348 if (used_int_regs
& (1 << i
)) {
4349 ppc_store_reg (code
, i
, pos
, base_reg
);
4350 pos
+= sizeof (gulong
);
4354 /* pos is the start of the MonoLMF structure */
4355 int offset
= pos
+ G_STRUCT_OFFSET (MonoLMF
, iregs
);
4356 for (i
= 13; i
<= 31; i
++) {
4357 ppc_store_reg (code
, i
, offset
, base_reg
);
4358 offset
+= sizeof (gulong
);
4360 offset
= pos
+ G_STRUCT_OFFSET (MonoLMF
, fregs
);
4361 for (i
= 14; i
< 32; i
++) {
4362 ppc_stfd (code
, i
, offset
, base_reg
);
4363 offset
+= sizeof (gdouble
);
/*
 * Stack frame layout:
 *
 *   ------------------- sp
 *   	MonoLMF structure or saved registers
 *   -------------------
 *   	spilled regs
 *   -------------------
 *   	locals
 *   -------------------
 *   	optional 8 bytes for tracing
 *   -------------------
 *   	param area             size is cfg->param_area
 *   -------------------
 *   	linkage area           size is PPC_STACK_PARAM_OFFSET
 *   ------------------- sp
 */
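/*
 * Worked example with illustrative values (not taken from a real compile):
 * if cfg->stack_offset is 0x6c and one callee-saved register is spilled
 * (pos == 8), alloc_size starts at 0x74; with a frame alignment of 16 the
 * add-and-mask sequence in the prolog rounds it up to 0x80.
 */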
guint8 *
mono_arch_emit_prolog (MonoCompile *cfg)
{
	MonoMethod *method = cfg->method;
	MonoBasicBlock *bb;
	MonoMethodSignature *sig;
	MonoInst *inst;
	long alloc_size, pos, max_offset;
	int i;
	guint8 *code;
	CallInfo *cinfo;
	int tracing = 0;
	int lmf_offset = 0;
	int tailcall_struct_index;

	if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
		tracing = 1;

	sig = mono_method_signature (method);
	cfg->code_size = MONO_PPC_32_64_CASE (260, 384) + sig->param_count * 20;
	code = cfg->native_code = g_malloc (cfg->code_size);

	if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
		ppc_mflr (code, ppc_r0);
		ppc_store_reg (code, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_sp);
	}

	alloc_size = cfg->stack_offset;
	pos = 0;

	if (!method->save_lmf) {
		for (i = 31; i >= 13; --i) {
			if (cfg->used_int_regs & (1 << i)) {
				pos += sizeof (gulong);
			}
		}
	} else {
		pos += sizeof (MonoLMF);
		lmf_offset = pos;
	}
	alloc_size += pos;

	// align to MONO_ARCH_FRAME_ALIGNMENT bytes
	if (alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) {
		alloc_size += MONO_ARCH_FRAME_ALIGNMENT - 1;
		alloc_size &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
	}

	cfg->stack_usage = alloc_size;
	g_assert ((alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) == 0);

	if (ppc_is_imm16 (-alloc_size)) {
		ppc_store_reg_update (code, ppc_sp, -alloc_size, ppc_sp);
		code = save_registers (code, alloc_size - pos, ppc_sp, method->save_lmf, cfg->used_int_regs);
	} else {
		ppc_addi (code, ppc_r11, ppc_sp, -pos);
		ppc_load (code, ppc_r0, -alloc_size);
		ppc_store_reg_update_indexed (code, ppc_sp, ppc_sp, ppc_r0);
		code = save_registers (code, 0, ppc_r11, method->save_lmf, cfg->used_int_regs);
	}

	if (cfg->frame_reg != ppc_sp)
		ppc_mr (code, cfg->frame_reg, ppc_sp);

	/* store runtime generic context */
	if (cfg->rgctx_var) {
		g_assert (cfg->rgctx_var->opcode == OP_REGOFFSET &&
				(cfg->rgctx_var->inst_basereg == ppc_r1 || cfg->rgctx_var->inst_basereg == ppc_r31));

		ppc_store_reg (code, MONO_ARCH_RGCTX_REG, cfg->rgctx_var->inst_offset, cfg->rgctx_var->inst_basereg);
	}

	/* compute max_offset in order to use short forward jumps
	 * we always do it on ppc because the immediate displacement
	 * for jumps is too small
	 */
	max_offset = 0;
	for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
		MonoInst *ins;
		bb->max_offset = max_offset;

		if (cfg->prof_options & MONO_PROFILE_COVERAGE)
			max_offset += 6;

		MONO_BB_FOR_EACH_INS (bb, ins)
			max_offset += ins_native_length (cfg, ins);
	}

	/* load arguments allocated to register from the stack */
	pos = 0;

	cinfo = calculate_sizes (sig, sig->pinvoke);

	if (MONO_TYPE_ISSTRUCT (sig->ret)) {
		ArgInfo *ainfo = &cinfo->ret;

		inst = cfg->vret_addr;
		g_assert (inst);

		if (ppc_is_imm16 (inst->inst_offset)) {
			ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
		} else {
			ppc_load (code, ppc_r11, inst->inst_offset);
			ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
		}
	}

	tailcall_struct_index = 0;
	for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
		ArgInfo *ainfo = cinfo->args + i;
		inst = cfg->args [pos];

		if (cfg->verbose_level > 2)
			g_print ("Saving argument %d (type: %d)\n", i, ainfo->regtype);
		if (inst->opcode == OP_REGVAR) {
			if (ainfo->regtype == RegTypeGeneral)
				ppc_mr (code, inst->dreg, ainfo->reg);
			else if (ainfo->regtype == RegTypeFP)
				ppc_fmr (code, inst->dreg, ainfo->reg);
			else if (ainfo->regtype == RegTypeBase) {
				ppc_load_reg (code, ppc_r11, 0, ppc_sp);
				ppc_load_reg (code, inst->dreg, ainfo->offset, ppc_r11);
			} else
				g_assert_not_reached ();

			if (cfg->verbose_level > 2)
				g_print ("Argument %ld assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
		} else {
			/* the argument should be put on the stack: FIXME handle size != word */
			if (ainfo->regtype == RegTypeGeneral) {
				switch (ainfo->size) {
				case 1:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_stb (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_stbx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
					}
					break;
				case 2:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_sth (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_sthx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
					}
					break;
#ifdef __mono_ppc64__
				case 4:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_stwx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
					}
					break;
#else
				case 8:
					if (ppc_is_imm16 (inst->inst_offset + 4)) {
						ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
						ppc_stw (code, ainfo->reg + 1, inst->inst_offset + 4, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_add (code, ppc_r11, ppc_r11, inst->inst_basereg);
						ppc_stw (code, ainfo->reg, 0, ppc_r11);
						ppc_stw (code, ainfo->reg + 1, 4, ppc_r11);
					}
					break;
#endif
				default:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
					}
					break;
				}
			} else if (ainfo->regtype == RegTypeBase) {
				/* load the previous stack pointer in r11 */
				ppc_load_reg (code, ppc_r11, 0, ppc_sp);
				ppc_load_reg (code, ppc_r0, ainfo->offset, ppc_r11);
				switch (ainfo->size) {
				case 1:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_stb (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_stbx (code, ppc_r0, ppc_r11, inst->inst_basereg);
					}
					break;
				case 2:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_sth (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_sthx (code, ppc_r0, ppc_r11, inst->inst_basereg);
					}
					break;
#ifdef __mono_ppc64__
				case 4:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_stwx (code, ppc_r0, ppc_r11, inst->inst_basereg);
					}
					break;
#else
				case 8:
					if (ppc_is_imm16 (inst->inst_offset + 4)) {
						ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
						ppc_lwz (code, ppc_r0, ainfo->offset + 4, ppc_r11);
						ppc_stw (code, ppc_r0, inst->inst_offset + 4, inst->inst_basereg);
					} else {
						/* FIXME */
						g_assert_not_reached ();
					}
					break;
#endif
				default:
					if (ppc_is_imm16 (inst->inst_offset)) {
						ppc_store_reg (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
					} else {
						ppc_load (code, ppc_r11, inst->inst_offset);
						ppc_store_reg_indexed (code, ppc_r0, ppc_r11, inst->inst_basereg);
					}
					break;
				}
			} else if (ainfo->regtype == RegTypeFP) {
				g_assert (ppc_is_imm16 (inst->inst_offset));
				if (ainfo->size == 8)
					ppc_stfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
				else if (ainfo->size == 4)
					ppc_stfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
				else
					g_assert_not_reached ();
			} else if (ainfo->regtype == RegTypeStructByVal) {
				int doffset = inst->inst_offset;
				int soffset = 0;
				int cur_reg;
				int size = 0;

				g_assert (ppc_is_imm16 (inst->inst_offset));
				g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->size * sizeof (gpointer)));
				/* FIXME: what if there is no class? */
				if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
					size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
				for (cur_reg = 0; cur_reg < ainfo->size; ++cur_reg) {
#if __APPLE__
					/*
					 * Darwin handles 1 and 2 byte
					 * structs specially by
					 * loading h/b into the arg
					 * register. Only done for
					 * pinvokes.
					 */
					if (size == 2)
						ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
					else if (size == 1)
						ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
					else
#endif
					{
#ifdef __mono_ppc64__
						if (ainfo->bytes) {
							g_assert (cur_reg == 0);
							ppc_sldi (code, ppc_r0, ainfo->reg,
									(sizeof (gpointer) - ainfo->bytes) * 8);
							ppc_store_reg (code, ppc_r0, doffset, inst->inst_basereg);
						} else
#endif
						{
							ppc_store_reg (code, ainfo->reg + cur_reg, doffset,
									inst->inst_basereg);
						}
					}
					soffset += sizeof (gpointer);
					doffset += sizeof (gpointer);
				}
				if (ainfo->vtsize) {
					/* FIXME: we need to do the shifting here, too */
					/* load the previous stack pointer in r11 (r0 gets overwritten by the memcpy) */
					ppc_load_reg (code, ppc_r11, 0, ppc_sp);
					if ((size & MONO_PPC_32_64_CASE (3, 7)) != 0) {
						code = emit_memcpy (code, size - soffset,
								inst->inst_basereg, doffset,
								ppc_r11, ainfo->offset + soffset);
					} else {
						code = emit_memcpy (code, ainfo->vtsize * sizeof (gpointer),
								inst->inst_basereg, doffset,
								ppc_r11, ainfo->offset + soffset);
					}
				}
			} else if (ainfo->regtype == RegTypeStructByAddr) {
				/* if it was originally a RegTypeBase */
				if (ainfo->offset) {
					/* load the previous stack pointer in r11 */
					ppc_load_reg (code, ppc_r11, 0, ppc_sp);
					ppc_load_reg (code, ppc_r11, ainfo->offset, ppc_r11);
				} else {
					ppc_mr (code, ppc_r11, ainfo->reg);
				}

				if (cfg->tailcall_valuetype_addrs) {
					MonoInst *addr = cfg->tailcall_valuetype_addrs [tailcall_struct_index];

					g_assert (ppc_is_imm16 (addr->inst_offset));
					ppc_store_reg (code, ppc_r11, addr->inst_offset, addr->inst_basereg);

					tailcall_struct_index++;
				}

				g_assert (ppc_is_imm16 (inst->inst_offset));
				code = emit_memcpy (code, ainfo->vtsize, inst->inst_basereg, inst->inst_offset, ppc_r11, 0);
				/*g_print ("copy in %s: %d bytes from %d to offset: %d\n", method->name, ainfo->vtsize, ainfo->reg, inst->inst_offset);*/
			} else
				g_assert_not_reached ();
		}
		pos++;
	}

	if (method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
		ppc_load (code, ppc_r3, cfg->domain);
		mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_jit_thread_attach");
		if (FORCE_INDIR_CALL || cfg->method->dynamic) {
			ppc_load_func (code, ppc_r0, 0);
			ppc_mtlr (code, ppc_r0);
			ppc_blrl (code);
		} else {
			ppc_bl (code, 0);
		}
	}

	if (method->save_lmf) {
		if (lmf_pthread_key != -1) {
			emit_tls_access (code, ppc_r3, lmf_pthread_key);
			if (tls_mode != TLS_MODE_NPTL && G_STRUCT_OFFSET (MonoJitTlsData, lmf))
				ppc_addi (code, ppc_r3, ppc_r3, G_STRUCT_OFFSET (MonoJitTlsData, lmf));
		} else {
			mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
					(gpointer)"mono_get_lmf_addr");
			if (FORCE_INDIR_CALL || cfg->method->dynamic) {
				ppc_load_func (code, ppc_r0, 0);
				ppc_mtlr (code, ppc_r0);
				ppc_blrl (code);
			} else {
				ppc_bl (code, 0);
			}
		}
		/* we build the MonoLMF structure on the stack - see mini-ppc.h */
		/* lmf_offset is the offset from the previous stack pointer,
		 * alloc_size is the total stack space allocated, so the offset
		 * of MonoLMF from the current stack ptr is alloc_size - lmf_offset.
		 * The pointer to the struct is put in ppc_r11 (new_lmf).
		 * The callee-saved registers are already in the MonoLMF structure.
		 */
		ppc_addi (code, ppc_r11, ppc_sp, alloc_size - lmf_offset);
		/* ppc_r3 is the result from mono_get_lmf_addr () */
		ppc_store_reg (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
		/* new_lmf->previous_lmf = *lmf_addr */
		ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
		ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
		/* *(lmf_addr) = r11 */
		ppc_store_reg (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
		/* save method info */
		ppc_load (code, ppc_r0, method);
		ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
		ppc_store_reg (code, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
		/* save the current IP */
		mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
#ifdef __mono_ppc64__
		ppc_load_sequence (code, ppc_r0, (gulong)0x0101010101010101L);
#else
		ppc_load_sequence (code, ppc_r0, (gulong)0x01010101L);
#endif
		ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
	}

	if (tracing)
		code = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code, TRUE);

	cfg->code_len = code - cfg->native_code;
	g_assert (cfg->code_len <= cfg->code_size);
	g_free (cinfo);

	return code;
}
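/*
 * The epilog below undoes the LMF linking done in the prolog: it reloads
 * previous_lmf and lmf_addr from the MonoLMF area on the stack, stores
 * previous_lmf back through lmf_addr, and only then restores the callee-saved
 * registers and pops the frame.
 */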
void
mono_arch_emit_epilog (MonoCompile *cfg)
{
	MonoMethod *method = cfg->method;
	int pos, i;
	int max_epilog_size = 16 + 20*4;
	guint8 *code;

	if (cfg->method->save_lmf)
		max_epilog_size += 128;

	if (mono_jit_trace_calls != NULL)
		max_epilog_size += 50;

	if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
		max_epilog_size += 50;

	while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
		cfg->code_size *= 2;
		cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
		mono_jit_stats.code_reallocs++;
	}

	/*
	 * Keep in sync with OP_JMP
	 */
	code = cfg->native_code + cfg->code_len;
	pos = 0;

	if (mono_jit_trace_calls != NULL && mono_trace_eval (method)) {
		code = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, code, TRUE);
	}

	if (method->save_lmf) {
		int lmf_offset;
		pos += sizeof (MonoLMF);
		lmf_offset = pos;
		/* save the frame reg in r8 */
		ppc_mr (code, ppc_r8, cfg->frame_reg);
		ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage - lmf_offset);
		/* r5 = previous_lmf */
		ppc_load_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
		/* r6 = lmf_addr */
		ppc_load_reg (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
		/* *(lmf_addr) = previous_lmf */
		ppc_store_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
		/* FIXME: speedup: there is no actual need to restore the registers if
		 * we didn't actually change them (idea from Zoltan).
		 */
		/* restore the callee-saved integer registers from the LMF */
		ppc_load_multiple_regs (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
		/*for (i = 14; i < 32; i++) {
			ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
		}*/
		g_assert (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET));
		/* use the saved copy of the frame reg in r8 */
		if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
			ppc_load_reg (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, ppc_r8);
			ppc_mtlr (code, ppc_r0);
		}
		ppc_addic (code, ppc_sp, ppc_r8, cfg->stack_usage);
	} else {
		if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
			long return_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
			if (ppc_is_imm16 (return_offset)) {
				ppc_load_reg (code, ppc_r0, return_offset, cfg->frame_reg);
			} else {
				ppc_load (code, ppc_r11, return_offset);
				ppc_load_reg_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
			}
			ppc_mtlr (code, ppc_r0);
		}
		if (ppc_is_imm16 (cfg->stack_usage)) {
			int offset = cfg->stack_usage;
			for (i = 13; i <= 31; i++) {
				if (cfg->used_int_regs & (1 << i))
					offset -= sizeof (gulong);
			}
			if (cfg->frame_reg != ppc_sp)
				ppc_mr (code, ppc_r11, cfg->frame_reg);
			/* note r31 (possibly the frame register) is restored last */
			for (i = 13; i <= 31; i++) {
				if (cfg->used_int_regs & (1 << i)) {
					ppc_load_reg (code, i, offset, cfg->frame_reg);
					offset += sizeof (gulong);
				}
			}
			if (cfg->frame_reg != ppc_sp)
				ppc_addi (code, ppc_sp, ppc_r11, cfg->stack_usage);
			else
				ppc_addi (code, ppc_sp, ppc_sp, cfg->stack_usage);
		} else {
			ppc_load (code, ppc_r11, cfg->stack_usage);
			if (cfg->used_int_regs) {
				ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
				for (i = 31; i >= 13; --i) {
					if (cfg->used_int_regs & (1 << i)) {
						pos += sizeof (gulong);
						ppc_load_reg (code, i, -pos, ppc_r11);
					}
				}
				ppc_mr (code, ppc_sp, ppc_r11);
			} else {
				ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r11);
			}
		}
	}
	ppc_blr (code);

	cfg->code_len = code - cfg->native_code;

	g_assert (cfg->code_len < cfg->code_size);
}
/* remove once throw_exception_by_name is eliminated */
static int
exception_id_by_name (const char *name)
{
	if (strcmp (name, "IndexOutOfRangeException") == 0)
		return MONO_EXC_INDEX_OUT_OF_RANGE;
	if (strcmp (name, "OverflowException") == 0)
		return MONO_EXC_OVERFLOW;
	if (strcmp (name, "ArithmeticException") == 0)
		return MONO_EXC_ARITHMETIC;
	if (strcmp (name, "DivideByZeroException") == 0)
		return MONO_EXC_DIVIDE_BY_ZERO;
	if (strcmp (name, "InvalidCastException") == 0)
		return MONO_EXC_INVALID_CAST;
	if (strcmp (name, "NullReferenceException") == 0)
		return MONO_EXC_NULL_REF;
	if (strcmp (name, "ArrayTypeMismatchException") == 0)
		return MONO_EXC_ARRAY_TYPE_MISMATCH;
	g_error ("Unknown intrinsic exception %s\n", name);
	return -1;
}
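/*
 * mono_arch_emit_exceptions below indexes the exc_throw_pos/exc_throw_found
 * tables with these ids, so the throw sequence for each exception type is
 * emitted at most once per method and subsequent throw sites only need a
 * branch to it.
 */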
void
mono_arch_emit_exceptions (MonoCompile *cfg)
{
	MonoJumpInfo *patch_info;
	int i;
	guint8 *code;
	const guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM] = {NULL};
	guint8 exc_throw_found [MONO_EXC_INTRINS_NUM] = {0};
	int max_epilog_size = 50;

	/* count the number of exception infos */

	/*
	 * make sure we have enough space for exceptions
	 * 24 is the simulated call to throw_exception_by_name
	 */
	for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
		if (patch_info->type == MONO_PATCH_INFO_EXC) {
			i = exception_id_by_name (patch_info->data.target);
			if (!exc_throw_found [i]) {
				max_epilog_size += 24;
				exc_throw_found [i] = TRUE;
			}
		} else if (patch_info->type == MONO_PATCH_INFO_BB_OVF)
			max_epilog_size += 12;
		else if (patch_info->type == MONO_PATCH_INFO_EXC_OVF) {
			MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
			i = exception_id_by_name (ovfj->data.exception);
			if (!exc_throw_found [i]) {
				max_epilog_size += 24;
				exc_throw_found [i] = TRUE;
			}
			max_epilog_size += 8;
		}
	}

	while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
		cfg->code_size *= 2;
		cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
		mono_jit_stats.code_reallocs++;
	}

	code = cfg->native_code + cfg->code_len;

	/* add code to raise exceptions */
	for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
		switch (patch_info->type) {
		case MONO_PATCH_INFO_BB_OVF: {
			MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
			unsigned char *ip = patch_info->ip.i + cfg->native_code;
			/* patch the initial jump */
			ppc_patch (ip, code);
			ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 2);
			ppc_b (code, 0);
			ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
			/* jump back to the true target */
			ppc_b (code, 0);
			ip = ovfj->data.bb->native_offset + cfg->native_code;
			ppc_patch (code - 4, ip);
			break;
		}
		case MONO_PATCH_INFO_EXC_OVF: {
			MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
			MonoJumpInfo *newji;
			unsigned char *ip = patch_info->ip.i + cfg->native_code;
			unsigned char *bcl = code;
			/* patch the initial jump: we arrived here with a call */
			ppc_patch (ip, code);
			ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 0);
			ppc_b (code, 0);
			ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
			/* patch the conditional jump to the right handler */
			/* make it processed next */
			newji = mono_mempool_alloc (cfg->mempool, sizeof (MonoJumpInfo));
			newji->type = MONO_PATCH_INFO_EXC;
			newji->ip.i = bcl - cfg->native_code;
			newji->data.target = ovfj->data.exception;
			newji->next = patch_info->next;
			patch_info->next = newji;
			break;
		}
		case MONO_PATCH_INFO_EXC: {
			unsigned char *ip = patch_info->ip.i + cfg->native_code;
			i = exception_id_by_name (patch_info->data.target);
			if (exc_throw_pos [i]) {
				ppc_patch (ip, exc_throw_pos [i]);
				patch_info->type = MONO_PATCH_INFO_NONE;
				break;
			} else {
				exc_throw_pos [i] = code;
			}
			ppc_patch (ip, code);
			/*mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);*/
			ppc_load (code, ppc_r3, patch_info->data.target);
			/* we got here from a conditional call, so the calling ip is set in lr already */
			patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
			patch_info->data.name = "mono_arch_throw_exception_by_name";
			patch_info->ip.i = code - cfg->native_code;
			if (FORCE_INDIR_CALL || cfg->method->dynamic) {
				ppc_load_func (code, ppc_r0, 0);
				ppc_mtctr (code, ppc_r0);
				ppc_bcctr (code, PPC_BR_ALWAYS, 0);
			} else {
				ppc_bl (code, 0);
			}
			break;
		}
		default:
			/* do nothing */
			break;
		}
	}

	cfg->code_len = code - cfg->native_code;

	g_assert (cfg->code_len < cfg->code_size);
}
static int
try_offset_access (void *value, guint32 idx)
{
	register void* me __asm__ ("r2");
	void ***p = (void***)((char*)me + 284);
	int idx1 = idx / 32;
	int idx2 = idx % 32;
	if (!p [idx1])
		return 0;
	if (value != p [idx1][idx2])
		return 0;
	return 1;
}
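/*
 * Worked example for the lookup above (illustrative): with idx == 70 we get
 * idx1 == 2 and idx2 == 6, so the value is expected at
 * ((void***)((char*)r2 + 284))[2][6]; try_offset_access just checks that a
 * known value really is reachable through that double indirection.
 */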
static void
setup_tls_access (void)
{
	guint32 ptk;

#if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
	size_t conf_size = 0;
	char confbuf [128];
#else
	/* FIXME for darwin */
	guint32 *ins, *code;
	guint32 cmplwi_1023, li_0x48, blr_ins;
#endif

	if (tls_mode == TLS_MODE_FAILED)
		return;

	if (g_getenv ("MONO_NO_TLS")) {
		tls_mode = TLS_MODE_FAILED;
		return;
	}

	if (tls_mode == TLS_MODE_DETECT) {
#if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
		conf_size = confstr ( _CS_GNU_LIBPTHREAD_VERSION, confbuf, sizeof(confbuf));
		if ((conf_size > 4) && (strncmp (confbuf, "NPTL", 4) == 0))
			tls_mode = TLS_MODE_NPTL;
		else
			tls_mode = TLS_MODE_LTHREADS;
#else
		ins = (guint32*)pthread_getspecific;
		/* uncond branch to the real method */
		if ((*ins >> 26) == 18) {
			gint32 val;
			val = (*ins & ~3) << 6;
			val >>= 6;
			if (*ins & 2)
				ins = (guint32*)(long)val;
			else
				ins = (guint32*) ((char*)ins + val);
		}
		code = &cmplwi_1023;
		ppc_cmpli (code, 0, 0, ppc_r3, 1023);
		code = &li_0x48;
		ppc_li (code, ppc_r4, 0x48);
		code = &blr_ins;
		ppc_blr (code);
		if (*ins == cmplwi_1023) {
			int found_lwz_284 = 0;
			for (ptk = 0; ptk < 20; ++ptk) {
				++ins;
				if (!*ins || *ins == blr_ins)
					break;
				if ((guint16)*ins == 284 && (*ins >> 26) == 32) {
					found_lwz_284 = 1;
					break;
				}
			}
			if (!found_lwz_284) {
				tls_mode = TLS_MODE_FAILED;
				return;
			}
			tls_mode = TLS_MODE_LTHREADS;
		} else if (*ins == li_0x48) {
			++ins;
			/* uncond branch to the real method */
			if ((*ins >> 26) == 18) {
				gint32 val;
				val = (*ins & ~3) << 6;
				val >>= 6;
				if (*ins & 2)
					ins = (guint32*)(long)val;
				else
					ins = (guint32*) ((char*)ins + val);
				code = (guint32*)&val;
				ppc_li (code, ppc_r0, 0x7FF2);
				if (ins [1] == val) {
					/* Darwin on G4, implement */
					tls_mode = TLS_MODE_FAILED;
					return;
				} else {
					code = (guint32*)&val;
					ppc_mfspr (code, ppc_r3, 104);
					if (ins [1] != val) {
						tls_mode = TLS_MODE_FAILED;
						return;
					}
					tls_mode = TLS_MODE_DARWIN_G5;
				}
			} else {
				tls_mode = TLS_MODE_FAILED;
				return;
			}
		} else {
			tls_mode = TLS_MODE_FAILED;
			return;
		}
#endif
	}
	if ((monodomain_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
		monodomain_key = mono_domain_get_tls_offset();
	}
	/* if not TLS_MODE_NPTL or local dynamic (as indicated by
	   mono_domain_get_tls_offset returning -1) then use keyed access. */
	if (monodomain_key == -1) {
		ptk = mono_domain_get_tls_key ();
		if (ptk < 1024) {
			ptk = mono_pthread_key_for_tls (ptk);
			if (ptk < 1024)
				monodomain_key = ptk;
		}
	}

	if ((lmf_pthread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
		lmf_pthread_key = mono_get_lmf_addr_tls_offset();
	}
	/* if not TLS_MODE_NPTL or local dynamic (as indicated by
	   mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
	if (lmf_pthread_key == -1) {
		ptk = mono_pthread_key_for_tls (mono_jit_tls_id);
		if (ptk < 1024) {
			/*g_print ("MonoLMF at: %d\n", ptk);*/
			/*if (!try_offset_access (mono_get_lmf_addr (), ptk)) {
				init_tls_failed = 1;
				return;
			}*/
			lmf_pthread_key = ptk;
		}
	}

	if ((monothread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
		monothread_key = mono_thread_get_tls_offset();
	}
	/* if not TLS_MODE_NPTL or local dynamic (as indicated by
	   mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
	if (monothread_key == -1) {
		ptk = mono_thread_get_tls_key ();
		if (ptk < 1024) {
			ptk = mono_pthread_key_for_tls (ptk);
			if (ptk < 1024) {
				monothread_key = ptk;
				/*g_print ("thread inited: %d\n", ptk);*/
			}
		} else {
			/*g_print ("thread not inited yet %d\n", ptk);*/
		}
	}
}
void
mono_arch_setup_jit_tls_data (MonoJitTlsData *tls)
{
	setup_tls_access ();
}

void
mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
{
}
#ifdef MONO_ARCH_HAVE_IMT

#define CMP_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
#define BR_SIZE 4
#define LOADSTORE_SIZE 4
#define JUMP_IMM_SIZE 12
#define JUMP_IMM32_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 8)
#define ENABLE_WRONG_METHOD_CHECK 0
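/*
 * These sizes feed the chunk_size accounting in mono_arch_build_imt_thunk
 * below.  Roughly: CMP_SIZE is a full immediate load of the key plus the
 * compare, BR_SIZE a single conditional branch, and the JUMP_* sizes cover
 * loading the target (from the vtable slot or as an immediate) plus the
 * mtctr/bcctr pair.  They only reserve space; the final g_assert checks that
 * the emitted code stayed within the estimate.
 */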
/*
 * LOCKING: called with the domain lock held
 */
gpointer
mono_arch_build_imt_thunk (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
	gpointer fail_tramp)
{
	int i;
	int size = 0;
	guint8 *code, *start;

	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];
		if (item->is_equals) {
			if (item->check_target_idx) {
				if (!item->compare_done)
					item->chunk_size += CMP_SIZE;
				if (item->has_target_code)
					item->chunk_size += BR_SIZE + JUMP_IMM32_SIZE;
				else
					item->chunk_size += LOADSTORE_SIZE + BR_SIZE + JUMP_IMM_SIZE;
			} else {
				if (fail_tramp) {
					item->chunk_size += CMP_SIZE + BR_SIZE + JUMP_IMM32_SIZE * 2;
					if (!item->has_target_code)
						item->chunk_size += LOADSTORE_SIZE;
				} else {
					item->chunk_size += LOADSTORE_SIZE + JUMP_IMM_SIZE;
#if ENABLE_WRONG_METHOD_CHECK
					item->chunk_size += CMP_SIZE + BR_SIZE + 4;
#endif
				}
			}
		} else {
			item->chunk_size += CMP_SIZE + BR_SIZE;
			imt_entries [item->check_target_idx]->compare_done = TRUE;
		}
		size += item->chunk_size;
	}
	if (fail_tramp) {
		code = mono_method_alloc_generic_virtual_thunk (domain, size);
	} else {
		/* the initial load of the vtable address */
		size += PPC_LOAD_SEQUENCE_LENGTH + LOADSTORE_SIZE;
		code = mono_domain_code_reserve (domain, size);
	}
	start = code;

	/*
	 * We need to save and restore r11 because it might be
	 * used by the caller as the vtable register, so
	 * clobbering it will trip up the magic trampoline.
	 *
	 * FIXME: Get rid of this by making sure that r11 is
	 * not used as the vtable register in interface calls.
	 */
	ppc_store_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
	ppc_load (code, ppc_r11, (gulong)(& (vtable->vtable [0])));

	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];
		item->code_target = code;
		if (item->is_equals) {
			if (item->check_target_idx) {
				if (!item->compare_done) {
					ppc_load (code, ppc_r0, (gulong)item->key);
					ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
				}
				item->jmp_code = code;
				ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
				if (item->has_target_code) {
					ppc_load (code, ppc_r0, item->value.target_code);
				} else {
					ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
					ppc_load_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
				}
				ppc_mtctr (code, ppc_r0);
				ppc_bcctr (code, PPC_BR_ALWAYS, 0);
			} else {
				if (fail_tramp) {
					ppc_load (code, ppc_r0, (gulong)item->key);
					ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
					item->jmp_code = code;
					ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
					if (item->has_target_code) {
						ppc_load (code, ppc_r0, item->value.target_code);
					} else {
						ppc_load (code, ppc_r0, & (vtable->vtable [item->value.vtable_slot]));
						ppc_load_reg_indexed (code, ppc_r0, 0, ppc_r0);
					}
					ppc_mtctr (code, ppc_r0);
					ppc_bcctr (code, PPC_BR_ALWAYS, 0);
					ppc_patch (item->jmp_code, code);
					ppc_load (code, ppc_r0, fail_tramp);
					ppc_mtctr (code, ppc_r0);
					ppc_bcctr (code, PPC_BR_ALWAYS, 0);
					item->jmp_code = NULL;
				} else {
					/* enable the commented code to assert on wrong method */
#if ENABLE_WRONG_METHOD_CHECK
					ppc_load (code, ppc_r0, (guint32)item->key);
					ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
					item->jmp_code = code;
					ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
#endif
					ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
					ppc_load_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
					ppc_mtctr (code, ppc_r0);
					ppc_bcctr (code, PPC_BR_ALWAYS, 0);
#if ENABLE_WRONG_METHOD_CHECK
					ppc_patch (item->jmp_code, code);
					item->jmp_code = NULL;
#endif
				}
			}
		} else {
			ppc_load (code, ppc_r0, (gulong)item->key);
			ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
			item->jmp_code = code;
			ppc_bc (code, PPC_BR_FALSE, PPC_BR_LT, 0);
		}
	}
	/* patch the branches to get to the target items */
	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];
		if (item->jmp_code) {
			if (item->check_target_idx) {
				ppc_patch (item->jmp_code, imt_entries [item->check_target_idx]->code_target);
			}
		}
	}

	mono_stats.imt_thunks_size += code - start;
	g_assert (code - start <= size);
	mono_arch_flush_icache (start, size);
	return start;
}
MonoMethod*
mono_arch_find_imt_method (gpointer *regs, guint8 *code)
{
	return (MonoMethod*) regs [MONO_ARCH_IMT_REG];
}

MonoObject*
mono_arch_find_this_argument (gpointer *regs, MonoMethod *method, MonoGenericSharingContext *gsctx)
{
	return mono_arch_get_this_arg_from_call (gsctx, mono_method_signature (method), (gssize*)regs, NULL);
}
#endif

MonoVTable*
mono_arch_find_static_call_vtable (gpointer *regs, guint8 *code)
{
	return (MonoVTable*) regs [MONO_ARCH_RGCTX_REG];
}
MonoInst*
mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
{
	return NULL;
}

gboolean
mono_arch_print_tree (MonoInst *tree, int arity)
{
	return 0;
}

MonoInst* mono_arch_get_domain_intrinsic (MonoCompile* cfg)
{
	MonoInst* ins;

	setup_tls_access ();
	if (monodomain_key == -1)
		return NULL;

	MONO_INST_NEW (cfg, ins, OP_TLS_GET);
	ins->inst_offset = monodomain_key;
	return ins;
}

MonoInst*
mono_arch_get_thread_intrinsic (MonoCompile* cfg)
{
	MonoInst* ins;

	setup_tls_access ();
	if (monothread_key == -1)
		return NULL;

	MONO_INST_NEW (cfg, ins, OP_TLS_GET);
	ins->inst_offset = monothread_key;
	return ins;
}

gpointer
mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
{
	if (reg == ppc_r1)
		return MONO_CONTEXT_GET_SP (ctx);

	g_assert (reg >= ppc_r13);

	return (gpointer)ctx->regs [reg - ppc_r13];
}