/*
 * tramp-ppc.c: JIT trampoline code for PowerPC
 *
 * Authors:
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Paolo Molaro (lupus@ximian.com)
 *   Carlos Valiente <yo@virutass.net>
 *   Andreas Faerber <andreas.faerber@web.de>
 *
 * (C) 2001 Ximian, Inc.
 * (C) 2007-2008 Andreas Faerber
 */

#include <config.h>
#include <glib.h>

#include <mono/metadata/appdomain.h>
#include <mono/metadata/marshal.h>
#include <mono/metadata/tabledefs.h>
#include <mono/arch/ppc/ppc-codegen.h>

#include "mini.h"
#include "mini-ppc.h"
/*
 * Return the instruction to jump from code to target, 0 if not
 * reachable with a single instruction
 */
static guint32
branch_for_target_reachable (guint8 *branch, guint8 *target)
{
	gint diff = target - branch;
	g_assert ((diff & 3) == 0);
	if (diff >= 0) {
		if (diff <= 33554431)
			return (18 << 26) | (diff);
	} else {
		/* diff between 0 and -33554432 */
		if (diff >= -33554432)
			return (18 << 26) | (diff & ~0xfc000000);
	}
	return 0;
}
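/*
 * Illustrative note (an addition, not part of the original code): the value
 * built above is an unconditional PowerPC 'b' instruction, whose primary
 * opcode 18 sits in the top six bits and whose signed 26-bit byte
 * displacement (already a multiple of 4, so AA = LK = 0) fills the rest.
 * As a sketch with hypothetical addresses: branching from 0x1000 to 0x1010
 * gives diff = 0x10, so the encoding is (18 << 26) | 0x10 = 0x48000010,
 * i.e. "b +0x10". Targets farther than about +/-32 MB cannot be reached
 * with one instruction, and 0 is returned instead.
 */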
/*
 * get_unbox_trampoline:
 * @gsctx: the generic sharing context
 * @m: method pointer
 * @addr: pointer to native code for @m
 *
 * When value type methods are called through the vtable we need to unbox the
 * 'this' argument. This method returns a pointer to a trampoline which does
 * the unboxing before calling the method.
 */
gpointer
mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
{
	guint8 *code, *start;
	int this_pos = 3;
	guint32 short_branch;
	MonoDomain *domain = mono_domain_get ();
	int size = MONO_PPC_32_64_CASE (20, 32) + PPC_FTNPTR_SIZE;

	addr = mono_get_addr_from_ftnptr (addr);

	if (MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
		this_pos = 4;

	mono_domain_lock (domain);
	start = code = mono_domain_code_reserve (domain, size);
	code = mono_ppc_create_pre_code_ftnptr (code);
	short_branch = branch_for_target_reachable (code + 4, addr);
	if (short_branch)
		mono_domain_code_commit (domain, code, size, 8);
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load (code, ppc_r0, addr);
		ppc_mtctr (code, ppc_r0);
		ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
		ppc_bcctr (code, 20, 0);
	}
	mono_arch_flush_icache (start, code - start);
	g_assert ((code - start) <= size);
	/*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
	g_print ("unbox code is at %p for method at %p\n", start, addr);*/

	return start;
}
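/*
 * Sketch of the code emitted above (illustrative only, assuming the 32-bit
 * short-branch case, a non-struct return so 'this' is in r3, and a typical
 * 8-byte MonoObject header):
 *
 *     addi r3, r3, 8      ; step over the MonoObject header so 'this' points at the data
 *     b    <addr>         ; tail-jump into the compiled method
 *
 * The long form instead loads 'addr' into r0, moves it to ctr and ends with
 * bctr, keeping the same addi adjustment of the 'this' register.
 */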
void
mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
{
	guint32 *code = (guint32*)code_ptr;

	addr = mono_get_addr_from_ftnptr (addr);

	/* This is the 'blrl' instruction */
	--code;

	/*
	 * Note that methods are also called with the bl opcode.
	 */
	if (((*code) >> 26) == 18) {
		/*g_print ("direct patching\n");*/
		ppc_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)code, 4);
		return;
	}

	/* Sanity check */
	g_assert (mono_ppc_is_direct_call_sequence (code));

	ppc_patch ((guint8*)code, addr);
}
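/*
 * Reader's note (added, not original): the test above checks the six-bit
 * primary opcode of the word just before the return address; opcode 18 is
 * the PowerPC b/bl family. For example, a hypothetical "bl +0x100" call site
 * encodes as 0x48000101, and 0x48000101 >> 26 == 18, so it is patched in
 * place. Anything else is expected to be the longer indirect call sequence
 * (load address, mtlr, blrl) that mono_ppc_is_direct_call_sequence ()
 * sanity-checks before the address load itself is patched.
 */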

void
mono_arch_patch_plt_entry (guint8 *code, guint8 *addr)
{
	g_assert_not_reached ();
}

void
mono_arch_nullify_class_init_trampoline (guint8 *code, gssize *regs)
{
	return;
}

void
mono_arch_nullify_plt_entry (guint8 *code)
{
	g_assert_not_reached ();
}

/* Stack size for trampoline function
 * PPC_MINIMAL_STACK_SIZE + 16 (args + alignment to ppc_magic_trampoline)
 * + MonoLMF + 14 fp regs + 13 gregs + alignment
 * #define STACK (PPC_MINIMAL_STACK_SIZE + 4 * sizeof (gulong) + sizeof (MonoLMF) + 14 * sizeof (double) + 13 * (sizeof (gulong)))
 * STACK would be 444 for 32 bit darwin
 */
#ifdef __mono_ppc64__
#define STACK (PPC_MINIMAL_STACK_SIZE + 4 * sizeof (gulong) + sizeof (MonoLMF) + 14 * sizeof (double) + 13 * sizeof (gulong))
#else
#define STACK (448)
#endif
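/*
 * Back-of-the-envelope check (added for illustration, relying on the 32-bit
 * Darwin figure quoted in the comment above): the commented-out formula comes
 * to 444 bytes there, and 448 looks like that value rounded up to the next
 * multiple of 16 (448 = 28 * 16), which keeps the stack 16-byte aligned after
 * the frame allocation done by the generic trampoline below.
 */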

/* Method-specific trampoline code fragment size */
#define METHOD_TRAMPOLINE_SIZE 64

/* Jump-specific trampoline code fragment size */
#define JUMP_TRAMPOLINE_SIZE 64

/*
 * Stack frame description when the generic trampoline is called.
 * caller frame
 * --------------------
 *  MonoLMF
 *  -------------------
 *  Saved FP registers 0-13
 *  -------------------
 *  Saved general registers 0-12
 *  -------------------
 *  param area for 3 args to ppc_magic_trampoline
 *  -------------------
 *  linkage area
 *  -------------------
 */
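/*
 * Offset sketch for the layout above (added for clarity, derived from the
 * code below rather than from the original comment): reading upwards from r1,
 * the linkage area and the param area occupy the bottom of the frame, the 13
 * saved general registers start at GREGS_OFFSET, the 14 volatile FP registers
 * sit just above them, and the MonoLMF occupies the top of the frame at
 * STACK - sizeof (MonoLMF), right below the caller's frame.
 */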
guchar*
mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
{
	guint8 *buf, *code = NULL;
	int i, offset;
	gconstpointer tramp_handler;
	int size = MONO_PPC_32_64_CASE (516, 692);

	/* Now we'll create in 'buf' the PowerPC trampoline code. This
	   is the trampoline code common to all methods */

	code = buf = mono_global_codeman_reserve (size);

	ppc_store_reg_update (buf, ppc_r1, -STACK, ppc_r1);

	/* start building the MonoLMF on the stack */
	offset = STACK - sizeof (double) * MONO_SAVED_FREGS;
	for (i = 14; i < 32; i++) {
		ppc_stfd (buf, i, offset, ppc_r1);
		offset += sizeof (double);
	}
	/*
	 * now the integer registers.
	 */
	offset = STACK - sizeof (MonoLMF) + G_STRUCT_OFFSET (MonoLMF, iregs);
	ppc_store_multiple_regs (buf, ppc_r13, offset, ppc_r1);

	/* Now save the rest of the registers below the MonoLMF struct, first 14
	 * fp regs and then the 13 gregs.
	 */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_stfd (buf, i, offset, ppc_r1);
		offset += sizeof (double);
	}
#define GREGS_OFFSET (STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (13 * sizeof (gulong)))
	offset = GREGS_OFFSET;
	for (i = 0; i < 13; i++) {
		ppc_store_reg (buf, i, offset, ppc_r1);
		offset += sizeof (gulong);
	}
	/* we got here through a jump to the ctr reg, we must save the lr
	 * in the parent frame (we do it here to reduce the size of the
	 * method-specific trampoline)
	 */
	ppc_mflr (buf, ppc_r0);
	ppc_store_reg (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);

	/* ok, now we can continue with the MonoLMF setup, mostly untouched
	 * from emit_prolog in mini-ppc.c
	 */
	ppc_load_func (buf, ppc_r0, mono_get_lmf_addr);
	ppc_mtlr (buf, ppc_r0);
	ppc_blrl (buf);
	/* we build the MonoLMF structure on the stack - see mini-ppc.h
	 * The pointer to the struct is put in ppc_r11.
	 */
	ppc_addi (buf, ppc_r11, ppc_sp, STACK - sizeof (MonoLMF));
	ppc_store_reg (buf, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
	/* new_lmf->previous_lmf = *lmf_addr */
	ppc_load_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	ppc_store_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
	/* *(lmf_addr) = r11 */
	ppc_store_reg (buf, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	/* save method info (it's stored on the stack, so get it first and put it
	 * in r5 as it's the third argument to the function)
	 */
	if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
		ppc_load_reg (buf, ppc_r5, GREGS_OFFSET + PPC_FIRST_ARG_REG * sizeof (gpointer), ppc_r1);
	else
		ppc_load_reg (buf, ppc_r5, GREGS_OFFSET, ppc_r1);
	if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP))
		ppc_store_reg (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
	/* store the frame pointer of the calling method */
	ppc_addi (buf, ppc_r0, ppc_sp, STACK);
	ppc_store_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
	/* save the IP (caller ip) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ppc_li (buf, ppc_r0, 0);
	} else {
		ppc_load_reg (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
	}
	ppc_store_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);

	/*
	 * Now we're ready to call trampoline (gssize *regs, guint8 *code, gpointer value, guint8 *tramp)
	 * Note that the last argument is unused.
	 */
	/* Arg 1: a pointer to the registers */
	ppc_addi (buf, ppc_r3, ppc_r1, GREGS_OFFSET);

	/* Arg 2: code (next address to the instruction that called us) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP)
		ppc_li (buf, ppc_r4, 0);
	else
		ppc_load_reg (buf, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);

	/* Arg 3: MonoMethod *method. It was put in r5 already above */
	/*ppc_mr (buf, ppc_r5, ppc_r5);*/

	tramp_handler = mono_get_trampoline_func (tramp_type);
	ppc_load_func (buf, ppc_r0, tramp_handler);
	ppc_mtlr (buf, ppc_r0);
	ppc_blrl (buf);

	/* OK, code address is now on r3. Move it to the counter reg
	 * so it will be ready for the final jump: this is safe since we
	 * won't do any more calls.
	 */
	if (!MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_load_reg (buf, ppc_r3, 0, ppc_r3);
#endif
		ppc_mtctr (buf, ppc_r3);
	}

	/*
	 * Now we restore the MonoLMF (see emit_epilogue in mini-ppc.c)
	 * and the rest of the registers, so the method called will see
	 * the same state as before we executed.
	 * The pointer to MonoLMF is in ppc_r11.
	 */
	ppc_addi (buf, ppc_r11, ppc_r1, STACK - sizeof (MonoLMF));
	/* r5 = previous_lmf */
	ppc_load_reg (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
	/* r6 = lmf_addr */
	ppc_load_reg (buf, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
	/* *(lmf_addr) = previous_lmf */
	ppc_store_reg (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
	/* restore iregs */
	ppc_load_multiple_regs (buf, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
	/* restore fregs */
	for (i = 14; i < 32; i++)
		ppc_lfd (buf, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);

	/* restore the volatile registers, we skip r1, of course */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_lfd (buf, i, offset, ppc_r1);
		offset += sizeof (double);
	}
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (13 * sizeof (gulong));
	ppc_load_reg (buf, ppc_r0, offset, ppc_r1);
	offset += 2 * sizeof (gulong);
	for (i = 2; i < 13; i++) {
		if (i != 3 || tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH)
			ppc_load_reg (buf, i, offset, ppc_r1);
		offset += sizeof (gulong);
	}

	/* Non-standard function epilogue. Instead of doing a proper
	 * return, we just jump to the compiled code.
	 */
	/* Restore stack pointer and LR and jump to the code */
	ppc_load_reg (buf, ppc_r1, 0, ppc_r1);
	ppc_load_reg (buf, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
	ppc_mtlr (buf, ppc_r11);
	if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
		ppc_blr (buf);
	else
		ppc_bcctr (buf, 20, 0);

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (code, buf - code);

	/* Sanity check */
	g_assert ((buf - code) <= size);

	return code;
}
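/*
 * Usage sketch (illustrative only): the generic code built above is normally
 * obtained through mono_get_trampoline_code (tramp_type) and is tail-jumped
 * to from the small per-method fragments created below, e.g.:
 *
 *     guchar *generic = mono_arch_create_trampoline_code (MONO_TRAMPOLINE_JIT);
 *
 * On entry it expects the caller's registers saved on the stack, calls the C
 * handler as trampoline (regs, code, value, tramp) and, unless the trampoline
 * type must return, jumps straight to the address the handler returns in r3.
 */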

#define TRAMPOLINE_SIZE (MONO_PPC_32_64_CASE (24, (5+5+1+1)*4))
gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	guint8 *code, *buf, *tramp;
	guint32 short_branch;

	tramp = mono_get_trampoline_code (tramp_type);

	mono_domain_lock (domain);
	code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, 4);
	short_branch = branch_for_target_reachable (code + MONO_PPC_32_64_CASE (8, 5*4), tramp);
#ifdef __mono_ppc64__
	/* FIXME: make shorter if possible */
#else
	if (short_branch)
		mono_domain_code_commit (domain, code, TRAMPOLINE_SIZE, 12);
#endif
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_load_sequence (buf, ppc_r0, (gulong) arg1);
		ppc_emit32 (buf, short_branch);
	} else {
		/* Prepare the jump to the generic trampoline code.*/
		ppc_load (buf, ppc_r0, (gulong) tramp);
		ppc_mtctr (buf, ppc_r0);

		/* And finally put 'arg1' in r0 and fly! */
		ppc_load (buf, ppc_r0, (gulong) arg1);
		ppc_bcctr (buf, 20, 0);
	}

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (code, buf - code);

	g_assert ((buf - code) <= TRAMPOLINE_SIZE);
	if (code_len)
		*code_len = buf - code;

	return code;
}
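/*
 * Shape of the fragment emitted above, as a sketch (assuming 32-bit and the
 * long form, which is the TRAMPOLINE_SIZE worst case of 6 words / 24 bytes):
 *
 *     lis/ori r0, <tramp>   ; load the generic trampoline address
 *     mtctr   r0
 *     lis/ori r0, <arg1>    ; the per-trampoline argument travels in r0
 *     bctr                  ; jump into the generic trampoline
 *
 * The generic code then finds <arg1> again in the r0 slot of the register
 * area it saves on the stack (see the r5 load above).
 */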

static guint8*
emit_trampoline_jump (guint8 *code, guint8 *tramp)
{
	guint32 short_branch = branch_for_target_reachable (code, tramp);

	/* FIXME: we can save a few bytes here by committing if the
	   short branch is possible */
	if (short_branch) {
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load (code, ppc_r0, tramp);
		ppc_mtctr (code, ppc_r0);
		ppc_bcctr (code, 20, 0);
	}

	return code;
}
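/*
 * Helper note (added): this is only used below, by the rgctx lazy fetch and
 * generic class init trampolines, to end their fast-path code with a jump
 * into a specific trampoline, e.g. code = emit_trampoline_jump (code, tramp);
 */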

gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot)
{
#ifdef MONO_ARCH_VTABLE_REG
	guint8 *tramp;
	guint8 *code, *buf;
	guint8 **rgctx_null_jumps;
	int tramp_size;
	int depth, index;
	int i;
	gboolean mrgctx;

	mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
	index = MONO_RGCTX_SLOT_INDEX (slot);
	if (mrgctx)
		index += sizeof (MonoMethodRuntimeGenericContext) / sizeof (gpointer);
	for (depth = 0; ; ++depth) {
		int size = mono_class_rgctx_get_array_size (depth, mrgctx);

		if (index < size - 1)
			break;
		index -= size - 1;
	}

	tramp_size = MONO_PPC_32_64_CASE (40, 52) + 12 * depth;
	if (mrgctx)
		tramp_size += 4;
	else
		tramp_size += 12;

	code = buf = mono_global_codeman_reserve (tramp_size);

	rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));

	if (mrgctx) {
		/* get mrgctx ptr */
		ppc_mr (code, ppc_r4, PPC_FIRST_ARG_REG);
	} else {
		/* load rgctx ptr from vtable */
		ppc_load_reg (code, ppc_r4, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context), PPC_FIRST_ARG_REG);
		/* is the rgctx ptr null? */
		ppc_compare_reg_imm (code, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [0] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}
	for (i = 0; i < depth; ++i) {
		/* load ptr to next array */
		if (mrgctx && i == 0)
			ppc_load_reg (code, ppc_r4, sizeof (MonoMethodRuntimeGenericContext), ppc_r4);
		else
			ppc_load_reg (code, ppc_r4, 0, ppc_r4);
		/* is the ptr null? */
		ppc_compare_reg_imm (code, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [i + 1] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}

	/* fetch slot */
	ppc_load_reg (code, ppc_r4, sizeof (gpointer) * (index + 1), ppc_r4);
	/* is the slot null? */
	ppc_compare_reg_imm (code, 0, ppc_r4, 0);
	/* if yes, jump to actual trampoline */
	rgctx_null_jumps [depth + 1] = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	/* otherwise return r4 */
	/* FIXME: if we use r3 as the work register we can avoid this copy */
	ppc_mr (code, ppc_r3, ppc_r4);
	ppc_blr (code);

	for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
		ppc_patch (rgctx_null_jumps [i], code);

	g_free (rgctx_null_jumps);

	/* move the rgctx pointer to the VTABLE register */
	ppc_mr (code, MONO_ARCH_VTABLE_REG, ppc_r3);

	tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot),
		MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);

	/* jump to the actual trampoline */
	code = emit_trampoline_jump (code, tramp);

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	return buf;
#else
	g_assert_not_reached ();
#endif
}
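/*
 * Worked example of the slot decoding above (purely illustrative; the real
 * per-level array sizes come from mono_class_rgctx_get_array_size and may
 * differ): if each level held, say, 16 slots, with one slot per level acting
 * as the link to the next array, then a non-MRGCTX slot index of 20 would
 * give depth = 1 and index = 5, i.e. follow one "next array" pointer and then
 * load the value at sizeof (gpointer) * (index + 1) from that array.
 */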

gpointer
mono_arch_create_generic_class_init_trampoline (void)
{
	guint8 *tramp;
	guint8 *code, *buf;
	static int byte_offset = -1;
	static guint8 bitmask;
	guint8 *jump;
	int tramp_size;

	tramp_size = MONO_PPC_32_64_CASE (32, 44);

	code = buf = mono_global_codeman_reserve (tramp_size);

	if (byte_offset < 0)
		mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);

	ppc_lbz (code, ppc_r4, byte_offset, PPC_FIRST_ARG_REG);
	ppc_andid (code, ppc_r4, ppc_r4, bitmask);
	jump = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);

	ppc_blr (code);

	ppc_patch (jump, code);

	tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT,
		mono_get_root_domain (), NULL);

	/* jump to the actual trampoline */
	code = emit_trampoline_jump (code, tramp);

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	return buf;
}
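/*
 * Fast-path sketch (added note, not original): the code above loads the
 * vtable's 'initialized' bitfield byte (lbz), masks it with andi., and
 * branches on the result. When the bit is already set the code falls through
 * to blr and returns immediately; when it is clear, the patched conditional
 * branch skips the blr and jumps into the specific
 * MONO_TRAMPOLINE_GENERIC_CLASS_INIT trampoline to run the class initializer.
 */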