2 /*--------------------------------------------------------------------*/
3 /*--- The core dispatch loop, for jumping to a code address. ---*/
4 /*--- dispatch-amd64-darwin.S ---*/
5 /*--------------------------------------------------------------------*/
8 This file is part of Valgrind, a dynamic binary instrumentation framework.
11 Copyright (C) 2000-2013 Julian Seward
14 This program is free software; you can redistribute it and/or
15 modify it under the terms of the GNU General Public License as
16 published by the Free Software Foundation; either version 2 of the
17 License, or (at your option) any later version.
19 This program is distributed in the hope that it will be useful, but
20 WITHOUT ANY WARRANTY; without even the implied warranty of
21 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
22 General Public License for more details.
24 You should have received a copy of the GNU General Public License
25 along with this program; if not, write to the Free Software
26 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
29 The GNU General Public License is contained in the file COPYING.
32 #if defined(VGP_amd64_darwin)
34 #include "pub_core_basics_asm.h"
35 #include "pub_core_dispatch_asm.h"
36 #include "pub_core_transtab_asm.h"
37 #include "libvex_guest_offsets.h" /* for OFFSET_amd64_RIP */
40 /*------------------------------------------------------------*/
42 /*--- The dispatch loop. VG_(disp_run_translations) is ---*/
43 /*--- used to run all translations, ---*/
44 /*--- including no-redir ones. ---*/
46 /*------------------------------------------------------------*/
48 /*----------------------------------------------------*/
49 /*--- Entry and preamble (set everything up) ---*/
50 /*----------------------------------------------------*/
53 void VG_(disp_run_translations)( UWord* two_words,
/* C signature: void VG_(disp_run_translations)(UWord* two_words,
                                                void*  guest_state,
                                                Addr   host_addr);
   Entry point from C into the dispatcher.  SysV AMD64 ABI: args arrive
   in %rdi/%rsi/%rdx as annotated below.  On exit, two result words are
   written back through two_words (see the postamble).
   NOTE(review): this chunk is a damaged extraction — each line carries
   the original file's line number fused onto its front, and the jumps in
   those numbers (62->66, 66->81, 87->94, ...) show that the actual
   instructions (callee-saved register pushes, fldcw/ldmxcsr loads, cld,
   the guest-state-pointer move, and the jump to host_addr) are missing
   from view.  Do not assemble as-is; the annotations describe structure
   only where the surviving text establishes it. */
58 .globl VG_(disp_run_translations)
59 VG_(disp_run_translations):
60 /* %rdi holds two_words */
61 /* %rsi holds guest_state */
62 /* %rdx holds host_addr */
/* "pseudo-function": entered by `call` from C but exited via the
   postamble below rather than a matching return path, so all integer
   registers are saved here.  (The push sequence itself, original lines
   67-80, is missing from this extraction.) */
66 /* Save integer registers, since this is a pseudo-function. */
81 /* %rdi must be saved last */
84 /* Get the host CPU in the state expected by generated code. */
/* NOTE(review): the comment below is unterminated here — its
   continuation and the fldcw instruction (original lines 88-93) are
   missing from this extraction. */
86 /* set host FPU control word to the default mode expected
87 by VEX-generated code. See comments in libvex.h for
/* NOTE(review): the ldmxcsr instruction (original lines 96-99) is
   likewise missing. */
94 /* set host SSE control word to the default mode expected
95 by VEX-generated code. */
/* presumably `cld` so string ops move forward — instruction not
   visible; TODO confirm against the full source. */
100 /* set dir flag to known value */
/* presumably %rbp <- guest_state (%rsi); the xindir code below reads
   the guest RIP via OFFSET_amd64_RIP(%rbp) — move itself not visible. */
103 /* Set up the guest state pointer */
/* The jump to host_addr (%rdx) is missing from this extraction. */
106 /* and jump into the code cache. Chained translations in
107 the code cache run, until for whatever reason, they can't
108 continue. When that happens, the translation in question
109 will jump (or call) to one of the continuation points
110 VG_(cp_...) below. */
114 /*----------------------------------------------------*/
115 /*--- Postamble and exit. ---*/
116 /*----------------------------------------------------*/
/* NOTE(review): the `postamble:` label itself and most of the
   instruction stream (the fstcw/stmxcsr stores feeding the checks, the
   compares before each jnz, the invariant_violation label, the stores
   of %rax/%rdx into two_words[0]/[1], the register pops, and the final
   ret) are missing from this extraction — only fragments survive. */
119 /* At this point, %rax and %rdx contain two
120 words to be returned to the caller. %rax
121 holds a TRC value, and %rdx optionally may
122 hold another word (for CHAIN_ME exits, the
123 address of the place to patch.) */
125 /* We're leaving. Check that nobody messed with %mxcsr
126 or %fpucw. We can't mess with %rax or %rdx here as they
127 hold the tentative return values, but any others are OK. */
128 #if !defined(ENABLE_INNER)
129 /* This check fails for self-hosting, so skip in that case */
/* fpucw check fragment: a saved control word was pushed on the stack
   (push not visible); popping into a dead register discards it without
   touching the flags the following jnz tests. */
133 popq %r15 /* get rid of the word without trashing %rflags */
134 jnz invariant_violation
/* mxcsr check fragment: the low 6 bits of MXCSR are sticky exception
   status flags, cleared here so only the control bits are compared. */
138 andl $0xFFFFFFC0, (%rsp) /* mask out status flags */
141 jnz invariant_violation
142 /* otherwise we're OK */
/* invariant_violation path (label not visible): override the TRC so
   the C side reports the corrupted-FP-state condition. */
145 movq $VG_TRC_INVARIANT_FAILED, %rax
/* remainder: restore %rdi (two_words), store the %rax/%rdx pair through
   it, restore all other saved registers, return — instructions missing
   from this extraction. */
149 /* Pop %rdi, stash return values */
153 /* Now pop everything else */
170 /*----------------------------------------------------*/
171 /*--- Continuation points ---*/
172 /*----------------------------------------------------*/
174 /* ------ Chain me to slow entry point ------ */
/* Continuation point: a translation's chain-me-to-slow-entry-point stub
   `call`s here.  Returns to C with (VG_TRC_CHAIN_ME_TO_SLOW_EP, patch
   address) so the caller can patch the call site into a direct jump. */
175 .globl VG_(disp_cp_chain_me_to_slowEP)
176 VG_(disp_cp_chain_me_to_slowEP):
177 /* We got called. The return address indicates
178 where the patching needs to happen. Collect
179 the return address and exit back to C land,
180 handing the caller the pair (Chain_me_S, RA) */
181 movq $VG_TRC_CHAIN_ME_TO_SLOW_EP, %rax
/* NOTE(review): the comment below is unterminated and the instructions
   that compute %rdx (return address backed up over the 10-byte
   movabsq+call sequence, per the fragment) and jump to the postamble
   are missing from this extraction — confirm against the full source. */
183 /* 10 = movabsq $VG_(disp_chain_me_to_slowEP), %r11;
188 /* ------ Chain me to fast entry point ------ */
/* Continuation point: identical in shape to the slowEP variant above,
   but reports VG_TRC_CHAIN_ME_TO_FAST_EP so the patcher targets the
   fast entry point of the destination translation. */
189 .globl VG_(disp_cp_chain_me_to_fastEP)
190 VG_(disp_cp_chain_me_to_fastEP):
191 /* We got called. The return address indicates
192 where the patching needs to happen. Collect
193 the return address and exit back to C land,
194 handing the caller the pair (Chain_me_F, RA) */
195 movq $VG_TRC_CHAIN_ME_TO_FAST_EP, %rax
/* NOTE(review): unterminated comment; the %rdx computation and the jump
   to the postamble are missing from this extraction. */
197 /* 10 = movabsq $VG_(disp_chain_me_to_fastEP), %r11;
202 /* ------ Indirect but boring jump ------ */
/* Continuation point for an ordinary indirect jump: look the next guest
   address up in the fast translation cache and jump straight to the
   cached host code on a hit; on a miss, return to C with
   VG_TRC_INNER_FASTMISS so the full lookup runs there.
   NOTE(review): the `VG_(disp_cp_xindir):` label line itself (original
   line 204), the statistics-counter increments, the cmpq that sets the
   flags for the jnz, the hit-path `jmp *%r11`, and the
   `fast_lookup_failed:` label are all missing from this extraction. */
203 .globl VG_(disp_cp_xindir)
/* next guest RIP, read from the guest state (%rbp). */
205 /* Where are we going? */
206 movq OFFSET_amd64_RIP(%rbp), %rax
/* stats: address of the xindir counter; the increment through %r10 is
   not visible here. */
209 movabsq $VG_(stats__n_xindirs_32), %r10
212 /* try a fast lookup in the translation cache */
213 movabsq $VG_(tt_fast), %rcx
/* index = (guest addr & VG_TT_FAST_MASK) * 16; each FastCacheEntry is
   16 bytes, holding a .guest tag and a .host code pointer. */
214 movq %rax, %rbx /* next guest addr */
215 andq $VG_TT_FAST_MASK, %rbx /* entry# */
216 shlq $4, %rbx /* entry# * sizeof(FastCacheEntry) */
217 movq 0(%rcx,%rbx,1), %r10 /* .guest */
218 movq 8(%rcx,%rbx,1), %r11 /* .host */
/* flags come from a tag compare (.guest vs %rax) not visible here. */
220 jnz fast_lookup_failed
222 /* Found a match. Jump to .host. */
/* the jmp *%r11 preceding this ud2 is missing from this extraction. */
224 ud2 /* persuade insn decoders not to speculate past here */
/* miss path: bump the miss counter (increment not visible) and hand
   VG_TRC_INNER_FASTMISS back to C via the postamble. */
228 movabsq $VG_(stats__n_xindir_misses_32), %r10
231 movq $VG_TRC_INNER_FASTMISS, %rax
235 /* ------ Assisted jump ------ */
/* Continuation point for an assisted transfer: the translation has put
   the TRC in %rbp.  NOTE(review): the instructions that move it into
   the return slot and jump to the postamble are missing from this
   extraction — confirm against the full source. */
236 .globl VG_(disp_cp_xassisted)
237 VG_(disp_cp_xassisted):
238 /* %rbp contains the TRC */
243 /* ------ Event check failed ------ */
/* Continuation point taken when a translation's event-counter check
   fails: report VG_TRC_INNER_COUNTERZERO so the scheduler in C regains
   control.  NOTE(review): the jump to the postamble that follows is
   missing from this extraction. */
244 .globl VG_(disp_cp_evcheck_fail)
245 VG_(disp_cp_evcheck_fail):
246 movq $VG_TRC_INNER_COUNTERZERO, %rax
251 #endif // defined(VGP_amd64_darwin)
253 /*--------------------------------------------------------------------*/
255 /*--------------------------------------------------------------------*/