2 /*--------------------------------------------------------------------*/
3 /*--- The core dispatch loop, for jumping to a code address. ---*/
4 /*--- dispatch-amd64-darwin.S ---*/
5 /*--------------------------------------------------------------------*/
8 This file is part of Valgrind, a dynamic binary instrumentation
11 Copyright (C) 2000-2017 Julian Seward
14 This program is free software; you can redistribute it and/or
15 modify it under the terms of the GNU General Public License as
16 published by the Free Software Foundation; either version 2 of the
17 License, or (at your option) any later version.
19 This program is distributed in the hope that it will be useful, but
20 WITHOUT ANY WARRANTY; without even the implied warranty of
21 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
22 General Public License for more details.
24 You should have received a copy of the GNU General Public License
25 along with this program; if not, write to the Free Software
26 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
29 The GNU General Public License is contained in the file COPYING.
32 #include "pub_core_basics_asm.h"
34 #if defined(VGP_amd64_darwin)
36 #include "pub_core_dispatch_asm.h"
37 #include "pub_core_transtab_asm.h"
38 #include "libvex_guest_offsets.h" /* for OFFSET_amd64_RIP */
41 /*------------------------------------------------------------*/
43 /*--- The dispatch loop. VG_(disp_run_translations) is ---*/
44 /*--- used to run all translations, ---*/
45 /*--- including no-redir ones. ---*/
47 /*------------------------------------------------------------*/
49 /*----------------------------------------------------*/
50 /*--- Entry and preamble (set everything up) ---*/
51 /*----------------------------------------------------*/
/* Entry point from C into the dispatcher: save host state, put the CPU
   into the configuration VEX-generated code expects, then jump into the
   code cache at host_addr.  Per the System V AMD64 ABI the arguments
   arrive as:
       %rdi = two_words    (UWord[2], written with the results on exit)
       %rsi = guest_state  (pointer to the guest register state)
       %rdx = host_addr    (host address of the translation to run)
   Control does not return via `ret` here; translations eventually jump
   to one of the VG_(disp_cp_...) continuation points below, which fall
   into the postamble.
   NOTE(review): most instruction lines (the pushes, FPU/SSE setup
   instructions, and the final indirect jump) are elided from this
   excerpt -- confirm against the full dispatch-amd64-darwin.S. */
54 void VG_(disp_run_translations)( UWord* two_words,
59 .globl VG_(disp_run_translations)
60 VG_(disp_run_translations):
61 /* %rdi holds two_words */
62 /* %rsi holds guest_state */
63 /* %rdx holds host_addr */
67 /* Save integer registers, since this is a pseudo-function. */
/* "pseudo-function": entered by `call` but exited by a jump from the
   postamble, so callee-saved registers are preserved manually here. */
82 /* %rdi must be saved last */
/* %rdi is saved last so the postamble can pop it first and recover the
   two_words pointer before restoring everything else (see "Pop %rdi,
   stash return values" below). */
85 /* Get the host CPU in the state expected by generated code. */
87 /* set host FPU control word to the default mode expected
88 by VEX-generated code. See comments in libvex.h for
95 /* set host SSE control word to the default mode expected
96 by VEX-generated code. */
101 /* set dir flag to known value */
/* presumably via `cld` (DF=0) -- the instruction itself is elided from
   this excerpt; TODO confirm. */
104 /* Set up the guest state pointer */
/* NOTE(review): the register chosen is not visible here, but the
   continuation points below address the guest state via %rbp
   (OFFSET_amd64_RIP(%rbp)), so presumably %rbp = guest_state. */
107 /* and jump into the code cache. Chained translations in
108 the code cache run, until for whatever reason, they can't
109 continue. When that happens, the translation in question
110 will jump (or call) to one of the continuation points
111 VG_(cp_...) below. */
115 /*----------------------------------------------------*/
116 /*--- Postamble and exit. ---*/
117 /*----------------------------------------------------*/
/* Common exit path: every continuation point below ends up here.
   Verifies the FP/SSE control state is still what the preamble set,
   then restores the saved registers and returns the (%rax, %rdx) pair
   to the caller through the two_words array. */
120 /* At this point, %rax and %rdx contain two
121 words to be returned to the caller. %rax
122 holds a TRC value, and %rdx optionally may
123 hold another word (for CHAIN_ME exits, the
124 address of the place to patch.) */
126 /* We're leaving. Check that nobody messed with %mxcsr
127 or %fpucw. We can't mess with %rax or %rdx here as they
128 hold the tentative return values, but any others are OK. */
129 #if !defined(ENABLE_INNER)
130 /* This check fails for self-hosting, so skip in that case */
/* NOTE(review): the instructions that store %fpucw to the stack and
   compare it against the expected value (thereby setting the flags
   consumed by the following jnz) are elided from this excerpt. */
134 popq %r15 /* get rid of the word without trashing %rflags */
135 jnz invariant_violation
/* NOTE(review): the stmxcsr-to-stack sequence preceding this mask is
   likewise elided here. */
139 andl $0xFFFFFFC0, (%rsp) /* mask out status flags */
/* clears MXCSR bits 0-5 (the sticky exception/status flags) so only
   the control bits take part in the comparison */
142 jnz invariant_violation
143 /* otherwise we're OK */
/* invariant_violation: (label elided from this excerpt) reached when
   either control-word check above fails; overrides the TRC in %rax. */
146 movq $VG_TRC_INVARIANT_FAILED, %rax
/* NOTE(review): the matching #endif for the ENABLE_INNER guard is not
   visible in this excerpt. */
150 /* Pop %rdi, stash return values */
/* %rdi was pushed last in the preamble, so it is recovered first; %rax
   and %rdx are then written into two_words[0]/two_words[1]
   (instructions elided here -- confirm against the full file). */
154 /* Now pop everything else */
171 /*----------------------------------------------------*/
172 /*--- Continuation points ---*/
173 /*----------------------------------------------------*/
175 /* ------ Chain me to slow entry point ------ */
/* Continuation point: a not-yet-chained translation `call`s here when
   it wants its slow-entry-point jump patched.  Returns to C with
   %rax = VG_TRC_CHAIN_ME_TO_SLOW_EP and %rdx = address to patch. */
176 .globl VG_(disp_cp_chain_me_to_slowEP)
177 VG_(disp_cp_chain_me_to_slowEP):
178 /* We got called. The return address indicates
179 where the patching needs to happen. Collect
180 the return address and exit back to C land,
181 handing the caller the pair (Chain_me_S, RA) */
182 movq $VG_TRC_CHAIN_ME_TO_SLOW_EP, %rax
/* NOTE(review): the instructions that pop the return address into
   %rdx, back it up by 10 bytes to the start of the patchable call
   sequence, and jump to the postamble are elided from this excerpt. */
184 /* 10 = movabsq $VG_(disp_chain_me_to_slowEP), %r11;
/* (comment continues past this excerpt -- presumably "call *%r11",
   i.e. the 10-byte sequence being patched; TODO confirm) */
189 /* ------ Chain me to fast entry point ------ */
/* Continuation point: identical in shape to the slowEP case above, but
   reports VG_TRC_CHAIN_ME_TO_FAST_EP so the core patches in the
   fast entry point of the target translation instead. */
190 .globl VG_(disp_cp_chain_me_to_fastEP)
191 VG_(disp_cp_chain_me_to_fastEP):
192 /* We got called. The return address indicates
193 where the patching needs to happen. Collect
194 the return address and exit back to C land,
195 handing the caller the pair (Chain_me_F, RA) */
196 movq $VG_TRC_CHAIN_ME_TO_FAST_EP, %rax
/* NOTE(review): as with slowEP, the pop-return-address / subtract-10 /
   jump-to-postamble instructions are elided from this excerpt. */
198 /* 10 = movabsq $VG_(disp_chain_me_to_fastEP), %r11;
/* (comment continues past this excerpt -- presumably "call *%r11";
   TODO confirm) */
203 /* ------ Indirect but boring jump ------ */
/* Continuation point for an ordinary indirect guest jump: look the new
   guest RIP up in the fast translation cache and, on a hit, jump
   straight to the cached host code; on a miss, return to C with
   VG_TRC_INNER_FASTMISS so the scheduler can do a full lookup. */
204 .globl VG_(disp_cp_xindir)
/* NOTE(review): the "VG_(disp_cp_xindir):" label line itself is elided
   from this excerpt (original line 205). */
206 /* Where are we going? */
/* %rbp addresses the guest state (set up in the preamble); fetch the
   guest's next instruction pointer. */
207 movq OFFSET_amd64_RIP(%rbp), %rax
/* stats counter: profiling only.  NOTE(review): the instruction that
   actually increments (%r10) is elided here. */
210 movabsq $VG_(stats__n_xindirs_32), %r10
213 /* try a fast lookup in the translation cache */
214 movabsq $VG_(tt_fast), %rcx
215 movq %rax, %rbx /* next guest addr */
216 andq $VG_TT_FAST_MASK, %rbx /* entry# */
217 shlq $4, %rbx /* entry# * sizeof(FastCacheEntry) */
/* each FastCacheEntry is 16 bytes: { guest addr, host addr } */
218 movq 0(%rcx,%rbx,1), %r10 /* .guest */
219 movq 8(%rcx,%rbx,1), %r11 /* .host */
/* NOTE(review): the cmp that compares the cached .guest (%r10) with
   the wanted address (%rax), setting the flags for this jnz, is elided
   from this excerpt (original line 220). */
221 jnz fast_lookup_failed
223 /* Found a match. Jump to .host. */
/* NOTE(review): the `jmpq *%r11` itself is elided here; the ud2 below
   sits after it so it is never executed. */
225 ud2 /* persuade insn decoders not to speculate past here */
/* fast_lookup_failed: (label elided from this excerpt) -- miss path.
   NOTE(review): the increment of the miss counter is also elided. */
229 movabsq $VG_(stats__n_xindir_misses_32), %r10
/* report a fast-cache miss; jump to the postamble (elided) follows */
232 movq $VG_TRC_INNER_FASTMISS, %rax
236 /* ------ Assisted jump ------ */
/* Continuation point for transfers the translation cannot handle by
   itself: the generated code has placed the reason code in %rbp before
   jumping here, and the core deals with it back in C land. */
237 .globl VG_(disp_cp_xassisted)
238 VG_(disp_cp_xassisted):
239 /* %rbp contains the TRC */
/* NOTE(review): the instructions that move the TRC into %rax (and
   presumably zero %rdx) before falling into the postamble are elided
   from this excerpt -- confirm against the full file. */
244 /* ------ Event check failed ------ */
/* Continuation point taken when a translation's event (thread-time)
   counter has reached zero: return to C with
   %rax = VG_TRC_INNER_COUNTERZERO so the scheduler regains control. */
245 .globl VG_(disp_cp_evcheck_fail)
246 VG_(disp_cp_evcheck_fail):
247 movq $VG_TRC_INNER_COUNTERZERO, %rax
/* NOTE(review): the jump into the postamble that follows is elided
   from this excerpt. */
252 #endif // defined(VGP_amd64_darwin)
254 /* Let the linker know we don't need an executable stack */
257 /*--------------------------------------------------------------------*/
259 /*--------------------------------------------------------------------*/