/*--------------------------------------------------------------------*/
/*--- The core dispatch loop, for jumping to a code address.       ---*/
/*---                                      dispatch-amd64-solaris.S ---*/
/*--------------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2000-2017 Julian Seward

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307, USA.

   The GNU General Public License is contained in the file COPYING.
*/
32 #include "pub_core_basics_asm.h"
34 #if defined(VGP_amd64_solaris)
36 #include "pub_core_dispatch_asm.h"
37 #include "pub_core_transtab_asm.h"
38 #include "libvex_guest_offsets.h" /* for OFFSET_amd64_RIP */
/*------------------------------------------------------------*/
/*--- The dispatch loop.  VG_(disp_run_translations) is    ---*/
/*--- used to run all translations,                        ---*/
/*--- including no-redir ones.                             ---*/
/*------------------------------------------------------------*/

/*----------------------------------------------------*/
/*--- Entry and preamble (set everything up)       ---*/
/*----------------------------------------------------*/
/* VG_(disp_run_translations) -- run translations from the code cache.
   C-visible entry point; the first parameter line is visible below,
   and per the register notes the SysV AMD64 argument registers carry:
     %rdi = two_words   (out-params: TRC value + optional second word)
     %rsi = guest_state (guest state pointer for generated code)
     %rdx = host_addr   (code-cache address to jump to)
   Control leaves generated code only via the VG_(disp_cp_*)
   continuation points further down, which funnel back into the
   postamble here -- hence the "pseudo-function" save/restore style.
   NOTE(review): the embedded original line numbers jump, so many
   instruction lines (register pushes, fxsave-style control-word
   saves, the actual jump into the cache, the compares feeding the
   jnz's below, the pops) are MISSING from this excerpt.  The
   comments added here describe only the surviving fragments; do not
   assemble this excerpt as-is. */
54 void VG_(disp_run_translations)( UWord* two_words,
59 .globl VG_(disp_run_translations)
60 .type VG_(disp_run_translations), @function
61 VG_(disp_run_translations):
62 /* %rdi holds two_words */
63 /* %rsi holds guest_state */
64 /* %rdx holds host_addr */
68 /* Save integer registers, since this is a pseudo-function. */
83 /* %rdi must be saved last */
86 /* Get the host CPU in the state expected by generated code. */
88 /* set host FPU control word to the default mode expected
89 by VEX-generated code. See comments in libvex.h for
96 /* set host SSE control word to the default mode expected
97 by VEX-generated code. */
102 /* set dir flag to known value */
105 /* Set up the guest state pointer */
108 /* and jump into the code cache. Chained translations in
109 the code cache run, until for whatever reason, they can't
110 continue. When that happens, the translation in question
111 will jump (or call) to one of the continuation points
112 VG_(cp_...) below. */
116 /*----------------------------------------------------*/
117 /*--- Postamble and exit. ---*/
118 /*----------------------------------------------------*/
121 /* At this point, %rax and %rdx contain two
122 words to be returned to the caller. %rax
123 holds a TRC value, and %rdx optionally may
124 hold another word (for CHAIN_ME exits, the
125 address of the place to patch.) */
127 /* We're leaving. Check that nobody messed with %mxcsr
128 or %fpucw. We can't mess with %rax or %rdx here as they
129 hold the tentative return values, but any others are OK. */
130 #if !defined(ENABLE_INNER)
131 /* This check fails for self-hosting, so skip in that case */
/* FPU control word invariant check.  Presumably the current %fpucw
   was stored to the stack and compared against the expected default
   just above (those lines are among the missing ones -- TODO confirm
   against the full source); pop the probe word without disturbing
   %rflags so the jnz below still sees the compare result. */
135 popq %r15 /* get rid of the word without trashing %rflags */
136 jnz invariant_violation
/* MXCSR invariant check.  The low 6 bits are sticky status flags
   that legitimately change during execution, so mask them out of the
   stacked value before the (missing-from-excerpt) comparison against
   the expected control bits. */
140 andl $0xFFFFFFC0, (%rsp) /* mask out status flags */
143 jnz invariant_violation
144 /* otherwise we're OK */
/* invariant_violation path: report corruption of host FP/SSE control
   state by generated code via a dedicated TRC value. */
147 movq $VG_TRC_INVARIANT_FAILED, %rax
151 /* Pop %rdi, stash return values */
155 /* Now pop everything else */
/*----------------------------------------------------*/
/*--- Continuation points                          ---*/
/*----------------------------------------------------*/
176 /* ------ Chain me to slow entry point ------ */
/* Continuation point reached by a `call` from an as-yet-unchained
   translation.  Exits back to C with TRC = CHAIN_ME_TO_SLOW_EP so
   the caller can patch the call site.
   NOTE(review): the instructions that pop the return address and
   compute the patch address (RA minus the 10-byte movabsq/call
   sequence hinted at in the trailing comment) are missing from this
   excerpt -- confirm against the full source. */
177 .global VG_(disp_cp_chain_me_to_slowEP)
178 VG_(disp_cp_chain_me_to_slowEP):
179 /* We got called. The return address indicates
180 where the patching needs to happen. Collect
181 the return address and, exit back to C land,
182 handing the caller the pair (Chain_me_S, RA) */
183 movq $VG_TRC_CHAIN_ME_TO_SLOW_EP, %rax
185 /* 10 = movabsq $VG_(disp_chain_me_to_slowEP), %r11;
190 /* ------ Chain me to fast entry point ------ */
/* Same protocol as the slowEP case above, but requesting chaining to
   the fast entry point: exit to C with TRC = CHAIN_ME_TO_FAST_EP and
   (per the comment below) the patch address derived from the return
   address.
   NOTE(review): the return-address collection instructions are
   missing from this excerpt -- confirm against the full source. */
191 .global VG_(disp_cp_chain_me_to_fastEP)
192 VG_(disp_cp_chain_me_to_fastEP):
193 /* We got called. The return address indicates
194 where the patching needs to happen. Collect
195 the return address and, exit back to C land,
196 handing the caller the pair (Chain_me_F, RA) */
197 movq $VG_TRC_CHAIN_ME_TO_FAST_EP, %rax
199 /* 10 = movabsq $VG_(disp_chain_me_to_fastEP), %r11;
204 /* ------ Indirect but boring jump ------ */
/* Guest indirect jump: look the next guest RIP up in the fast
   translation-cache hash and jump straight to the cached host code
   on a hit; on a miss, exit to the scheduler with INNER_FASTMISS.
   Here %rbp holds the guest state pointer (see the preamble's
   "Set up the guest state pointer" note).
   NOTE(review): several lines are missing from this excerpt,
   including the `VG_(disp_cp_xindir):` label itself, the compare of
   .guest (%r10) against the wanted address (%rax) that feeds the
   first jnz, the `jmp *%r11` on the hit path, and the
   `fast_lookup_failed:` label -- confirm against the full source. */
205 .global VG_(disp_cp_xindir)
207 /* Where are we going? */
208 movq OFFSET_amd64_RIP(%rbp), %rax
211 addl $1, VG_(stats__n_xindirs_32)
213 /* try a fast lookup in the translation cache */
214 movabsq $VG_(tt_fast), %rcx
215 movq %rax, %rbx /* next guest addr */
216 andq $VG_TT_FAST_MASK, %rbx /* entry# */
217 shlq $4, %rbx /* entry# * sizeof(FastCacheEntry) */
218 movq 0(%rcx,%rbx,1), %r10 /* .guest */
219 movq 8(%rcx,%rbx,1), %r11 /* .host */
221 jnz fast_lookup_failed
223 /* Found a match. Jump to .host. */
225 ud2 /* persuade insn decoders not to speculate past here */
/* fast_lookup_failed path: count the miss and hand control back to
   the scheduler to do a slow lookup / translate. */
229 addl $1, VG_(stats__n_xindir_misses_32)
231 movq $VG_TRC_INNER_FASTMISS, %rax
235 /* ------ Assisted jump ------ */
/* Assisted exit: the translation has already placed the TRC value in
   %rbp; this point hands it back to C land via the postamble.
   NOTE(review): the body instructions (moving %rbp into the return
   register and jumping to the postamble, presumably) are missing
   from this excerpt -- confirm against the full source. */
236 .global VG_(disp_cp_xassisted)
237 VG_(disp_cp_xassisted):
238 /* %rbp contains the TRC */
243 /* ------ Event check failed ------ */
/* The per-translation event counter hit zero: exit to the scheduler
   with TRC = INNER_COUNTERZERO so it can deliver pending events.
   NOTE(review): the jump back to the postamble that should follow
   the movq is missing from this excerpt. */
244 .global VG_(disp_cp_evcheck_fail)
245 VG_(disp_cp_evcheck_fail):
246 movq $VG_TRC_INNER_COUNTERZERO, %rax
251 .size VG_(disp_run_translations), .-VG_(disp_run_translations)
253 #endif // defined(VGP_amd64_solaris)
/* Let the linker know we don't need an executable stack */

/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/