/* Copyright (c) 2007, Google Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * ---
 * Author: Joi Sigurdsson
 * Author: Scott Francis
 *
 * Implementation of PreamblePatcher
 */

#include "preamble_patcher.h"

#include "mini_disassembler.h"

// Definitions of assembly statements we need
#define ASM_JMP32REL 0xE9
#define ASM_INT3 0xCC
#define ASM_NOP 0x90
// X64 opcodes
#define ASM_MOVRAX_IMM 0xB8
#define ASM_REXW 0x48
#define ASM_JMP 0xFF
#define ASM_JMP_RAX 0xE0

namespace sidestep {

SideStepError PreamblePatcher::RawPatchWithStub(
    void* target_function,
    void* replacement_function,
    unsigned char* preamble_stub,
    unsigned long stub_size,
    unsigned long* bytes_needed) {
  if ((NULL == target_function) ||
      (NULL == replacement_function) ||
      (NULL == preamble_stub)) {
    SIDESTEP_ASSERT(false &&
                    "Invalid parameters - either pTargetFunction or "
                    "pReplacementFunction or pPreambleStub were NULL.");
    return SIDESTEP_INVALID_PARAMETER;
  }

  // TODO(V7:joi) Siggi and I just had a discussion and decided that both
  // patching and unpatching are actually unsafe. We also discussed a
  // method of making it safe, which is to freeze all other threads in the
  // process, check their thread context to see if their eip is currently
  // inside the block of instructions we need to copy to the stub, and if so
  // wait a bit and try again, then unfreeze all threads once we've patched.
  // Not implementing this for now since we're only using SideStep for unit
  // testing, but if we ever use it for production code this is what we
  // should do.
  //
  // NOTE: Stoyan suggests we can write 8 or even 10 bytes atomically using
  // FPU instructions, and on newer processors we could use cmpxchg8b or
  // cmpxchg16b. So it might be possible to do the patching/unpatching
  // atomically and avoid having to freeze other threads. Note though, that
  // doing it atomically does not help if one of the other threads happens
  // to have its eip in the middle of the bytes you change while you change
  // them.
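  // (For illustration only: on Windows, an aligned 8-byte patch site could in
  // principle be published in a single step with InterlockedCompareExchange64;
  // this is just a sketch of the idea above and is not implemented here.)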
  unsigned char* target = reinterpret_cast<unsigned char*>(target_function);
  unsigned int required_trampoline_bytes = 0;
  const unsigned int kRequiredStubJumpBytes = 5;
  const unsigned int kRequiredTargetPatchBytes = 5;
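  // Both constants are 5 because the jumps written below consist of a 1-byte
  // ASM_JMP32REL opcode followed by a 4-byte rel32 displacement.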

  // Initialize the stub with INT3's just in case.
  memset(preamble_stub, 0xcc, stub_size);

  if (kIs64BitBinary) {
    // In 64-bit mode JMP instructions are always relative to RIP. If the
    // replacement - target offset is > 2GB, we can't JMP to the replacement
    // function. In this case, we're going to use a trampoline - that is,
    // we're going to do a relative jump to a small chunk of code in the stub
    // that will then do the absolute jump to the replacement function. By
    // doing this, we only need to patch 5 bytes in the target function, as
    // opposed to patching 12 bytes if we were to do an absolute jump.
    //
    // Note that the first byte of the trampoline is a NOP instruction. This
    // is used as a trampoline signature that will be detected when unpatching
    // the function.
    //
    // The trampoline is:
    //    nop
    //    mov rax, <replacement_function>
    //    jmp rax
    //
    __int64 replacement_target_offset = reinterpret_cast<__int64>(
        replacement_function) - reinterpret_cast<__int64>(target) - 5;
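    // (The "- 5" accounts for the 5-byte jmp we will write at the target;
    // a rel32 displacement is measured from the instruction that follows it.)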
    if (replacement_target_offset > INT_MAX
        || replacement_target_offset < INT_MIN) {
      // The stub needs to be within 2GB of the target for the trampoline to
      // work!
      __int64 trampoline_offset = reinterpret_cast<__int64>(preamble_stub)
          - reinterpret_cast<__int64>(target) - 5;
      if (trampoline_offset > INT_MAX || trampoline_offset < INT_MIN) {
        SIDESTEP_ASSERT(false
                        && "Preamble stub is too far from target to patch.");
        return SIDESTEP_UNEXPECTED;
      }
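      // The trampoline is 13 bytes: a 1-byte nop, a 10-byte
      // "mov rax, imm64" (REX.W prefix, 0xB8 opcode, 8-byte immediate) and
      // a 2-byte "jmp rax".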
      required_trampoline_bytes = 13;
    }
  }

  // Let's disassemble the preamble of the target function to see if we can
  // patch, and to see how much of the preamble we need to take. We need 5
  // bytes for our jmp instruction, so let's find the minimum number of
  // instructions to get 5 bytes.
  MiniDisassembler disassembler;
  unsigned int preamble_bytes = 0;
  unsigned int stub_bytes = 0;
  while (preamble_bytes < kRequiredTargetPatchBytes) {
    unsigned int cur_bytes = 0;
    InstructionType instruction_type =
        disassembler.Disassemble(target + preamble_bytes, cur_bytes);
    if (IT_JUMP == instruction_type) {
      unsigned int jump_bytes = 0;
      SideStepError jump_ret = SIDESTEP_JUMP_INSTRUCTION;
      if (IsShortConditionalJump(target + preamble_bytes, cur_bytes)) {
        jump_ret = PatchShortConditionalJump(target + preamble_bytes, cur_bytes,
                                             preamble_stub + stub_bytes,
                                             &jump_bytes,
                                             stub_size - stub_bytes);
      } else if (IsNearConditionalJump(target + preamble_bytes, cur_bytes) ||
                 IsNearRelativeJump(target + preamble_bytes, cur_bytes) ||
                 IsNearAbsoluteCall(target + preamble_bytes, cur_bytes) ||
                 IsNearRelativeCall(target + preamble_bytes, cur_bytes)) {
        jump_ret = PatchNearJumpOrCall(target + preamble_bytes, cur_bytes,
                                       preamble_stub + stub_bytes, &jump_bytes,
                                       stub_size - stub_bytes);
      }
      if (jump_ret != SIDESTEP_SUCCESS) {
        SIDESTEP_ASSERT(false &&
                        "Unable to patch because there is an unhandled branch "
                        "instruction in the initial preamble bytes.");
        return SIDESTEP_JUMP_INSTRUCTION;
      }
      stub_bytes += jump_bytes;
    } else if (IT_RETURN == instruction_type) {
      SIDESTEP_ASSERT(false &&
                      "Unable to patch because function is too short");
      return SIDESTEP_FUNCTION_TOO_SMALL;
    } else if (IT_GENERIC == instruction_type) {
      if (IsMovWithDisplacement(target + preamble_bytes, cur_bytes)) {
        unsigned int mov_bytes = 0;
        if (PatchMovWithDisplacement(target + preamble_bytes, cur_bytes,
                                     preamble_stub + stub_bytes, &mov_bytes,
                                     stub_size - stub_bytes)
            != SIDESTEP_SUCCESS) {
          return SIDESTEP_UNSUPPORTED_INSTRUCTION;
        }
        stub_bytes += mov_bytes;
      } else {
        memcpy(reinterpret_cast<void*>(preamble_stub + stub_bytes),
               reinterpret_cast<void*>(target + preamble_bytes), cur_bytes);
        stub_bytes += cur_bytes;
      }
    } else {
      SIDESTEP_ASSERT(false &&
                      "Disassembler encountered unsupported instruction "
                      "(either unused or unknown)");
      return SIDESTEP_UNSUPPORTED_INSTRUCTION;
    }
    preamble_bytes += cur_bytes;
  }

  if (NULL != bytes_needed)
    *bytes_needed = stub_bytes + kRequiredStubJumpBytes
        + required_trampoline_bytes;

  // Inv: cbPreamble is the number of bytes (at least 5) that we need to take
  // from the preamble to have whole instructions that are 5 bytes or more
  // in size total. The size of the stub required is cbPreamble +
  // kRequiredStubJumpBytes (5) + required_trampoline_bytes (0 or 13)
  if (stub_bytes + kRequiredStubJumpBytes + required_trampoline_bytes
      > stub_size) {
    SIDESTEP_ASSERT(false);
    return SIDESTEP_INSUFFICIENT_BUFFER;
  }

  // Now, make a jmp instruction to the rest of the target function (minus the
  // preamble bytes we moved into the stub) and copy it into our preamble-stub.
  // find address to jump to, relative to next address after jmp instruction
#pragma warning(push)
#pragma warning(disable:4244)
  int relative_offset_to_target_rest
      = ((reinterpret_cast<unsigned char*>(target) + preamble_bytes) -
         (preamble_stub + stub_bytes + kRequiredStubJumpBytes));
#pragma warning(pop)
  // jmp (Jump near, relative, displacement relative to next instruction)
  preamble_stub[stub_bytes] = ASM_JMP32REL;
  memcpy(reinterpret_cast<void*>(preamble_stub + stub_bytes + 1),
         reinterpret_cast<void*>(&relative_offset_to_target_rest), 4);
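  // At this point the stub holds the relocated preamble instructions followed
  // by a 5-byte relative jmp back into the target function, just past the
  // bytes we are about to overwrite.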

  if (kIs64BitBinary && required_trampoline_bytes != 0) {
    // Construct the trampoline
    unsigned int trampoline_pos = stub_bytes + kRequiredStubJumpBytes;
    preamble_stub[trampoline_pos] = ASM_NOP;
    preamble_stub[trampoline_pos + 1] = ASM_REXW;
    preamble_stub[trampoline_pos + 2] = ASM_MOVRAX_IMM;
    memcpy(reinterpret_cast<void*>(preamble_stub + trampoline_pos + 3),
           reinterpret_cast<void*>(&replacement_function),
           sizeof(replacement_function));
    preamble_stub[trampoline_pos + 11] = ASM_JMP;
    preamble_stub[trampoline_pos + 12] = ASM_JMP_RAX;

    // Now update replacement_function to point to the trampoline
    replacement_function = preamble_stub + trampoline_pos;
  }
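  // (When a trampoline was built, the 5-byte jump written into the target
  // below lands on the trampoline, which then jumps to the real replacement
  // function.)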

  // Inv: preamble_stub points to assembly code that will execute the
  // original function by first executing the first cbPreamble bytes of the
  // preamble, then jumping to the rest of the function.

  // Overwrite the first 5 bytes of the target function with a jump to our
  // replacement function.
  // (Jump near, relative, displacement relative to next instruction)
  target[0] = ASM_JMP32REL;

  // Find offset from instruction after jmp, to the replacement function.
#pragma warning(push)
#pragma warning(disable:4244)
  int offset_to_replacement_function =
      reinterpret_cast<unsigned char*>(replacement_function) -
      reinterpret_cast<unsigned char*>(target) - 5;
#pragma warning(pop)
  // complete the jmp instruction
  memcpy(reinterpret_cast<void*>(target + 1),
         reinterpret_cast<void*>(&offset_to_replacement_function), 4);

  // Set any remaining bytes that were moved to the preamble-stub to INT3 so
  // as not to cause confusion (otherwise you might see some strange
  // instructions if you look at the disassembly, or even invalid
  // instructions). Also, by doing this, we will break into the debugger if
  // some code calls into this portion of the code. If this happens, it
  // means that this function cannot be patched using this patcher without
  // further thought.
  if (preamble_bytes > kRequiredTargetPatchBytes) {
    memset(reinterpret_cast<void*>(target + kRequiredTargetPatchBytes),
           ASM_INT3, preamble_bytes - kRequiredTargetPatchBytes);
  }

  // Inv: The memory pointed to by target_function now points to a relative
  // jump instruction that jumps over to the preamble_stub. The preamble
  // stub contains the first stub_size bytes of the original target
  // function's preamble code, followed by a relative jump back to the next
  // instruction after the first cbPreamble bytes.
  //
  // In 64-bit mode the memory pointed to by target_function *may* point to a
  // relative jump instruction that jumps to a trampoline which will then
  // perform an absolute jump to the replacement function. The preamble stub
  // still contains the original target function's preamble code, followed by a
  // jump back to the instructions after the first preamble bytes.

  return SIDESTEP_SUCCESS;
}

};  // namespace sidestep