// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/*
 * Implementation of PreamblePatcher
 */
#include "preamble_patcher.h"

#include <string.h>

#include "mini_disassembler.h"
13 // Definitions of assembly statements we need
14 #define ASM_JMP32REL 0xE9
19 SideStepError
PreamblePatcher::RawPatchWithStub(
20 void* target_function
,
21 void *replacement_function
,
22 unsigned char* preamble_stub
,
23 unsigned long stub_size
,
24 unsigned long* bytes_needed
) {
25 if ((NULL
== target_function
) ||
26 (NULL
== replacement_function
) ||
27 (NULL
== preamble_stub
)) {
28 ASSERT(false, "Invalid parameters - either pTargetFunction or "
29 "pReplacementFunction or pPreambleStub were NULL.");
30 return SIDESTEP_INVALID_PARAMETER
;
33 // TODO(V7:joi) Siggi and I just had a discussion and decided that both
34 // patching and unpatching are actually unsafe. We also discussed a
35 // method of making it safe, which is to freeze all other threads in the
36 // process, check their thread context to see if their eip is currently
37 // inside the block of instructions we need to copy to the stub, and if so
38 // wait a bit and try again, then unfreeze all threads once we've patched.
39 // Not implementing this for now since we're only using SideStep for unit
40 // testing, but if we ever use it for production code this is what we
43 // NOTE: Stoyan suggests we can write 8 or even 10 bytes atomically using
44 // FPU instructions, and on newer processors we could use cmpxchg8b or
45 // cmpxchg16b. So it might be possible to do the patching/unpatching
46 // atomically and avoid having to freeze other threads. Note though, that
47 // doing it atomically does not help if one of the other threads happens
48 // to have its eip in the middle of the bytes you change while you change
50 unsigned char* target
= reinterpret_cast<unsigned char*>(target_function
);
52 // First, deal with a special case that we see with functions that
53 // point into an IAT table (including functions linked statically
54 // into the application): these function already starts with
55 // ASM_JMP32REL. For instance, malloc() might be implemented as a
56 // JMP to __malloc(). In that case, we replace the destination of
57 // the JMP (__malloc), rather than the JMP itself (malloc). This
58 // way we get the correct behavior no matter how malloc gets called.
59 if (target
[0] == ASM_JMP32REL
) {
60 // target[1-4] holds the place the jmp goes to, but it's
61 // relative to the next instruction.
62 int relative_offset
; // Windows guarantees int is 4 bytes
63 ASSERT1(sizeof(relative_offset
) == 4);
64 memcpy(reinterpret_cast<void*>(&relative_offset
),
65 reinterpret_cast<void*>(target
+ 1), 4);
66 // I'd like to just say "target = target + 5 + relative_offset" here, but
67 // I can't, because the new target will need to have its protections set.
68 return RawPatchWithStubAndProtections(target
+ 5 + relative_offset
,
69 replacement_function
, preamble_stub
,
70 stub_size
, bytes_needed
);
73 // Let's disassemble the preamble of the target function to see if we can
74 // patch, and to see how much of the preamble we need to take. We need 5
75 // bytes for our jmp instruction, so let's find the minimum number of
76 // instructions to get 5 bytes.
77 MiniDisassembler disassembler
;
78 unsigned int preamble_bytes
= 0;
79 while (preamble_bytes
< 5) {
80 InstructionType instruction_type
=
81 disassembler
.Disassemble(target
+ preamble_bytes
, preamble_bytes
);
82 if (IT_JUMP
== instruction_type
) {
83 ASSERT(false, "Unable to patch because there is a jump instruction "
84 "in the first 5 bytes.");
85 return SIDESTEP_JUMP_INSTRUCTION
;
86 } else if (IT_RETURN
== instruction_type
) {
87 ASSERT(false, "Unable to patch because function is too short");
88 return SIDESTEP_FUNCTION_TOO_SMALL
;
89 } else if (IT_GENERIC
!= instruction_type
) {
90 ASSERT(false, "Disassembler encountered unsupported instruction "
91 "(either unused or unknown)");
92 return SIDESTEP_UNSUPPORTED_INSTRUCTION
;
96 if (NULL
!= bytes_needed
)
97 *bytes_needed
= preamble_bytes
+ 5;
99 // Inv: cbPreamble is the number of bytes (at least 5) that we need to take
100 // from the preamble to have whole instructions that are 5 bytes or more
101 // in size total. The size of the stub required is cbPreamble + size of
103 if (preamble_bytes
+ 5 > stub_size
) {
105 return SIDESTEP_INSUFFICIENT_BUFFER
;
108 // First, copy the preamble that we will overwrite.
109 memcpy(reinterpret_cast<void*>(preamble_stub
),
110 reinterpret_cast<void*>(target
), preamble_bytes
);
112 // Now, make a jmp instruction to the rest of the target function (minus the
113 // preamble bytes we moved into the stub) and copy it into our preamble-stub.
114 // find address to jump to, relative to next address after jmp instruction
116 #pragma warning(push)
117 #pragma warning(disable:4244)
119 int relative_offset_to_target_rest
120 = ((reinterpret_cast<unsigned char*>(target
) + preamble_bytes
) -
121 (preamble_stub
+ preamble_bytes
+ 5));
125 // jmp (Jump near, relative, displacement relative to next instruction)
126 preamble_stub
[preamble_bytes
] = ASM_JMP32REL
;
128 memcpy(reinterpret_cast<void*>(preamble_stub
+ preamble_bytes
+ 1),
129 reinterpret_cast<void*>(&relative_offset_to_target_rest
), 4);
131 // Inv: preamble_stub points to assembly code that will execute the
132 // original function by first executing the first cbPreamble bytes of the
133 // preamble, then jumping to the rest of the function.
135 // Overwrite the first 5 bytes of the target function with a jump to our
136 // replacement function.
137 // (Jump near, relative, displacement relative to next instruction)
138 target
[0] = ASM_JMP32REL
;
140 // Find offset from instruction after jmp, to the replacement function.
142 #pragma warning(push)
143 #pragma warning(disable:4244)
145 int offset_to_replacement_function
=
146 reinterpret_cast<unsigned char*>(replacement_function
) -
147 reinterpret_cast<unsigned char*>(target
) - 5;
151 // complete the jmp instruction
152 memcpy(reinterpret_cast<void*>(target
+ 1),
153 reinterpret_cast<void*>(&offset_to_replacement_function
), 4);
154 // Set any remaining bytes that were moved to the preamble-stub to INT3 so
155 // as not to cause confusion (otherwise you might see some strange
156 // instructions if you look at the disassembly, or even invalid
157 // instructions). Also, by doing this, we will break into the debugger if
158 // some code calls into this portion of the code. If this happens, it
159 // means that this function cannot be patched using this patcher without
161 if (preamble_bytes
> 5) {
162 memset(reinterpret_cast<void*>(target
+ 5), ASM_INT3
, preamble_bytes
- 5);
165 // Inv: The memory pointed to by target_function now points to a relative
166 // jump instruction that jumps over to the preamble_stub. The preamble
167 // stub contains the first stub_size bytes of the original target
168 // function's preamble code, followed by a relative jump back to the next
169 // instruction after the first cbPreamble bytes.
171 return SIDESTEP_SUCCESS
;
174 }; // namespace sidestep