/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef MacroAssemblerX86_h
#define MacroAssemblerX86_h

#include <wtf/Platform.h>

#if ENABLE(ASSEMBLER) && PLATFORM(X86)

#include "MacroAssemblerX86Common.h"

namespace JSC {
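
// MacroAssemblerX86 adds the x86-32-specific operations to
// MacroAssemblerX86Common: overloads taking AbsoluteAddress or raw-pointer
// operands (any 32-bit address fits directly in an immediate or displacement
// field), and the patchable pointer sequences used when linking and
// repatching generated code.
//
// A rough usage sketch ('counter' is a hypothetical int32_t; the emitted code
// still has to be finalized with a LinkBuffer before it can run):
//
//     MacroAssemblerX86 masm;
//     masm.add32(MacroAssemblerX86::Imm32(1),
//                MacroAssemblerX86::AbsoluteAddress(&counter));
//     masm.ret();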
class MacroAssemblerX86 : public MacroAssemblerX86Common {
public:
    MacroAssemblerX86()
        : m_isSSE2Present(isSSE2Present())
    {
    }

    static const Scale ScalePtr = TimesFour;
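
    // The overloads defined below would otherwise hide these base-class
    // names entirely (ordinary C++ name hiding), so re-expose them here.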
    using MacroAssemblerX86Common::add32;
    using MacroAssemblerX86Common::and32;
    using MacroAssemblerX86Common::sub32;
    using MacroAssemblerX86Common::or32;
    using MacroAssemblerX86Common::load32;
    using MacroAssemblerX86Common::store32;
    using MacroAssemblerX86Common::branch32;
    using MacroAssemblerX86Common::call;
    using MacroAssemblerX86Common::loadDouble;
    using MacroAssemblerX86Common::convertInt32ToDouble;

    void add32(Imm32 imm, RegisterID src, RegisterID dest)
    {
        // Three-operand form: lea computes dest = src + imm without
        // modifying src (and without setting flags).
        m_assembler.leal_mr(imm.m_value, src, dest);
    }

    void add32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.addl_im(imm.m_value, address.m_ptr);
    }

    void addWithCarry32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.adcl_im(imm.m_value, address.m_ptr);
    }

    void and32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.andl_im(imm.m_value, address.m_ptr);
    }

    void or32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.orl_im(imm.m_value, address.m_ptr);
    }

    void sub32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.subl_im(imm.m_value, address.m_ptr);
    }
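
    // Loads, stores and conversions addressed by a raw pointer instead of a
    // base register; on x86-32 any address fits in the instruction's 32-bit
    // displacement, so no scratch register is needed.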
    void load32(void* address, RegisterID dest)
    {
        m_assembler.movl_mr(address, dest);
    }

    void loadDouble(void* address, FPRegisterID dest)
    {
        ASSERT(isSSE2Present());
        m_assembler.movsd_mr(address, dest);
    }

    void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
    {
        m_assembler.cvtsi2sd_mr(src.m_ptr, dest);
    }

    void store32(Imm32 imm, void* address)
    {
        m_assembler.movl_i32m(imm.m_value, address);
    }

    void store32(RegisterID src, void* address)
    {
        m_assembler.movl_rm(src, address);
    }
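
    // Compare-and-branch against memory at an absolute address.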
    Jump branch32(Condition cond, AbsoluteAddress left, RegisterID right)
    {
        m_assembler.cmpl_rm(right, left.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch32(Condition cond, AbsoluteAddress left, Imm32 right)
    {
        m_assembler.cmpl_im(right.m_value, left.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Call call()
    {
        return Call(m_assembler.call(), Call::Linkable);
    }

    Call tailRecursiveCall()
    {
        return Call::fromTailJump(jump());
    }

    Call makeTailRecursiveCall(Jump oldJump)
    {
        return Call::fromTailJump(oldJump);
    }
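
    // Patching support. Each sequence below embeds a full 32-bit immediate
    // (forced to the long encoding where necessary) so the pointer can be
    // rewritten in place after code generation; the DataLabelPtr that is
    // returned, or written through the out-parameter, records where that
    // immediate lives.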
    DataLabelPtr moveWithPatch(ImmPtr initialValue, RegisterID dest)
    {
        m_assembler.movl_i32r(initialValue.asIntptr(), dest);
        return DataLabelPtr(this);
    }

    Jump branchPtrWithPatch(Condition cond, RegisterID left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
    {
        m_assembler.cmpl_ir_force32(initialRightValue.asIntptr(), left);
        dataLabel = DataLabelPtr(this);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchPtrWithPatch(Condition cond, Address left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
    {
        m_assembler.cmpl_im_force32(initialRightValue.asIntptr(), left.offset, left.base);
        dataLabel = DataLabelPtr(this);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    DataLabelPtr storePtrWithPatch(ImmPtr initialValue, ImplicitAddress address)
    {
        m_assembler.movl_i32m(initialValue.asIntptr(), address.offset, address.base);
        return DataLabelPtr(this);
    }

    Label loadPtrWithPatchToLEA(Address address, RegisterID dest)
    {
        // On x86 the patchable slot is a plain 32-bit load; the returned
        // label marks its start so the caller can patch it later.
        Label label(this);
        load32(address, dest);
        return label;
    }

    bool supportsFloatingPoint() const { return m_isSSE2Present; }
    // See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
    bool supportsFloatingPointTruncate() const { return m_isSSE2Present; }

private:
    const bool m_isSSE2Present;

    friend class LinkBuffer;
    friend class RepatchBuffer;
    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        X86Assembler::linkCall(code, call.m_jmp, function.value());
    }

    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }

    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }
};

} // namespace JSC

#endif // ENABLE(ASSEMBLER)

#endif // MacroAssemblerX86_h