blob: f51a3ea8871712bd6b04d42f1ab14ed01e682523 [file] [log] [blame]
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_X64_ASSEMBLER_X64_INL_H_
29#define V8_X64_ASSEMBLER_X64_INL_H_
30
31#include "cpu.h"
32#include "memory.h"
33
34namespace v8 {
35namespace internal {
36
37Condition NegateCondition(Condition cc) {
38 return static_cast<Condition>(cc ^ 1);
39}
40
41// -----------------------------------------------------------------------------
42
43Immediate::Immediate(Smi* value) {
44 value_ = static_cast<int32_t>(reinterpret_cast<intptr_t>(value));
45}
46
47// -----------------------------------------------------------------------------
48// Implementation of Assembler
49
50
51
// Stores a 32-bit value at the current assembly position (pc_) and
// advances pc_ past it.
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}
56
57
// Stores a 64-bit value at the current assembly position (pc_).
// Relocation info, when requested, is recorded BEFORE pc_ is advanced,
// i.e. while pc_ still addresses the quadword just written — presumably
// RecordRelocInfo reads the current pc_; confirm in assembler-x64.cc
// before reordering these statements.
void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}
65
66
// Stores a 16-bit value at the current assembly position (pc_) and
// advances pc_ past it.
void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}
71
72
73void Assembler::emit_rex_64(Register reg, Register rm_reg) {
74 emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
75}
76
77
78void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
79 emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
80}
81
82
83void Assembler::emit_rex_64(Register reg, const Operand& op) {
84 emit(0x48 | reg.high_bit() << 2 | op.rex_);
85}
86
87
88void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
89 emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
90}
91
92
93void Assembler::emit_rex_64(Register rm_reg) {
94 ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
95 emit(0x48 | rm_reg.high_bit());
96}
97
98
99void Assembler::emit_rex_64(const Operand& op) {
100 emit(0x48 | op.rex_);
101}
102
103
104void Assembler::emit_rex_32(Register reg, Register rm_reg) {
105 emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
106}
107
108
109void Assembler::emit_rex_32(Register reg, const Operand& op) {
110 emit(0x40 | reg.high_bit() << 2 | op.rex_);
111}
112
113
114void Assembler::emit_rex_32(Register rm_reg) {
115 emit(0x40 | rm_reg.high_bit());
116}
117
118
119void Assembler::emit_rex_32(const Operand& op) {
120 emit(0x40 | op.rex_);
121}
122
123
124void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
125 byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
126 if (rex_bits != 0) emit(0x40 | rex_bits);
127}
128
129
130void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
131 byte rex_bits = reg.high_bit() << 2 | op.rex_;
132 if (rex_bits != 0) emit(0x40 | rex_bits);
133}
134
135
136void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
137 byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
138 if (rex_bits != 0) emit(0x40 | rex_bits);
139}
140
141
142void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
143 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
144 if (rex_bits != 0) emit(0x40 | rex_bits);
145}
146
147
148void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
149 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
150 if (rex_bits != 0) emit(0x40 | rex_bits);
151}
152
153
154void Assembler::emit_optional_rex_32(Register rm_reg) {
155 if (rm_reg.high_bit()) emit(0x41);
156}
157
158
159void Assembler::emit_optional_rex_32(const Operand& op) {
160 if (op.rex_ != 0) emit(0x40 | op.rex_);
161}
162
163
// Reads the pointer-sized target address stored inline at 'pc'.
Address Assembler::target_address_at(Address pc) {
  return Memory::Address_at(pc);
}
167
168
// Overwrites the pointer-sized target address stored at 'pc', then
// flushes the instruction cache for the modified bytes so the patched
// code is seen by subsequent execution.
void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::Address_at(pc) = target;
  CPU::FlushICache(pc, sizeof(intptr_t));
}
173
174
175// -----------------------------------------------------------------------------
176// Implementation of RelocInfo
177
// The modes possibly affected by apply must be in kApplyMask.
// Adjusts an in-code pointer by 'delta' when the containing code object
// has moved in memory.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    intptr_t* p = reinterpret_cast<intptr_t*>(pc_);
    *p += delta;  // Relocate entry.
  }
}
186
187
// Returns the address this entry targets; valid only for code targets
// and runtime entries (the address is stored inline at pc_).
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return Assembler::target_address_at(pc_);
}
192
193
// Returns the location where the target address itself is stored; on
// x64 that is pc_ itself, since the address is embedded inline.
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return reinterpret_cast<Address>(pc_);
}
198
199
// Patches the inline target address at pc_ (icache flushing is handled
// by Assembler::set_target_address_at).
void RelocInfo::set_target_address(Address target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  Assembler::set_target_address_at(pc_, target);
}
204
205
206Object* RelocInfo::target_object() {
207 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
208 return *reinterpret_cast<Object**>(pc_);
209}
210
211
212Object** RelocInfo::target_object_address() {
213 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
214 return reinterpret_cast<Object**>(pc_);
215}
216
217
218Address* RelocInfo::target_reference_address() {
219 ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
220 return reinterpret_cast<Address*>(pc_);
221}
222
223
224void RelocInfo::set_target_object(Object* target) {
225 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
226 *reinterpret_cast<Object**>(pc_) = target;
227}
228
229
// Tests whether the code at pc_ is a patched-in call sequence rather
// than the original return sequence it replaces.
bool RelocInfo::IsCallInstruction() {
  // The recognized call sequence is:
  //   movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //   movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
  return pc_[10] != 0xCC;
}
239
240
241Address RelocInfo::call_address() {
242 ASSERT(IsCallInstruction());
243 return Assembler::target_address_at(
244 pc_ + Assembler::kPatchReturnSequenceAddressOffset);
245}
246
247
248void RelocInfo::set_call_address(Address target) {
249 ASSERT(IsCallInstruction());
250 Assembler::set_target_address_at(
251 pc_ + Assembler::kPatchReturnSequenceAddressOffset,
252 target);
253}
254
255
256Object* RelocInfo::call_object() {
257 ASSERT(IsCallInstruction());
258 return *call_object_address();
259}
260
261
262void RelocInfo::set_call_object(Object* target) {
263 ASSERT(IsCallInstruction());
264 *call_object_address() = target;
265}
266
267
268Object** RelocInfo::call_object_address() {
269 ASSERT(IsCallInstruction());
270 return reinterpret_cast<Object**>(
271 pc_ + Assembler::kPatchReturnSequenceAddressOffset);
272}
273
274// -----------------------------------------------------------------------------
275// Implementation of Operand
276
// Encodes the ModR/M byte into buf_[0]: mod in bits 7..6, rm in bits
// 2..0 (bits 5..3, the reg field, are left zero here).
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}
283
284
// Appends the SIB byte (scale in bits 7..6, index in 5..3, base in
// 2..0) after the ModR/M byte, and folds the registers' high bits into
// the pending REX bits (index -> REX.X, base -> REX.B).
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = scale << 6 | index.low_bits() << 3 | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}
295
// Appends an 8-bit displacement after the ModR/M (and optional SIB)
// bytes already in buf_, and grows len_ accordingly.
void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}
303
// Appends a 32-bit displacement after the ModR/M (and optional SIB)
// bytes already in buf_, and grows len_ accordingly.
void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}
310
311
312} } // namespace v8::internal
313
314#endif // V8_X64_ASSEMBLER_X64_INL_H_