// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "cpu.h"
#include "memory.h"

namespace v8 {
namespace internal {

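// The x64 condition codes come in complementary pairs that differ only in
// their lowest bit (e.g. 'equal' is 0x4, 'not equal' is 0x5), so a condition
// is negated by flipping that bit.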
Condition NegateCondition(Condition cc) {
  return static_cast<Condition>(cc ^ 1);
}


// -----------------------------------------------------------------------------
// Implementation of Assembler



void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


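// A code target is emitted as a 32-bit index into code_targets_ rather than
// as an address; back-to-back references to the same target reuse the last
// entry instead of growing the list.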
void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  RecordRelocInfo(rmode);
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}


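// The REX prefix byte has the layout 0100WRXB: W selects a 64-bit operand
// size, R extends the ModRM reg field, X extends the SIB index field and B
// extends the ModRM rm (or SIB base) field. 0x48 is a REX prefix with only W
// set; 0x40 is an otherwise empty REX prefix used only when one of the
// extended registers r8-r15 (or xmm8-xmm15) is involved.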
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}


void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}


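// Call targets are stored as a 32-bit displacement relative to the first
// byte after the displacement field (as in a rel32 call/jmp), so the
// absolute target is reconstructed as displacement + pc + 4.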
Address Assembler::target_address_at(Address pc) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}

Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
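// A pc-relative code target stores target - pc - 4 (see set_target_address_at
// above); if this code object moves forward by delta while the target stays
// put, the stored displacement therefore shrinks by delta.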
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
  } else if (IsCodeTarget(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
  } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
    // Special handling of js_return when a break point is set (call
    // instruction has been inserted).
    Memory::int32_at(pc_ + 1) -= static_cast<int32_t>(delta);  // relocate entry
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    return Assembler::target_address_at(pc_);
  } else {
    return Memory::Address_at(pc_);
  }
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return reinterpret_cast<Address>(pc_);
}


void RelocInfo::set_target_address(Address target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    Assembler::set_target_address_at(pc_, target);
  } else {
    Memory::Address_at(pc_) = target;
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}


void RelocInfo::set_target_object(Object* target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  *reinterpret_cast<Object**>(pc_) = target;
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
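  // (movq with a 64-bit immediate encodes as a REX.W prefix, the 0xB8+reg
  // opcode byte and eight immediate bytes, i.e. ten bytes in total, so
  // pc_[10] is the first byte of whatever instruction follows.)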
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[10] != 0xCC;
#else
  return false;
#endif
}


Address RelocInfo::call_address() {
  ASSERT(IsPatchedReturnSequence());
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT(IsPatchedReturnSequence());
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
}


Object* RelocInfo::call_object() {
  ASSERT(IsPatchedReturnSequence());
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  ASSERT(IsPatchedReturnSequence());
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT(IsPatchedReturnSequence());
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}

// -----------------------------------------------------------------------------
// Implementation of Operand

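// ModRM and SIB bytes share the same layout: two high bits (mod/scale), three
// middle bits (reg/index) and three low bits (rm/base). Only the low three
// bits of a register code fit in these fields; the fourth bit is accumulated
// in rex_ and emitted later as part of the REX prefix.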
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = scale << 6 | index.low_bits() << 3 | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}


} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_