// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "cpu.h"
#include "debug.h"
#include "memory.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Implementation of Assembler

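// The emit helpers below write an immediate of the given width directly into
// the instruction buffer at pc_ and advance pc_ past it (x64 is
// little-endian, so the bytes land in instruction-stream order). emitq also
// records relocation information for the emitted 64-bit constant when a
// relocation mode is supplied.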
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}

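// Code targets are not embedded in the instruction stream as addresses.
// Instead the Handle<Code> is kept in the code_targets_ side table and only
// its 32-bit index is emitted; a repeat of the most recent target reuses the
// previous table entry. code_target_object_handle_at() performs the reverse
// lookup when the index is read back out of generated code.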
void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  RecordRelocInfo(rmode);
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}

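// REX prefix layout is 0100WRXB: W selects a 64-bit operand size, while R, X
// and B supply bit 3 of the ModR/M reg field, the SIB index and the ModR/M
// rm (or SIB base) field respectively. Thus 0x48 below is REX.W with no
// extension bits and 0x40 is an empty REX prefix. Register::high_bit() is
// bit 3 of the register code, and Operand::rex_ already holds the X and B
// bits collected while the operand was built.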
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}

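// The "optional" variants emit a REX prefix only if at least one extension
// bit (R, X or B) is actually needed; for 32-bit operations on the low eight
// registers a bare 0x40 prefix would be redundant.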
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}

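// On x64 a code target is stored as a 32-bit displacement relative to the
// end of the 4-byte field itself, so the absolute target is disp + pc + 4.
// Patching a target writes target - pc - 4 and must flush the instruction
// cache for the modified word.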
Address Assembler::target_address_at(Address pc) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}

Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
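// Absolute pointers into the code object move with it, while PC-relative
// code targets are adjusted in the opposite direction so that the absolute
// address they resolve to stays the same.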
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
  } else if (IsCodeTarget(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    return Assembler::target_address_at(pc_);
  } else {
    return Memory::Address_at(pc_);
  }
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kCallTargetSize;
  } else {
    return Assembler::kExternalTargetSize;
  }
}


void RelocInfo::set_target_address(Address target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    Assembler::set_target_address_at(pc_, target);
  } else {
    Memory::Address_at(pc_) = target;
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}


void RelocInfo::set_target_object(Object* target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  *reinterpret_cast<Object**>(pc_) = target;
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[10] != 0xCC;
#else
  return false;
#endif
}

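// A debug break slot is filled with nops until the debugger patches it with
// a call; the slot counts as patched once its first instruction is no longer
// a nop.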
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}

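// For a patched return sequence or debug break slot the call target is the
// 64-bit immediate of the movq(kScratchRegister, <target>) instruction;
// kRealPatchReturnSequenceAddressOffset is the offset of that immediate
// relative to pc_.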
Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}

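// The two Visit variants below dispatch on the relocation mode so that the
// GC and the serializer can update embedded object pointers, code targets,
// external references and, when the debugger is active, patched debug
// targets. The templated version performs the same dispatch through a
// StaticVisitor without virtual calls.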
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitPointer(target_object_address());
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (Debug::has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit() {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitPointer(target_object_address());
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (Debug::has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

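// ModR/M is laid out as mod(2)|reg(3)|rm(3) and SIB as scale(2)|index(3)|
// base(3). set_modrm() fills only mod and rm (the reg field is OR'ed in
// later by the instruction emitter), and bit 3 of each register code goes
// into the pending REX prefix via rex_. As an illustration (not taken from
// this file): an operand like [rbx + rcx*4 + 0x10] ends up as mod=01,
// rm=100 (SIB follows), SIB=10'001'011, then the disp8 byte 0x10.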
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = scale << 6 | index.low_bits() << 3 | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}


} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_