// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS64_ASSEMBLER_MIPS64_INL_H_
#define V8_MIPS64_ASSEMBLER_MIPS64_INL_H_

#include "src/mips64/assembler-mips64.h"

#include "src/assembler.h"
#include "src/debug.h"


namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }


// -----------------------------------------------------------------------------
// Operand and MemOperand.

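// An Operand wraps either a register (in which case rm_ is valid) or a
// 64-bit immediate together with the relocation mode to use when the
// immediate is emitted.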
Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm64_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<int64_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}


int DoubleRegister::NumRegisters() {
  return FPURegister::kMaxNumRegisters;
}


int DoubleRegister::NumAllocatableRegisters() {
  return FPURegister::kMaxNumAllocatableRegisters;
}


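// Allocatable double registers are the even-numbered FPU registers, so the
// allocation index is simply code() / 2. The double zero register and the
// Lithium scratch register are never handed out by the allocator, hence the
// DCHECKs below.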
int FPURegister::ToAllocationIndex(FPURegister reg) {
  DCHECK(reg.code() % 2 == 0);
  DCHECK(reg.code() / 2 < kMaxNumAllocatableRegisters);
  DCHECK(reg.is_valid());
  DCHECK(!reg.is(kDoubleRegZero));
  DCHECK(!reg.is(kLithiumScratchDouble));
  return (reg.code() / 2);
}


// -----------------------------------------------------------------------------
// RelocInfo.

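// Called when the code object containing this relocation moves by delta
// bytes. Only absolute pointers back into the same code object (internal
// references) need adjusting; targets outside the object keep their absolute
// addresses.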
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(p, delta);
    CpuFeatures::FlushICache(p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in the
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows the J/JAL/JR/JALR
  // instruction.
  // return reinterpret_cast<Address>(
  //     pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
  return reinterpret_cast<Address>(
      pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


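// The call sequence encodes its target in the instructions that precede the
// return address; step back by the fixed kCallTargetAddressOffset to reach
// the word holding it.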
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


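// Byte length of the sequence emitted for code that has not yet been aged by
// the code-aging machinery: nine instructions on MIPS64.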
static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS64.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + Assembler::kInstrSize,
                                   host_,
                                   stub->instruction_start());
}


Address RelocInfo::call_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes the patched return sequence emitted by
  // debug-mips64.cc BreakLocationIterator::SetDebugBreakAtReturn(), or the
  // debug break slot emitted by BreakLocationIterator::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes the patched return sequence emitted by
  // debug-mips64.cc BreakLocationIterator::SetDebugBreakAtReturn(), or the
  // debug break slot emitted by BreakLocationIterator::SetDebugBreakAtSlot().
  Assembler::set_target_address_at(pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 6 * Assembler::kInstrSize);
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}


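// A patched return sequence on MIPS64 materializes the debug call target with
// lui/ori/dsll/ori (a 48-bit constant load) and then calls it with jalr; this
// predicate recognizes exactly that instruction shape.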
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);  // lui.
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);  // ori.
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);  // dsll.
  Instr instr3 = Assembler::instr_at(pc_ + 3 * Assembler::kInstrSize);  // ori.
  Instr instr4 = Assembler::instr_at(pc_ + 4 * Assembler::kInstrSize);  // jalr.

  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         (instr2 & kFunctionFieldMask) == DSLL &&
                         (instr3 & kOpcodeMask) == ORI &&
                         (instr4 & kFunctionFieldMask) == JALR);
  return patched_return;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


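// Dispatch this relocation entry to the matching ObjectVisitor callback.
// Debug targets are only visited when break points are active and the
// return or debug-break-slot sequence has actually been patched.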
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


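// Grow the instruction buffer once the remaining space shrinks to the
// reserved gap.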
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


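// Check whether a trampoline pool needs to be emitted now that code
// generation has passed the next scheduled check position.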
void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_buffer_check_) {
    CheckTrampolinePool();
  }
}


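// Write one 32-bit instruction at the current position, growing the buffer
// first unless growth is blocked, then check whether a trampoline pool is due.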
void Assembler::emit(Instr x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}


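// Write a raw 64-bit value into the instruction stream; it occupies two
// instruction slots.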
void Assembler::emit(uint64_t x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<uint64_t*>(pc_) = x;
  pc_ += kInstrSize * 2;
  CheckTrampolinePoolQuick();
}


} }  // namespace v8::internal

#endif  // V8_MIPS64_ASSEMBLER_MIPS64_INL_H_