// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

37#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38#define V8_ARM_ASSEMBLER_ARM_INL_H_
39
Ben Murdochb8a8cc12014-11-26 15:28:44 +000040#include "src/arm/assembler-arm.h"
Ben Murdoch3ef787d2012-04-12 10:51:47 +010041
Ben Murdochb8a8cc12014-11-26 15:28:44 +000042#include "src/assembler.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000043#include "src/debug/debug.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000044
45
46namespace v8 {
47namespace internal {
48
Steve Blocka7e24c12009-10-30 11:49:00 +000049
Ben Murdochb8a8cc12014-11-26 15:28:44 +000050bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
51
52
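// The number of usable double registers depends on VFP32DREGS support:
// 32 d-registers when it is available, 16 otherwise.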
int DoubleRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}


void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  }
  // We do not use pc relative addressing on ARM, so there is
  // nothing else to do.
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
      || rmode_ == EMBEDDED_OBJECT
      || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_embedded_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the PC for embedded constant pool since this function is used
    // by the serializer and expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return constant_pool_entry_address();
  }
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}


Address RelocInfo::debug_call_address() {
  // The two-instruction offset assumes a patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset) =
      target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

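// Resets the target of this relocation entry to NULL, either directly in
// memory (internal references) or by patching the encoded target address.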
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}


bool Operand::is_reg() const {
  return rm_.is_valid() &&
      rs_.is(no_reg) &&
      shift_op_ == LSL &&
      shift_imm_ == 0;
}

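// Makes sure the instruction buffer has room for the next instruction,
// growing it if necessary, and checks whether a constant pool needs to be
// emitted.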
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}

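// Writes a single instruction at the current position and advances pc_.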
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on V7 or later is:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // For V6 when the constant pool is unavailable, it is:
  //  mov  ip, #...     @ call address low 8
  //  orr  ip, ip, #... @ call address 2nd 8
  //  orr  ip, ip, #... @ call address 3rd 8
  //  orr  ip, ip, #... @ call address high 8
  //  blx   ip
  //                      @ return address
  // In cases that need frequent patching, the address is in the
  // constant pool.  It could be a small constant pool load:
  //  ldr   ip, [pc / pp, #...] @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv7):
  //  movw  ip, #...
  //  movt  ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv6):
  //  mov  ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  Address candidate = pc - 2 * Assembler::kInstrSize;
  Instr candidate_instr(Memory::int32_at(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr) |
      IsLdrPpImmediateOffset(candidate_instr)) {
    return candidate;
  } else {
    if (IsLdrPpRegOffset(candidate_instr)) {
      candidate -= Assembler::kInstrSize;
    }
    if (CpuFeatures::IsSupported(ARMv7)) {
      candidate -= 1 * Assembler::kInstrSize;
      DCHECK(IsMovW(Memory::int32_at(candidate)) &&
             IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
    } else {
      candidate -= 3 * Assembler::kInstrSize;
      DCHECK(
          IsMovImmed(Memory::int32_at(candidate)) &&
          IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
    }
    return candidate;
  }
}


Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
      IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
    // Load from constant pool, small section.
    return pc + kInstrSize * 2;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      DCHECK(IsMovW(Memory::int32_at(pc)));
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 4;
      } else {
        // A movw / movt load immediate.
        return pc + kInstrSize * 3;
      }
    } else {
      DCHECK(IsMovImmed(Memory::int32_at(pc)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 6;
      } else {
        // A mov / orr load immediate.
        return pc + kInstrSize * 5;
      }
    }
  }
}


void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address constant_pool_entry, Code* code,
    Address target) {
  if (FLAG_enable_embedded_constant_pool) {
    set_target_address_at(isolate, constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}

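// Returns true if the code at pc loads its target from the constant pool
// rather than encoding it directly as a movw/movt (ARMv7) or mov/orr
// (pre-ARMv7) immediate.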
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_embedded_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_embedded_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}

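// Computes the address of the constant pool slot referenced by the load
// sequence at pc, either relative to the constant pool pointer (embedded
// constant pool) or pc-relative (inline constant pool load).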
Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return constant_pool + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}

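// Reads the target address either from the referenced constant pool entry or
// from the immediate encoded in the movw/movt or mov/orr sequence at pc.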
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
        movw_instr->ImmedMovwMovtValue());
  } else {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}

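// Writes the new target either into the referenced constant pool entry or
// into the immediate load sequence at pc, flushing the instruction cache
// when the instructions themselves are patched (unless explicitly skipped).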
void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   Assembler::FlushICache(isolate, pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    //   ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 2 * kInstrSize);
    }
  } else {
    // This is a mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, pc, 4 * kInstrSize);
    }
  }
}


}  // namespace internal
}  // namespace v8

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_