// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "src/arm/assembler-arm.h"

#include "src/assembler.h"
#include "src/debug.h"


namespace v8 {
namespace internal {


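// Crankshaft (the optimizing compiler) requires the VFP3 floating-point unit
// on ARM.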
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}


int DwVfpRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}


int DwVfpRegister::NumReservedRegisters() {
  return kNumReservedRegisters;
}


int DwVfpRegister::NumAllocatableRegisters() {
  return NumRegisters() - kNumReservedRegisters;
}


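// Map a double register to its dense allocation index. The reserved registers
// (kDoubleRegZero and kScratchDoubleReg) are skipped, so register codes above
// them shift down by kNumReservedRegisters.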
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  DCHECK(!reg.is(kDoubleRegZero));
  DCHECK(!reg.is(kScratchDoubleReg));
  if (reg.code() > kDoubleRegZero.code()) {
    return reg.code() - kNumReservedRegisters;
  }
  return reg.code();
}


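// Inverse of ToAllocationIndex: reinsert the gap left by the reserved
// registers to recover the register from its allocation index.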
DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
  DCHECK(index >= 0 && index < NumAllocatableRegisters());
  DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
         kNumReservedRegisters - 1);
  if (index >= kDoubleRegZero.code()) {
    return from_code(index + kNumReservedRegisters);
  }
  return from_code(index);
}


void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  }
  // We do not use pc relative addressing on ARM, so there is
  // nothing else to do.
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
         || rmode_ == EMBEDDED_OBJECT
         || rmode_ == EXTERNAL_REFERENCE);
  if (FLAG_enable_ool_constant_pool ||
      Assembler::IsMovW(Memory::int32_at(pc_))) {
    // We return the PC for the ool constant pool since this function is used
    // by the serializer, which expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
    return constant_pool_entry_address();
  }
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


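// Patch the relocated code target and, when write barriers are requested,
// record the write so the incremental marker sees the new target code.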
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


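// CELL entries store the address of the cell's value slot in the instruction
// stream; these accessors translate between that slot address and the Cell
// object itself.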
Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550): We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


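// The code-age sequence emitted on ARM is three instructions long, and the
// code-age stub address is stored in the last word of that sequence.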
static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ +
                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ +
                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
      stub->instruction_start();
}


Address RelocInfo::call_address() {
  // The two-instruction offset assumes a patched debug break slot or return
  // sequence.
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}


bool RelocInfo::IsPatchedReturnSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
  // A patched return sequence is:
  //  ldr ip, [pc, #0]
  //  blx ip
  return Assembler::IsLdrPcImmediateOffset(current_instr) &&
         Assembler::IsBlxReg(next_instr);
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}


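// Dispatch this relocation entry to the visitor callback that matches its
// mode (embedded object, code target, cell, external reference, code-age
// sequence, debug target, or runtime entry).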
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


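// Same dispatch as above, but routed through a StaticVisitor class instead of
// a virtual ObjectVisitor.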
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}


bool Operand::is_reg() const {
  return rm_.is_valid() &&
         rs_.is(no_reg) &&
         shift_op_ == LSL &&
         shift_imm_ == 0;
}


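// Grow the instruction buffer when free space runs low, and emit the pending
// constant pool once the next check offset has been reached.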
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  if (pc_offset() >= next_buffer_check_) {
    CheckConstPool(false, true);
  }
}


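// Write one instruction word at the current position and advance pc;
// CheckBuffer() runs first so the buffer is grown and the constant pool
// emitted when needed.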
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on V7 or later is:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // For V6 when the constant pool is unavailable, it is:
  //  mov  ip, #...     @ call address low 8
  //  orr  ip, ip, #... @ call address 2nd 8
  //  orr  ip, ip, #... @ call address 3rd 8
  //  orr  ip, ip, #... @ call address high 8
  //  blx   ip
  //                      @ return address
  // In cases that need frequent patching, the address is in the
  // constant pool.  It could be a small constant pool load:
  //  ldr   ip, [pc / pp, #...] @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv7):
  //  movw  ip, #...
  //  movt  ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  // Or an extended constant pool load (ARMv6):
  //  mov  ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  orr  ip, ip, #...
  //  ldr   ip, [pc, ip]  @ call address
  //  blx   ip
  //                      @ return address
  Address candidate = pc - 2 * Assembler::kInstrSize;
  Instr candidate_instr(Memory::int32_at(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr) ||
      IsLdrPpImmediateOffset(candidate_instr)) {
    return candidate;
  } else {
    if (IsLdrPpRegOffset(candidate_instr)) {
      candidate -= Assembler::kInstrSize;
    }
    if (CpuFeatures::IsSupported(ARMv7)) {
      candidate -= 1 * Assembler::kInstrSize;
      DCHECK(IsMovW(Memory::int32_at(candidate)) &&
             IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
    } else {
      candidate -= 3 * Assembler::kInstrSize;
      DCHECK(
          IsMovImmed(Memory::int32_at(candidate)) &&
          IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) &&
          IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize)));
    }
    return candidate;
  }
}


Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}


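// Given the first instruction of a call sequence, compute the address the
// call will return to. The sequence length depends on whether the target is
// loaded from a constant pool and how the immediate is materialized
// (movw/movt on ARMv7, mov/orr on ARMv6).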
Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) ||
      IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
    // Load from constant pool, small section.
    return pc + kInstrSize * 2;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      DCHECK(IsMovW(Memory::int32_at(pc)));
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 4;
      } else {
        // A movw / movt load immediate.
        return pc + kInstrSize * 3;
      }
    } else {
      DCHECK(IsMovImmed(Memory::int32_at(pc)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)));
      DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
      if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) {
        // Load from constant pool, extended section.
        return pc + kInstrSize * 6;
      } else {
        // A mov / orr load immediate.
        return pc + kInstrSize * 5;
      }
    }
  }
}


void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code* code, Address target) {
  if (FLAG_enable_ool_constant_pool) {
    set_target_address_at(constant_pool_entry, code, target);
  } else {
    Memory::Address_at(constant_pool_entry) = target;
  }
}


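// True if the instruction sequence at pc loads its value from a constant pool
// (in-code or out-of-line) rather than building it from movw/movt or mov/orr
// immediates.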
bool Assembler::is_constant_pool_load(Address pc) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    return !Assembler::IsMovW(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
  } else {
    return !Assembler::IsMovImmed(Memory::int32_at(pc)) ||
           (FLAG_enable_ool_constant_pool &&
            Assembler::IsLdrPpRegOffset(
                Memory::int32_at(pc + 4 * Assembler::kInstrSize)));
  }
}


Address Assembler::constant_pool_entry_address(
    Address pc, ConstantPoolArray* constant_pool) {
  if (FLAG_enable_ool_constant_pool) {
    DCHECK(constant_pool != NULL);
    int cp_offset;
    if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) {
      DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
             IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv6).
      Instr mov_instr = instr_at(pc);
      Instr orr_instr_1 = instr_at(pc + kInstrSize);
      Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
      Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
      cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
                  DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3);
    } else if (IsMovW(Memory::int32_at(pc))) {
      DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
      // This is an extended constant pool lookup (ARMv7).
      Instruction* movw_instr = Instruction::At(pc);
      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
                  movw_instr->ImmedMovwMovtValue();
    } else {
      // This is a small constant pool lookup.
      DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
    }
    return reinterpret_cast<Address>(constant_pool) + cp_offset;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Instr instr = Memory::int32_at(pc);
    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
  }
}


Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory::int32_at(pc)) &&
           IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return reinterpret_cast<Address>(
        (movt_instr->ImmedMovwMovtValue() << 16) |
        movw_instr->ImmedMovwMovtValue());
  } else {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = reinterpret_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  }
}


void Assembler::set_target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as
    // follows:
    //   CpuFeatures::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    //   ldr   ip, [pp, #...]
    // since the instruction accessing this address in the constant pool
    // remains unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7)) {
    // This is a movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory::int32_at(pc)));
    DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 2 * kInstrSize);
    }
  } else {
    // This is a mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory::int32_at(pc)) &&
           IsOrrImmed(Memory::int32_at(pc + kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      CpuFeatures::FlushICache(pc, 4 * kInstrSize);
    }
  }
}


} }  // namespace v8::internal

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_