// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| 27 | |
| 28 | #ifndef V8_X64_ASSEMBLER_X64_INL_H_ |
| 29 | #define V8_X64_ASSEMBLER_X64_INL_H_ |
| 30 | |
| 31 | #include "cpu.h" |
| 32 | #include "memory.h" |
| 33 | |
| 34 | namespace v8 { |
| 35 | namespace internal { |
| 36 | |
| 37 | Condition NegateCondition(Condition cc) { |
| 38 | return static_cast<Condition>(cc ^ 1); |
| 39 | } |
| 40 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 41 | |
| 42 | // ----------------------------------------------------------------------------- |
| 43 | // Implementation of Assembler |
| 44 | |
| 45 | |
| 46 | |
| 47 | void Assembler::emitl(uint32_t x) { |
| 48 | Memory::uint32_at(pc_) = x; |
| 49 | pc_ += sizeof(uint32_t); |
| 50 | } |
| 51 | |
| 52 | |
| 53 | void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) { |
| 54 | Memory::uint64_at(pc_) = x; |
| 55 | if (rmode != RelocInfo::NONE) { |
| 56 | RecordRelocInfo(rmode, x); |
| 57 | } |
| 58 | pc_ += sizeof(uint64_t); |
| 59 | } |
| 60 | |
| 61 | |
| 62 | void Assembler::emitw(uint16_t x) { |
| 63 | Memory::uint16_at(pc_) = x; |
| 64 | pc_ += sizeof(uint16_t); |
| 65 | } |
| 66 | |
| 67 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 68 | void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) { |
| 69 | ASSERT(RelocInfo::IsCodeTarget(rmode)); |
| 70 | RecordRelocInfo(rmode); |
| 71 | int current = code_targets_.length(); |
| 72 | if (current > 0 && code_targets_.last().is_identical_to(target)) { |
| 73 | // Optimization if we keep jumping to the same code target. |
| 74 | emitl(current - 1); |
| 75 | } else { |
| 76 | code_targets_.Add(target); |
| 77 | emitl(current); |
| 78 | } |
| 79 | } |
| 80 | |
| 81 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 82 | void Assembler::emit_rex_64(Register reg, Register rm_reg) { |
| 83 | emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit()); |
| 84 | } |
| 85 | |
| 86 | |
| 87 | void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) { |
| 88 | emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3); |
| 89 | } |
| 90 | |
| 91 | |
| 92 | void Assembler::emit_rex_64(Register reg, const Operand& op) { |
| 93 | emit(0x48 | reg.high_bit() << 2 | op.rex_); |
| 94 | } |
| 95 | |
| 96 | |
| 97 | void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) { |
| 98 | emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_); |
| 99 | } |
| 100 | |
| 101 | |
| 102 | void Assembler::emit_rex_64(Register rm_reg) { |
| 103 | ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code()); |
| 104 | emit(0x48 | rm_reg.high_bit()); |
| 105 | } |
| 106 | |
| 107 | |
| 108 | void Assembler::emit_rex_64(const Operand& op) { |
| 109 | emit(0x48 | op.rex_); |
| 110 | } |
| 111 | |
| 112 | |
| 113 | void Assembler::emit_rex_32(Register reg, Register rm_reg) { |
| 114 | emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit()); |
| 115 | } |
| 116 | |
| 117 | |
| 118 | void Assembler::emit_rex_32(Register reg, const Operand& op) { |
| 119 | emit(0x40 | reg.high_bit() << 2 | op.rex_); |
| 120 | } |
| 121 | |
| 122 | |
| 123 | void Assembler::emit_rex_32(Register rm_reg) { |
| 124 | emit(0x40 | rm_reg.high_bit()); |
| 125 | } |
| 126 | |
| 127 | |
| 128 | void Assembler::emit_rex_32(const Operand& op) { |
| 129 | emit(0x40 | op.rex_); |
| 130 | } |
| 131 | |
| 132 | |
| 133 | void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) { |
| 134 | byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit(); |
| 135 | if (rex_bits != 0) emit(0x40 | rex_bits); |
| 136 | } |
| 137 | |
| 138 | |
| 139 | void Assembler::emit_optional_rex_32(Register reg, const Operand& op) { |
| 140 | byte rex_bits = reg.high_bit() << 2 | op.rex_; |
| 141 | if (rex_bits != 0) emit(0x40 | rex_bits); |
| 142 | } |
| 143 | |
| 144 | |
| 145 | void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) { |
| 146 | byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_; |
| 147 | if (rex_bits != 0) emit(0x40 | rex_bits); |
| 148 | } |
| 149 | |
| 150 | |
| 151 | void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) { |
| 152 | byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3; |
| 153 | if (rex_bits != 0) emit(0x40 | rex_bits); |
| 154 | } |
| 155 | |
| 156 | |
| 157 | void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) { |
| 158 | byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3; |
| 159 | if (rex_bits != 0) emit(0x40 | rex_bits); |
| 160 | } |
| 161 | |
| 162 | |
| 163 | void Assembler::emit_optional_rex_32(Register rm_reg) { |
| 164 | if (rm_reg.high_bit()) emit(0x41); |
| 165 | } |
| 166 | |
| 167 | |
| 168 | void Assembler::emit_optional_rex_32(const Operand& op) { |
| 169 | if (op.rex_ != 0) emit(0x40 | op.rex_); |
| 170 | } |
| 171 | |
| 172 | |
| 173 | Address Assembler::target_address_at(Address pc) { |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 174 | return Memory::int32_at(pc) + pc + 4; |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 175 | } |
| 176 | |
| 177 | |
| 178 | void Assembler::set_target_address_at(Address pc, Address target) { |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame^] | 179 | Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 180 | CPU::FlushICache(pc, sizeof(int32_t)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 181 | } |
| 182 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 183 | Handle<Object> Assembler::code_target_object_handle_at(Address pc) { |
| 184 | return code_targets_[Memory::int32_at(pc)]; |
| 185 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 186 | |
| 187 | // ----------------------------------------------------------------------------- |
| 188 | // Implementation of RelocInfo |
| 189 | |
// Relocates this entry after the containing code object moved by delta bytes.
// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
  } else if (IsCodeTarget(rmode_)) {
    // Code targets are pc-relative rel32 values (target = offset + pc + 4),
    // so when pc moves by delta the stored offset must shrink by delta to
    // keep pointing at the same absolute target.
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
  } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
    // Special handling of js_return when a break point is set (call
    // instruction has been inserted).  pc_ + 1 skips the call opcode byte
    // to reach its rel32 operand.
    Memory::int32_at(pc_ + 1) -= static_cast<int32_t>(delta);  // relocate entry
  }
}
| 203 | |
| 204 | |
| 205 | Address RelocInfo::target_address() { |
| 206 | ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 207 | if (IsCodeTarget(rmode_)) { |
| 208 | return Assembler::target_address_at(pc_); |
| 209 | } else { |
| 210 | return Memory::Address_at(pc_); |
| 211 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 212 | } |
| 213 | |
| 214 | |
// Returns the address of the in-code slot holding the target, which is simply
// this relocation's own pc.
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return reinterpret_cast<Address>(pc_);
}
| 219 | |
| 220 | |
| 221 | void RelocInfo::set_target_address(Address target) { |
| 222 | ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 223 | if (IsCodeTarget(rmode_)) { |
| 224 | Assembler::set_target_address_at(pc_, target); |
| 225 | } else { |
| 226 | Memory::Address_at(pc_) = target; |
| 227 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 228 | } |
| 229 | |
| 230 | |
// Returns the object pointer embedded at pc_.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}
| 235 | |
| 236 | |
| 237 | Handle<Object> RelocInfo::target_object_handle(Assembler *origin) { |
| 238 | ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); |
| 239 | if (rmode_ == EMBEDDED_OBJECT) { |
| 240 | return Memory::Object_Handle_at(pc_); |
| 241 | } else { |
| 242 | return origin->code_target_object_handle_at(pc_); |
| 243 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 244 | } |
| 245 | |
| 246 | |
// Returns the address of the in-code slot holding the embedded object
// pointer.
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}
| 251 | |
| 252 | |
// Returns the address of the in-code slot holding an external reference.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}
| 257 | |
| 258 | |
// Overwrites the object pointer embedded at pc_ in the instruction stream.
void RelocInfo::set_target_object(Object* target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  *reinterpret_cast<Object**>(pc_) = target;
}
| 263 | |
| 264 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 265 | bool RelocInfo::IsPatchedReturnSequence() { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 266 | // The recognized call sequence is: |
| 267 | // movq(kScratchRegister, immediate64); call(kScratchRegister); |
| 268 | // It only needs to be distinguished from a return sequence |
| 269 | // movq(rsp, rbp); pop(rbp); ret(n); int3 *6 |
| 270 | // The 11th byte is int3 (0xCC) in the return sequence and |
| 271 | // REX.WB (0x48+register bit) for the call sequence. |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 272 | #ifdef ENABLE_DEBUGGER_SUPPORT |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 273 | return pc_[10] != 0xCC; |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 274 | #else |
| 275 | return false; |
| 276 | #endif |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 277 | } |
| 278 | |
| 279 | |
| 280 | Address RelocInfo::call_address() { |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 281 | ASSERT(IsPatchedReturnSequence()); |
| 282 | return Memory::Address_at( |
| 283 | pc_ + Assembler::kRealPatchReturnSequenceAddressOffset); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 284 | } |
| 285 | |
| 286 | |
| 287 | void RelocInfo::set_call_address(Address target) { |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 288 | ASSERT(IsPatchedReturnSequence()); |
| 289 | Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) = |
| 290 | target; |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 291 | } |
| 292 | |
| 293 | |
// Reads the object stored in the patched return sequence's object slot.
Object* RelocInfo::call_object() {
  ASSERT(IsPatchedReturnSequence());
  return *call_object_address();
}
| 298 | |
| 299 | |
// Writes the object stored in the patched return sequence's object slot.
void RelocInfo::set_call_object(Object* target) {
  ASSERT(IsPatchedReturnSequence());
  *call_object_address() = target;
}
| 304 | |
| 305 | |
| 306 | Object** RelocInfo::call_object_address() { |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 307 | ASSERT(IsPatchedReturnSequence()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 308 | return reinterpret_cast<Object**>( |
| 309 | pc_ + Assembler::kPatchReturnSequenceAddressOffset); |
| 310 | } |
| 311 | |
| 312 | // ----------------------------------------------------------------------------- |
| 313 | // Implementation of Operand |
| 314 | |
| 315 | void Operand::set_modrm(int mod, Register rm_reg) { |
| 316 | ASSERT(is_uint2(mod)); |
| 317 | buf_[0] = mod << 6 | rm_reg.low_bits(); |
| 318 | // Set REX.B to the high bit of rm.code(). |
| 319 | rex_ |= rm_reg.high_bit(); |
| 320 | } |
| 321 | |
| 322 | |
| 323 | void Operand::set_sib(ScaleFactor scale, Register index, Register base) { |
| 324 | ASSERT(len_ == 1); |
| 325 | ASSERT(is_uint2(scale)); |
| 326 | // Use SIB with no index register only for base rsp or r12. Otherwise we |
| 327 | // would skip the SIB byte entirely. |
| 328 | ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12)); |
| 329 | buf_[1] = scale << 6 | index.low_bits() << 3 | base.low_bits(); |
| 330 | rex_ |= index.high_bit() << 1 | base.high_bit(); |
| 331 | len_ = 2; |
| 332 | } |
| 333 | |
| 334 | void Operand::set_disp8(int disp) { |
| 335 | ASSERT(is_int8(disp)); |
| 336 | ASSERT(len_ == 1 || len_ == 2); |
| 337 | int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]); |
| 338 | *p = disp; |
| 339 | len_ += sizeof(int8_t); |
| 340 | } |
| 341 | |
| 342 | void Operand::set_disp32(int disp) { |
| 343 | ASSERT(len_ == 1 || len_ == 2); |
| 344 | int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]); |
| 345 | *p = disp; |
| 346 | len_ += sizeof(int32_t); |
| 347 | } |
| 348 | |
| 349 | |
| 350 | } } // namespace v8::internal |
| 351 | |
| 352 | #endif // V8_X64_ASSEMBLER_X64_INL_H_ |