// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}

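// Helper that sets the remembered set bit for the slot 'addr' inside
// 'object's page. On entry 'addr' must point into 'object'; all three
// registers are clobbered (they are reused as page_start, pointer_offset
// and a length scratch below).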
static void RecordWriteHelper(MacroAssembler* masm,
                              Register object,
                              Register addr,
                              Register scratch) {
  Label fast;

  // Compute the page start address from the heap object pointer, and reuse
  // the 'object' register for it.
  masm->and_(object, ~Page::kPageAlignmentMask);
  Register page_start = object;

  // Compute the bit address in the remembered set, i.e. the index of the
  // pointer within the page. Reuse 'addr' as 'pointer_offset'.
  masm->sub(addr, Operand(page_start));
  masm->shr(addr, kObjectAlignmentBits);
  Register pointer_offset = addr;

  // If the bit offset lies beyond the normal remembered set range, it is in
  // the extra remembered set area of a large object.
  masm->cmp(pointer_offset, Page::kPageSize / kPointerSize);
  masm->j(less, &fast);

  // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
  // extra remembered set after the large object.

  // Find the length of the large object (FixedArray).
  masm->mov(scratch, Operand(page_start, Page::kObjectStartOffset
                                         + FixedArray::kLengthOffset));
  Register array_length = scratch;

  // The extra remembered set starts right after the large object (a
  // FixedArray), at
  //   page_start + kObjectStartOffset + objectSize
  // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
  // Add the delta between the end of the normal RSet and the start of the
  // extra RSet to 'page_start', so that addressing the bit using
  // 'pointer_offset' hits the extra RSet words.
  masm->lea(page_start,
            Operand(page_start, array_length, times_pointer_size,
                    Page::kObjectStartOffset + FixedArray::kHeaderSize
                        - Page::kRSetEndOffset));

  // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
  // to limit code size. We should probably evaluate this decision by
  // measuring the performance of an equivalent implementation using
  // "simpler" instructions.
  masm->bind(&fast);
  masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
}

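// Worked example of the bit computation above (illustrative; assumes 8 KB
// pages and 4-byte pointers, so kObjectAlignmentBits == 2):
//   object = 0x08002118  ->  page_start = 0x08002000
//   addr   = 0x0800213c  ->  pointer_offset = (0x13c >> 2) = 0x4f
// Since 0x4f < Page::kPageSize / kPointerSize, the fast path sets bit 0x4f
// of the word array at page_start + Page::kRSetOffset.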
class RecordWriteStub : public CodeStub {
 public:
  RecordWriteStub(Register object, Register addr, Register scratch)
      : object_(object), addr_(addr), scratch_(scratch) { }

  void Generate(MacroAssembler* masm);

 private:
  Register object_;
  Register addr_;
  Register scratch_;

#ifdef DEBUG
  void Print() {
    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
           object_.code(), addr_.code(), scratch_.code());
  }
#endif

  // Minor key encoding in 12 bits of three registers (object, address and
  // scratch) OOOOAAAASSSS.
  class ScratchBits: public BitField<uint32_t, 0, 4> {};
  class AddressBits: public BitField<uint32_t, 4, 4> {};
  class ObjectBits: public BitField<uint32_t, 8, 4> {};

  Major MajorKey() { return RecordWrite; }

  int MinorKey() {
    // Encode the registers.
    return ObjectBits::encode(object_.code()) |
           AddressBits::encode(addr_.code()) |
           ScratchBits::encode(scratch_.code());
  }
};

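// Example of the minor key layout (illustrative; assumes the standard ia32
// register codes eax == 0, ecx == 1, edx == 2):
//   RecordWriteStub(eax, ecx, edx).MinorKey()
//     == ObjectBits::encode(0) | AddressBits::encode(1) | ScratchBits::encode(2)
//     == 0x012, i.e. binary 0000 0001 0010 (OOOO AAAA SSSS).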

void RecordWriteStub::Generate(MacroAssembler* masm) {
  RecordWriteHelper(masm, object_, addr_, scratch_);
  masm->ret(0);
}


// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are esi.
  ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));

  // First, check if a remembered set write is even needed. The tests below
  // catch stores of Smis and stores into young gen (which does not have
  // space for the remembered set bits).
  Label done;

  // Skip barrier if writing a smi.
  ASSERT_EQ(0, kSmiTag);
  test(value, Immediate(kSmiTagMask));
  j(zero, &done);

  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    mov(value, Operand(object));
    and_(value, Heap::NewSpaceMask());
    cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
    j(equal, &done);
  } else {
    int32_t new_space_start = reinterpret_cast<int32_t>(
        ExternalReference::new_space_start().address());
    lea(value, Operand(object, -new_space_start));
    and_(value, Heap::NewSpaceMask());
    j(equal, &done);
  }

  if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
    // Compute the bit offset in the remembered set, leave it in 'value'.
    lea(value, Operand(object, offset));
    and_(value, Page::kPageAlignmentMask);
    shr(value, kPointerSizeLog2);

    // Compute the page address from the heap object pointer, leave it in
    // 'object'.
    and_(object, ~Page::kPageAlignmentMask);

    // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
    // to limit code size. We should probably evaluate this decision by
    // measuring the performance of an equivalent implementation using
    // "simpler" instructions.
    bts(Operand(object, Page::kRSetOffset), value);
  } else {
    Register dst = scratch;
    if (offset != 0) {
      lea(dst, Operand(object, offset));
    } else {
      // Array access: calculate the destination address in the same manner as
      // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
      // into an array of words.
      ASSERT_EQ(1, kSmiTagSize);
      ASSERT_EQ(0, kSmiTag);
      lea(dst, Operand(object, dst, times_half_pointer_size,
                       FixedArray::kHeaderSize - kHeapObjectTag));
    }
    // If we are already generating a shared stub, not inlining the
    // record write code isn't going to save us any memory.
    if (generating_stub()) {
      RecordWriteHelper(this, object, dst, value);
    } else {
      RecordWriteStub stub(object, dst, value);
      CallStub(&stub);
    }
  }

  bind(&done);

  // Clobber all input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (FLAG_debug_code) {
    mov(object, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
  cmp(esp,
      Operand::StaticVariable(ExternalReference::address_of_stack_limit()));
  j(below, on_stack_overflow);
}
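// Note: StackLimitCheck only branches; callers are expected to bind
// 'on_stack_overflow' to code that handles the overflow, typically by
// calling into the runtime's stack guard.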


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(reg, Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      push(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      pop(Operand::StaticVariable(reg_addr));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved; --i >= 0;) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      mov(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      mov(Operand::StaticVariable(reg_addr), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, Operand(dst));  // shorter than mov
  } else {
    mov(dst, x);
  }
}
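// Note on the zero shortcut above: unlike 'mov', 'xor' modifies the CPU
// flags, so Set(reg, Immediate(0)) must not be used where the flags have to
// be preserved across it.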


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}

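// Compares the two values on top of the x87 stack and pops both, leaving the
// result in the CPU flags. On processors with CMOV support, 'fucomip' sets
// EFLAGS directly; otherwise the FPU status word is routed through ax/sahf.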
void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    ffree(0);
    fincstp();
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}

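// After this prologue the frame is laid out as follows (illustrative,
// offsets relative to ebp):
//   ebp + 4  : return address
//   ebp + 0  : caller's ebp
//   ebp - 4  : context (esi)
//   ebp - 8  : frame type marker (as a Smi)
//   ebp - 12 : code object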
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, Operand(esp));
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (FLAG_debug_code) {
    cmp(Operand(esp, 0), Immediate(Factory::undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}

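// Exit frames mark the transition from JavaScript code to C++ code (runtime
// and API calls); the prologue below records the frame pointer and the
// current context in Top so the stack can be unwound across the call.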
void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, Operand(esp));

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  if (mode == ExitFrame::MODE_DEBUG) {
    push(Immediate(0));
  } else {
    push(Immediate(CodeObject()));
  }

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS.  Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for arguments.
  sub(Operand(esp), Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) {
  EnterExitFramePrologue(mode);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, Operand(eax));
  lea(esi, Operand(ebp, eax, times_4, offset));

  EnterExitFrameEpilogue(mode, 2);
}


void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode,
                                       int stack_space,
                                       int argc) {
  EnterExitFramePrologue(mode);

  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset));

  EnterExitFrameEpilogue(mode, argc);
}


void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (mode == ExitFrame::MODE_DEBUG) {
    // It's okay to clobber register ebx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
    lea(ebx, Operand(ebp, kOffset));
    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(ecx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
  // The pc (return address) is already on TOS.
  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(ebp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for ebp. We expect the code throwing an exception to check ebp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler as the next handler.
  push(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Link this handler as the new current one.
  mov(Operand::StaticVariable(ExternalReference(Top::k_handler_address)), esp);
}
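// After PushTryHandler the handler occupies StackHandlerConstants::kSize
// bytes at the top of the stack, laid out (from esp upwards) as: next
// handler address, saved ebp (or NULL), handler state, return address.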


void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
      // Branch on the result of the map check.
      j(not_equal, miss, not_taken);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      mov(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Immediate(Handle<Map>(holder->map())));
  j(not_equal, miss, not_taken);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));

  // Load current lexical context from the stack frame.
  mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmp(Operand(scratch), Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, offset));
  mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    push(scratch);
    // Read the first word and compare to global_context_map.
    mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
    cmp(scratch, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(scratch);
  }

  // Check if both contexts are the same.
  cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts, taken);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // TODO(119): avoid push(holder_reg)/pop(holder_reg)
  push(holder_reg);
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));

  // Check that the context is a global context.
  if (FLAG_debug_code) {
    cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    push(holder_reg);
    // Read the first word and compare to global_context_map().
    mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch, FieldOperand(scratch, token_offset));
  cmp(scratch, FieldOperand(holder_reg, token_offset));
  pop(holder_reg);
  j(not_equal, miss, not_taken);

  bind(&same_contexts);
}

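// New-space allocation is a simple pointer bump: the current allocation top
// lives at a fixed external address, and the helpers below load and update
// it. If the bumped top would exceed the allocation limit, the allocation
// routines jump to 'gc_required' instead.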
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register result_end,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(new_space_allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(new_space_allocation_top));
  } else {
    ASSERT(!scratch.is(result_end));
    mov(Operand(scratch), Immediate(new_space_allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch) {
  if (FLAG_debug_code) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(new_space_allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::AllocateInNewSpace(int object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, object_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}
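// Typical use of the fixed-size overload above (illustrative):
//   AllocateInNewSpace(HeapNumber::kSize, eax, ebx, no_reg, &gc, TAG_OBJECT);
// leaves a tagged pointer to kSize bytes of uninitialized new-space memory
// in eax, jumping to 'gc' if new space is exhausted (compare
// AllocateHeapNumber below).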


void MacroAssembler::AllocateInNewSpace(int header_size,
                                        ScaleFactor element_size,
                                        Register element_count,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  lea(result_end, Operand(result, element_count, element_size, header_size));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::AllocateInNewSpace(Register object_size,
                                        Register result,
                                        Register result_end,
                                        Register scratch,
                                        Label* gc_required,
                                        AllocationFlags flags) {
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, result_end, scratch, flags);

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference new_space_allocation_limit =
      ExternalReference::new_space_allocation_limit_address();
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, Operand(result));
  cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
  j(above, gc_required, not_taken);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    lea(result, Operand(result, kHeapObjectTag));
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address();

  // Make sure the object has no tag before resetting top.
  and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  AllocateInNewSpace(HeapNumber::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(Operand(scratch1), Immediate(kObjectAlignmentMask));
  and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));

  // Allocate ascii string in new space.
  AllocateInNewSpace(SeqAsciiString::kHeaderSize,
                     times_1,
                     scratch1,
                     result,
                     scratch2,
                     scratch3,
                     gc_required,
                     TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset), length);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  // Allocate ascii cons string object in new space.
  AllocateInNewSpace(ConsString::kSize,
                     result,
                     scratch1,
                     scratch2,
                     gc_required,
                     TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(Factory::cons_ascii_string_map()));
}

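// The NegativeZeroTest variants below detect integer results that should
// have been -0.0: a zero 'result' only denotes negative zero if the sign of
// the operand(s) is negative, in which case control transfers to the given
// target.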
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
                                      Register result,
                                      Register op,
                                      JumpTarget* then_target) {
  JumpTarget ok;
  test(result, Operand(result));
  ok.Branch(not_zero, taken);
  test(op, Operand(op));
  then_target->Branch(sign, not_taken);
  ok.Bind();
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  test(op, Operand(op));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, Operand(result));
  j(not_zero, &ok, taken);
  mov(scratch, Operand(op1));
  or_(scratch, Operand(op2));
  j(sign, then_label, not_taken);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  test(function, Immediate(kSmiTagMask));
  j(zero, miss, not_taken);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss, not_taken);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance, not_taken);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(Operand(result), Immediate(Factory::the_hole_value()));
  j(equal, miss, not_taken);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET);
}

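// The Try* variants below use CodeStub::TryGetCode(), which can fail to
// allocate a code object; on failure they return the Failure object instead
// of emitting a call, so callers must check the result with IsFailure().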
Object* MacroAssembler::TryCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
  Object* result = stub->TryGetCode();
  if (!result->IsFailure()) {
    jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
  }
  return result;
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(Operand(esp), Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(Factory::undefined_value()));
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                       int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  CallStub(&ces);
}


Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
                                       int num_arguments) {
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    // Since we did not call the stub, there was no allocation failure.
    // Return some non-failure object.
    return Heap::undefined_value();
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f)));
  CEntryStub ces(1);
  return TryCallStub(&ces);
}


void MacroAssembler::TailCallRuntime(const ExternalReference& ext,
                                     int num_arguments,
                                     int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToRuntime(ext);
}

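// PushHandleScope/PopHandleScope bracket code that may create handles: push
// saves the current extension count (smi-tagged) and the next and limit
// pointers (word-aligned, hence already smi-like for the GC) on the stack,
// and pop restores them, deleting any handle scope extensions allocated in
// between.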
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 1157 | void MacroAssembler::PushHandleScope(Register scratch) { | 
|  | 1158 | // Push the number of extensions, smi-tagged so the gc will ignore it. | 
|  | 1159 | ExternalReference extensions_address = | 
|  | 1160 | ExternalReference::handle_scope_extensions_address(); | 
|  | 1161 | mov(scratch, Operand::StaticVariable(extensions_address)); | 
|  | 1162 | ASSERT_EQ(0, kSmiTag); | 
|  | 1163 | shl(scratch, kSmiTagSize); | 
|  | 1164 | push(scratch); | 
|  | 1165 | mov(Operand::StaticVariable(extensions_address), Immediate(0)); | 
|  | 1166 | // Push next and limit pointers which will be wordsize aligned and | 
|  | 1167 | // hence automatically smi tagged. | 
|  | 1168 | ExternalReference next_address = | 
|  | 1169 | ExternalReference::handle_scope_next_address(); | 
|  | 1170 | push(Operand::StaticVariable(next_address)); | 
|  | 1171 | ExternalReference limit_address = | 
|  | 1172 | ExternalReference::handle_scope_limit_address(); | 
|  | 1173 | push(Operand::StaticVariable(limit_address)); | 
|  | 1174 | } | 
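
// Why pushing raw pointers here is GC-safe: with kSmiTag == 0 and
// kSmiTagSize == 1, any value with a clear low bit is treated by the GC
// as a smi and left alone. The handle scope next and limit fields are
// word-aligned addresses, so their low bits are already clear; a pointer
// such as 0x0804f000 (illustrative address only) sits on the stack looking
// like the harmless smi 0x04027800.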


Object* MacroAssembler::PopHandleScopeHelper(Register saved,
                                             Register scratch,
                                             bool gc_allowed) {
  Object* result = NULL;
  ExternalReference extensions_address =
      ExternalReference::handle_scope_extensions_address();
  Label write_back;
  mov(scratch, Operand::StaticVariable(extensions_address));
  cmp(Operand(scratch), Immediate(0));
  j(equal, &write_back);
  // Calling a runtime function messes with registers, so we save and
  // restore any register we're asked not to change.
  if (saved.is_valid()) push(saved);
  if (gc_allowed) {
    CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
  } else {
    result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
    if (result->IsFailure()) return result;
  }
  if (saved.is_valid()) pop(saved);

  bind(&write_back);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address();
  pop(Operand::StaticVariable(limit_address));
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  pop(Operand::StaticVariable(next_address));
  pop(scratch);
  shr(scratch, kSmiTagSize);
  mov(Operand::StaticVariable(extensions_address), scratch);

  return result;
}


void MacroAssembler::PopHandleScope(Register saved, Register scratch) {
  PopHandleScopeHelper(saved, scratch, true);
}


Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) {
  return PopHandleScopeHelper(saved, scratch, false);
}
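
// The Pop/TryPop split mirrors CallRuntime/TryCallRuntime above: in the
// Try variant the runtime call is not allowed to trigger a garbage
// collection, so an allocation failure is propagated as a Failure object
// instead, and every caller (including PopHandleScopeHelper itself) is
// expected to check IsFailure() and bail out.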


void MacroAssembler::JumpToRuntime(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), Operand(actual.reg()));
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call(adaptor, RelocInfo::CODE_TARGET);
      jmp(done);
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
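
// Register contract implied by the code above: the ArgumentsAdaptorTrampoline
// is entered with the actual argument count in eax, the expected count in
// ebx (the ASSERTs pin both registers), and the code to invoke in edx,
// either materialized from code_constant or copied from code_operand.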


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Operand dummy(eax);
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  lea(edx, FieldOperand(edx, Code::kHeaderSize));

  ParameterCount expected(ebx);
  InvokeCode(Operand(edx), expected, actual, flag);
}
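
// The lea above computes the entry point: FieldOperand folds in the
// -kHeapObjectTag adjustment, so edx ends up holding the untagged address
// of the Code object plus Code::kHeaderSize, i.e. the address of its first
// instruction.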


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
}
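
// Fixup bookkeeping: for a builtin that has not been compiled yet, the
// invoke above still emits a call or jump whose final four bytes are the
// 32-bit code target, so pc_offset() - sizeof(int32_t) records exactly
// where that operand lives. The bootstrapper later patches in the real
// entry point once the builtin is resolved. GetBuiltinEntry below records
// its mov the same way, with UseCodeObject set to true since it stores a
// code object rather than emitting a call.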


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  mov(Operand(target), Immediate(code));
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(int32_t), flags, name };
    unresolved_.Add(entry);
  }
  add(Operand(target), Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  mov(edx, FieldOperand(edx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  mov(edi, FieldOperand(edx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
    // Load the function context (which is the incoming, outer context).
    mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
      mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
    }
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  } else {  // Slot is in the current function context.
    // The context may be an intermediate context, not a function context.
    mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  }
}
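
// Example: LoadContext(ecx, 2) hops from the current context in esi through
// two closure links (each hop loads the closure from the context, then that
// closure's context) and finally normalizes to the enclosing function
// context via FCONTEXT_INDEX. A chain length of 0 just normalizes esi
// itself into dst.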


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(Operand(esp), Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, value);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
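
// The pushfd/popfd pair is what makes the conditional variants safe to
// emit in the middle of a comparison sequence: inc, dec, add and sub all
// clobber EFLAGS, so the counter update is bracketed by a save and restore
// of the flags register.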


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, taken);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC problems;
  // however, msg is not guaranteed to be properly aligned. Instead, we
  // pass an aligned pointer that is a proper v8 smi, and also pass the
  // alignment difference from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
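  // Worked example (illustrative address; on ia32, kSmiTagMask == 1 and
  // kSmiTag == 0): if msg lives at 0x08004263, then
  //   p0 = (0x08004263 & ~1) + 0 = 0x08004262  (low bit clear, a valid smi)
  //   p1 - p0 = 1                              (the alignment slack)
  // Both values can therefore be scanned safely by the GC, and the runtime
  // recovers the real pointer as p0 + (p1 - p0).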
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  // Disable stub call restrictions to always allow calls to abort.
  set_allow_stub_calls(true);

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
  int3();
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither object is a smi: ANDing the two words gives a
  // clear low tag bit if either one is.
  ASSERT_EQ(0, kSmiTag);
  mov(scratch1, Operand(object1));
  and_(scratch1, Operand(object2));
  test(scratch1, Immediate(kSmiTagMask));
  j(zero, failure);

  // Load the instance type of both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
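  // How the single check works: after the and_ below, each scratch register
  // holds only the masked type bits of one string. The lea then computes
  // scratch1 + scratch2 * 8, packing the second value three bit positions
  // above the first; the ASSERT guarantees the two masked fields occupy
  // disjoint bits, so the addition cannot carry between them and one cmp
  // against kFlatAsciiStringTag | (kFlatAsciiStringTag << 3) verifies both
  // strings at once.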
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap in order for the assembler to
  // generate size bytes of instructions without failing with buffer size
  // constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
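
// A minimal usage sketch (assuming the masm() accessor declared with
// CodePatcher in the header): overwrite a single byte at 'address' with a
// breakpoint; the destructor then flushes the instruction cache and asserts
// that exactly one byte was emitted.
//   {
//     CodePatcher patcher(address, 1);  // int3 is a one-byte instruction
//     patcher.masm()->int3();
//   }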


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal