// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/isolate-inl.h"
#include "src/runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}
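// CallSize note (illustrative; the exact count depends on how the operand is
// materialized and on the constant pool state): with ARMv7 movw/movt the
// address typically takes two instructions plus the blx, i.e. 3 * kInstrSize,
// while a constant pool load is a single ldr plus the blx, i.e. 2 * kInstrSize.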


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate,
                                                   Address target,
                                                   RelocInfo::Mode rmode,
                                                   Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(NULL, mov_instr) * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code to ensure we assume the same
  // constant pool availability (e.g., whether constant pool is full or not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // Call sequence on V7 or later may be:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // Or for pre-V7 or values that may be back-patched
  // to avoid ICache flushes:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                      @ return address

  // Statement positions are expected to be recorded when the target
  // address is loaded. The mov method will automatically record
  // positions when pc is the target; since this is not the case here,
  // we have to do it explicitly.
  positions_recorder()->WriteRecordedPositions();

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}


void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}


void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}
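// Swap note: when no scratch register is available, the classic three-EOR
// exchange is used. With a = reg1 and b = reg2,
//   a ^= b;  b ^= a;  a ^= b;
// leaves a holding the original b and b holding the original a, because
// b ^ (a ^ b) == a and (a ^ b) ^ a == b.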


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    mov(dst, Operand(value));
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      mov(dst, Operand(value));
    }
  }
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}


void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}


void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(MLS)) {
    CpuFeatureScope scope(this, MLS);
    mls(dst, src1, src2, srcA, cond);
  } else {
    DCHECK(!srcA.is(ip));
    mul(ip, src1, src2, LeaveCC, cond);
    sub(dst, srcA, ip, LeaveCC, cond);
  }
}
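// Mls note: both paths compute dst = srcA - src1 * src2. The native mls
// instruction does this in one step; the fallback multiplies into ip and then
// subtracts the product from srcA, which is why srcA must not alias ip.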


void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
             base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}
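// And note: the ubfx path applies when the immediate has the form 2^k - 1 and
// would not encode as a single AND. Masking with such a value just keeps the
// low k bits; for example, ANDing with 0x00ffffff (2^24 - 1) can be emitted as
// ubfx dst, src1, #0, #24.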


void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}
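// Ubfx note: in the pre-ARMv7 fallback the mask selects bits [lsb, lsb + width)
// and the logical shift right moves them down to bit 0. For example, lsb = 4
// and width = 8 give mask = (1 << 12) - 1 - ((1 << 4) - 1) = 0xff0, so the
// result is (src1 & 0xff0) >> 4.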


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}
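// Sbfx note: the fallback shifts the masked field left so that its top bit
// lands in bit 31, then shifts arithmetically right by 32 - width. The
// arithmetic shift replicates the field's sign bit, which is exactly the sign
// extension sbfx performs.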


void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}
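// Bfi note: bit-field insert without bfi takes three steps: clear the
// destination field with bic, mask the low 'width' bits of src and shift them
// into position in the scratch register, then OR the two together.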


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    DCHECK(!dst.is(pc) && !src.rm().is(pc));
    DCHECK((satpos >= 0) && (satpos <= 31));

    // These asserts are required to ensure compatibility with the ARMv7
    // implementation.
    DCHECK((src.shift_op() == ASR) || (src.shift_op() == LSL));
    DCHECK(src.rs().is(no_reg));

    Label done;
    int satval = (1 << satpos) - 1;

    if (cond != al) {
      b(NegateCondition(cond), &done);  // Skip saturate if !condition.
    }
    if (!(src.is_reg() && dst.is(src.rm()))) {
      mov(dst, src);
    }
    tst(dst, Operand(~satval));
    b(eq, &done);
    mov(dst, Operand::Zero(), LeaveCC, mi);  // 0 if negative.
    mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
    bind(&done);
  } else {
    usat(dst, satpos, src, cond);
  }
}
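// Usat note: the operand is clamped to the unsigned range [0, 2^satpos - 1].
// In the fallback, the tst against ~satval checks whether any bits outside
// that range are set; if so, a negative value is forced to 0 and an
// out-of-range positive value to satval, matching the ARMv7 usat instruction.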


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
  and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
  b(cond, branch);
}
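// InNewSpace note (assuming the new space occupies a single aligned address
// range, as the mask-based test requires): masking the object's address with
// new_space_mask and comparing against new_space_start tells whether the
// pointer lies inside that range; the caller branches on eq (in new space) or
// ne (not in new space).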


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip.  The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip.  The
// register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    ldr(ip, MemOperand(address));
    cmp(ip, value);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  ldr(scratch, MemOperand(ip));
  // Store pointer to buffer and increment buffer top.
  str(address, MemOperand(scratch, kPointerSize, PostIndex));
  // Write back new top of buffer.
  str(scratch, MemOperand(ip));
  // Call stub on end of buffer.
  // Check for end of buffer.
  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
    b(eq, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(eq);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}
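// RememberedSetHelper note: the post-indexed str both appends the slot address
// at the current store buffer top and advances scratch by kPointerSize in one
// instruction; the new top is then written back. If the bumped top has
// kStoreBufferOverflowBit set, the buffer is full and the overflow stub is
// called to process it.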


void MacroAssembler::PushFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
                cp.bit() |
                (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
                fp.bit() |
                lr.bit());
}


void MacroAssembler::PopFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
                cp.bit() |
                (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
                fp.bit() |
                lr.bit());
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0:
  DCHECK(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters);
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // Number of d-regs not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
      if (dst1.is(src.rn())) {
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}
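// Ldrd note: the two-ldr fallback is careful about aliasing. When dst1 is also
// the base register, the second word is loaded first so that overwriting the
// base with the first word cannot corrupt the address used for the remaining
// load.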


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}


void MacroAssembler::VFPEnsureFPSCRState(Register scratch) {
  // If needed, restore wanted bits of FPSCR.
  Label fpscr_done;
  vmrs(scratch);
  if (emit_debug_code()) {
    Label rounding_mode_correct;
    tst(scratch, Operand(kVFPRoundingModeMask));
    b(eq, &rounding_mode_correct);
    // Don't call Assert here, since Runtime_Abort could re-enter here.
    stop("Default rounding mode not set");
    bind(&rounding_mode_correct);
  }
  tst(scratch, Operand(kVFPDefaultNaNModeControlBit));
  b(ne, &fpscr_done);
  orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit));
  vmsr(scratch);
  bind(&fpscr_done);
}


void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,
                                        const Condition cond) {
  vsub(dst, src, kDoubleRegZero, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const double src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const double imm,
                          const Register scratch) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero) {
    vmov(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero) {
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}
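// Vmov note: +0.0 and -0.0 have distinct bit patterns (they differ only in the
// sign bit), so the comparison is done on the DoubleRepresentation rather than
// the double value. +0.0 is copied straight from kDoubleRegZero and -0.0 is
// produced by negating it, avoiding a constant load in both cases.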


void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}
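// VmovHigh/VmovLow note: only d0-d15 overlap the single-precision s-registers
// (d<n> aliases s<2n> and s<2n+1>), so for those the requested half can be
// moved through the corresponding s-register. d16-d31 have no s-register
// aliases and need the indexed vmov form instead.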


void MacroAssembler::LoadConstantPoolPointerRegister() {
  if (FLAG_enable_ool_constant_pool) {
    int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize -
        pc_offset() - Instruction::kPCReadOffset;
    DCHECK(ImmediateFitsAddrMode2Instruction(constant_pool_offset));
    ldr(pp, MemOperand(pc, constant_pool_offset));
  }
}


void MacroAssembler::StubPrologue() {
  PushFixedFrame();
  Push(Smi::FromInt(StackFrame::STUB));
  // Adjust FP to point to saved FP.
  add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  if (FLAG_enable_ool_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictible_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
      ldr(pc, MemOperand(pc, -4));
      emit_code_stub_address(stub);
    } else {
      PushFixedFrame(r1);
      nop(ip.code());
      // Adjust FP to point to saved FP.
      add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
    }
  }
  if (FLAG_enable_ool_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool) {
  // r0-r3: preserved
  PushFixedFrame();
  if (FLAG_enable_ool_constant_pool && load_constant_pool) {
    LoadConstantPoolPointerRegister();
  }
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  // Adjust FP to point to saved FP.
  add(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


int MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer, return address and constant pool pointer
  // (if FLAG_enable_ool_constant_pool).
  int frame_ends;
  if (FLAG_enable_ool_constant_pool) {
    add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    frame_ends = pc_offset();
    ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
  } else {
    mov(sp, fp);
    frame_ends = pc_offset();
    ldm(ia_w, sp, fp.bit() | lr.bit());
  }
  return frame_ends;
}


void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  Push(lr, fp);
  mov(fp, Operand(sp));  // Set up new frame pointer.
  // Reserve room for saved entry sp and code object.
  sub(sp, sp, Operand(ExitFrameConstants::kFrameSize));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
    str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_ool_constant_pool) {
|  | 1053 | str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); | 
|  | 1054 | } | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 1055 | mov(ip, Operand(CodeObject())); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1056 | str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1057 |  | 
|  | 1058 | // Save the frame pointer and the context in top. | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 1059 | mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1060 | str(fp, MemOperand(ip)); | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 1061 | mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1062 | str(cp, MemOperand(ip)); | 
|  | 1063 |  | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1064 | // Optionally save all double registers. | 
|  | 1065 | if (save_doubles) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1066 | SaveFPRegs(sp, ip); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1067 | // Note that d0 will be accessible at | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1068 | //   fp - ExitFrameConstants::kFrameSize - | 
|  | 1069 | //   DwVfpRegister::kMaxNumRegisters * kDoubleSize, | 
|  | 1070 | // since the sp slot, code slot and constant pool slot (if | 
|  | 1071 | // FLAG_enable_ool_constant_pool) were pushed after the fp. | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1072 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1073 |  | 
|  | 1074 | // Reserve room for the return address and the requested stack space, and | 
|  | 1075 | // align the frame in preparation for calling the runtime function. | 
|  | 1076 | const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); | 
|  | 1077 | sub(sp, sp, Operand((stack_space + 1) * kPointerSize)); | 
|  | 1078 | if (frame_alignment > 0) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1079 | DCHECK(base::bits::IsPowerOfTwo32(frame_alignment)); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1080 | and_(sp, sp, Operand(-frame_alignment)); | 
|  | 1081 | } | 
|  | 1082 |  | 
|  | 1083 | // Set the exit frame sp value to point just before the return address | 
|  | 1084 | // location. | 
|  | 1085 | add(ip, sp, Operand(kPointerSize)); | 
|  | 1086 | str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1087 | } | 
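As the in-code comment notes, when save_doubles is set the saved double registers sit directly below the fixed exit-frame slots. A small sketch restating that location; the helper is hypothetical and not part of the original:

    // Offset of the saved d0 relative to fp in an exit frame built with
    // save_doubles, per the comment inside EnterExitFrame.
    static int SavedD0OffsetFromFp() {
      return -(ExitFrameConstants::kFrameSize +
               DwVfpRegister::kMaxNumRegisters * kDoubleSize);
    }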
|  | 1088 |  | 
|  | 1089 |  | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 1090 | void MacroAssembler::InitializeNewString(Register string, | 
|  | 1091 | Register length, | 
|  | 1092 | Heap::RootListIndex map_index, | 
|  | 1093 | Register scratch1, | 
|  | 1094 | Register scratch2) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1095 | SmiTag(scratch1, length); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 1096 | LoadRoot(scratch2, map_index); | 
|  | 1097 | str(scratch1, FieldMemOperand(string, String::kLengthOffset)); | 
|  | 1098 | mov(scratch1, Operand(String::kEmptyHashField)); | 
|  | 1099 | str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset)); | 
|  | 1100 | str(scratch1, FieldMemOperand(string, String::kHashFieldOffset)); | 
|  | 1101 | } | 
|  | 1102 |  | 
|  | 1103 |  | 
|  | 1104 | int MacroAssembler::ActivationFrameAlignment() { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1105 | #if V8_HOST_ARCH_ARM | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1106 | // Running on the real platform. Use the alignment as mandated by the local | 
|  | 1107 | // environment. | 
|  | 1108 | // Note: This will break if we ever start generating snapshots on one ARM | 
|  | 1109 | // platform for another ARM platform with a different alignment. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1110 | return base::OS::ActivationFrameAlignment(); | 
|  | 1111 | #else  // V8_HOST_ARCH_ARM | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1112 | // If we are using the simulator then we should always align to the expected | 
|  | 1113 | // alignment. As the simulator is used to generate snapshots we do not know | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 1114 | // if the target platform will need alignment, so this is controlled from a | 
|  | 1115 | // flag. | 
|  | 1116 | return FLAG_sim_stack_alignment; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1117 | #endif  // V8_HOST_ARCH_ARM | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1118 | } | 
|  | 1119 |  | 
|  | 1120 |  | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1121 | void MacroAssembler::LeaveExitFrame(bool save_doubles, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1122 | Register argument_count, | 
|  | 1123 | bool restore_context) { | 
|  | 1124 | ConstantPoolUnavailableScope constant_pool_unavailable(this); | 
|  | 1125 |  | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1126 | // Optionally restore all double registers. | 
|  | 1127 | if (save_doubles) { | 
| Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 1128 | // Calculate the stack location of the saved doubles and restore them. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1129 | const int offset = ExitFrameConstants::kFrameSize; | 
|  | 1130 | sub(r3, fp, | 
|  | 1131 | Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize)); | 
|  | 1132 | RestoreFPRegs(r3, ip); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1133 | } | 
|  | 1134 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1135 | // Clear top frame. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1136 | mov(r3, Operand::Zero()); | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 1137 | mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1138 | str(r3, MemOperand(ip)); | 
|  | 1139 |  | 
|  | 1140 | // Restore current context from top and clear it in debug mode. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1141 | if (restore_context) { | 
|  | 1142 | mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 
|  | 1143 | ldr(cp, MemOperand(ip)); | 
|  | 1144 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1145 | #ifdef DEBUG | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1146 | mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1147 | str(r3, MemOperand(ip)); | 
|  | 1148 | #endif | 
|  | 1149 |  | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1150 | // Tear down the exit frame, pop the arguments, and return. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1151 | if (FLAG_enable_ool_constant_pool) { | 
|  | 1152 | ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); | 
|  | 1153 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1154 | mov(sp, Operand(fp)); | 
|  | 1155 | ldm(ia_w, sp, fp.bit() | lr.bit()); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1156 | if (argument_count.is_valid()) { | 
|  | 1157 | add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); | 
|  | 1158 | } | 
|  | 1159 | } | 
|  | 1160 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1161 |  | 
|  | 1162 | void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) { | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 1163 | if (use_eabi_hardfloat()) { | 
|  | 1164 | Move(dst, d0); | 
|  | 1165 | } else { | 
|  | 1166 | vmov(dst, r0, r1); | 
|  | 1167 | } | 
|  | 1168 | } | 
|  | 1169 |  | 
|  | 1170 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1171 | // On ARM this is just a synonym to make the purpose clear. | 
|  | 1172 | void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) { | 
|  | 1173 | MovFromFloatResult(dst); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1174 | } | 
|  | 1175 |  | 
|  | 1176 |  | 
|  | 1177 | void MacroAssembler::InvokePrologue(const ParameterCount& expected, | 
|  | 1178 | const ParameterCount& actual, | 
|  | 1179 | Handle<Code> code_constant, | 
|  | 1180 | Register code_reg, | 
|  | 1181 | Label* done, | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1182 | bool* definitely_mismatches, | 
| Ben Murdoch | b8e0da2 | 2011-05-16 14:20:40 +0100 | [diff] [blame] | 1183 | InvokeFlag flag, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1184 | const CallWrapper& call_wrapper) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1185 | bool definitely_matches = false; | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1186 | *definitely_mismatches = false; | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1187 | Label regular_invoke; | 
|  | 1188 |  | 
|  | 1189 | // Check whether the expected and actual argument counts match. If not, | 
|  | 1190 | // set up registers according to the contract with ArgumentsAdaptorTrampoline: | 
|  | 1191 | //  r0: actual arguments count | 
|  | 1192 | //  r1: function (passed through to callee) | 
|  | 1193 | //  r2: expected arguments count | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1194 |  | 
|  | 1195 | // The code below is made a lot easier because the calling code already sets | 
|  | 1196 | // up actual and expected registers according to the contract if values are | 
|  | 1197 | // passed in registers. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1198 | DCHECK(actual.is_immediate() || actual.reg().is(r0)); | 
|  | 1199 | DCHECK(expected.is_immediate() || expected.reg().is(r2)); | 
|  | 1200 | DCHECK((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1201 |  | 
|  | 1202 | if (expected.is_immediate()) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1203 | DCHECK(actual.is_immediate()); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1204 | if (expected.immediate() == actual.immediate()) { | 
|  | 1205 | definitely_matches = true; | 
|  | 1206 | } else { | 
|  | 1207 | mov(r0, Operand(actual.immediate())); | 
|  | 1208 | const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 
|  | 1209 | if (expected.immediate() == sentinel) { | 
|  | 1210 | // Don't worry about adapting arguments for builtins that | 
|  | 1211 | // don't want that done. Skip adaptation code by making it look | 
|  | 1212 | // like we have a match between expected and actual number of | 
|  | 1213 | // arguments. | 
|  | 1214 | definitely_matches = true; | 
|  | 1215 | } else { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1216 | *definitely_mismatches = true; | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1217 | mov(r2, Operand(expected.immediate())); | 
|  | 1218 | } | 
|  | 1219 | } | 
|  | 1220 | } else { | 
|  | 1221 | if (actual.is_immediate()) { | 
|  | 1222 | cmp(expected.reg(), Operand(actual.immediate())); | 
|  | 1223 | b(eq, &regular_invoke); | 
|  | 1224 | mov(r0, Operand(actual.immediate())); | 
|  | 1225 | } else { | 
|  | 1226 | cmp(expected.reg(), Operand(actual.reg())); | 
|  | 1227 | b(eq, &regular_invoke); | 
|  | 1228 | } | 
|  | 1229 | } | 
|  | 1230 |  | 
|  | 1231 | if (!definitely_matches) { | 
|  | 1232 | if (!code_constant.is_null()) { | 
|  | 1233 | mov(r3, Operand(code_constant)); | 
|  | 1234 | add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | 
|  | 1235 | } | 
|  | 1236 |  | 
|  | 1237 | Handle<Code> adaptor = | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1238 | isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1239 | if (flag == CALL_FUNCTION) { | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1240 | call_wrapper.BeforeCall(CallSize(adaptor)); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1241 | Call(adaptor); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 1242 | call_wrapper.AfterCall(); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1243 | if (!*definitely_mismatches) { | 
|  | 1244 | b(done); | 
|  | 1245 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1246 | } else { | 
|  | 1247 | Jump(adaptor, RelocInfo::CODE_TARGET); | 
|  | 1248 | } | 
|  | 1249 | bind(&regular_invoke); | 
|  | 1250 | } | 
|  | 1251 | } | 
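The early-out logic above can be restated at the C++ level: when both counts are immediates the check resolves at code-generation time, and the kDontAdaptArgumentsSentinel marker suppresses argument adaptation entirely. An illustrative sketch; the helper name is hypothetical:

    // Compile-time resolution of the count check done by InvokePrologue
    // when both expected and actual are known immediates.
    static bool DefinitelyMatches(int expected, int actual) {
      return expected == actual ||
             expected == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
    }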
|  | 1252 |  | 
|  | 1253 |  | 
|  | 1254 | void MacroAssembler::InvokeCode(Register code, | 
|  | 1255 | const ParameterCount& expected, | 
|  | 1256 | const ParameterCount& actual, | 
| Ben Murdoch | b8e0da2 | 2011-05-16 14:20:40 +0100 | [diff] [blame] | 1257 | InvokeFlag flag, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1258 | const CallWrapper& call_wrapper) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1259 | // You can't call a function without a valid frame. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1260 | DCHECK(flag == JUMP_FUNCTION || has_frame()); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1261 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1262 | Label done; | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1263 | bool definitely_mismatches = false; | 
|  | 1264 | InvokePrologue(expected, actual, Handle<Code>::null(), code, | 
|  | 1265 | &done, &definitely_mismatches, flag, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1266 | call_wrapper); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1267 | if (!definitely_mismatches) { | 
|  | 1268 | if (flag == CALL_FUNCTION) { | 
|  | 1269 | call_wrapper.BeforeCall(CallSize(code)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1270 | Call(code); | 
|  | 1271 | call_wrapper.AfterCall(); | 
|  | 1272 | } else { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1273 | DCHECK(flag == JUMP_FUNCTION); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1274 | Jump(code); | 
|  | 1275 | } | 
| Ben Murdoch | 85b7179 | 2012-04-11 18:30:58 +0100 | [diff] [blame] | 1276 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1277 | // Continue here if InvokePrologue handled the invocation through the | 
|  | 1278 | // arguments adaptor because the parameter counts mismatched. | 
|  | 1279 | bind(&done); | 
|  | 1280 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1281 | } | 
|  | 1282 |  | 
|  | 1283 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1284 | void MacroAssembler::InvokeFunction(Register fun, | 
|  | 1285 | const ParameterCount& actual, | 
| Ben Murdoch | b8e0da2 | 2011-05-16 14:20:40 +0100 | [diff] [blame] | 1286 | InvokeFlag flag, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1287 | const CallWrapper& call_wrapper) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1288 | // You can't call a function without a valid frame. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1289 | DCHECK(flag == JUMP_FUNCTION || has_frame()); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1290 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1291 | // Contract with called JS functions requires that function is passed in r1. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1292 | DCHECK(fun.is(r1)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1293 |  | 
|  | 1294 | Register expected_reg = r2; | 
|  | 1295 | Register code_reg = r3; | 
|  | 1296 |  | 
|  | 1297 | ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 
|  | 1298 | ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 
|  | 1299 | ldr(expected_reg, | 
|  | 1300 | FieldMemOperand(code_reg, | 
|  | 1301 | SharedFunctionInfo::kFormalParameterCountOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1302 | SmiUntag(expected_reg); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1303 | ldr(code_reg, | 
| Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 1304 | FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1305 |  | 
|  | 1306 | ParameterCount expected(expected_reg); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1307 | InvokeCode(code_reg, expected, actual, flag, call_wrapper); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1308 | } | 
|  | 1309 |  | 
|  | 1310 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1311 | void MacroAssembler::InvokeFunction(Register function, | 
|  | 1312 | const ParameterCount& expected, | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 1313 | const ParameterCount& actual, | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 1314 | InvokeFlag flag, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1315 | const CallWrapper& call_wrapper) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1316 | // You can't call a function without a valid frame. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1317 | DCHECK(flag == JUMP_FUNCTION || has_frame()); | 
|  | 1318 |  | 
|  | 1319 | // Contract with called JS functions requires that function is passed in r1. | 
|  | 1320 | DCHECK(function.is(r1)); | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 1321 |  | 
|  | 1322 | // Get the function and setup the context. | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 1323 | ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | 
|  | 1324 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1325 | // We call indirectly through the code field in the function to | 
|  | 1326 | // allow recompilation to take effect without changing any of the | 
|  | 1327 | // call sites. | 
|  | 1328 | ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1329 | InvokeCode(r3, expected, actual, flag, call_wrapper); | 
|  | 1330 | } | 
|  | 1331 |  | 
|  | 1332 |  | 
|  | 1333 | void MacroAssembler::InvokeFunction(Handle<JSFunction> function, | 
|  | 1334 | const ParameterCount& expected, | 
|  | 1335 | const ParameterCount& actual, | 
|  | 1336 | InvokeFlag flag, | 
|  | 1337 | const CallWrapper& call_wrapper) { | 
|  | 1338 | Move(r1, function); | 
|  | 1339 | InvokeFunction(r1, expected, actual, flag, call_wrapper); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1340 | } | 
|  | 1341 |  | 
|  | 1342 |  | 
|  | 1343 | void MacroAssembler::IsObjectJSObjectType(Register heap_object, | 
|  | 1344 | Register map, | 
|  | 1345 | Register scratch, | 
|  | 1346 | Label* fail) { | 
|  | 1347 | ldr(map, FieldMemOperand(heap_object, HeapObject::kMapOffset)); | 
|  | 1348 | IsInstanceJSObjectType(map, scratch, fail); | 
|  | 1349 | } | 
|  | 1350 |  | 
|  | 1351 |  | 
|  | 1352 | void MacroAssembler::IsInstanceJSObjectType(Register map, | 
|  | 1353 | Register scratch, | 
|  | 1354 | Label* fail) { | 
|  | 1355 | ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1356 | cmp(scratch, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1357 | b(lt, fail); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1358 | cmp(scratch, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1359 | b(gt, fail); | 
|  | 1360 | } | 
|  | 1361 |  | 
|  | 1362 |  | 
|  | 1363 | void MacroAssembler::IsObjectJSStringType(Register object, | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1364 | Register scratch, | 
|  | 1365 | Label* fail) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1366 | DCHECK(kNotStringTag != 0); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1367 |  | 
|  | 1368 | ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 
|  | 1369 | ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 
|  | 1370 | tst(scratch, Operand(kIsNotStringMask)); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1371 | b(ne, fail); | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 1372 | } | 
|  | 1373 |  | 
| Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 1374 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1375 | void MacroAssembler::IsObjectNameType(Register object, | 
|  | 1376 | Register scratch, | 
|  | 1377 | Label* fail) { | 
|  | 1378 | ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 
|  | 1379 | ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 
|  | 1380 | cmp(scratch, Operand(LAST_NAME_TYPE)); | 
|  | 1381 | b(hi, fail); | 
|  | 1382 | } | 
|  | 1383 |  | 
|  | 1384 |  | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 1385 | void MacroAssembler::DebugBreak() { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1386 | mov(r0, Operand::Zero()); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1387 | mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1388 | CEntryStub ces(isolate(), 1); | 
|  | 1389 | DCHECK(AllowThisStubCall(&ces)); | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 1390 | Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); | 
|  | 1391 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1392 |  | 
|  | 1393 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1394 | void MacroAssembler::PushTryHandler(StackHandler::Kind kind, | 
|  | 1395 | int handler_index) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1396 | // Adjust this code if not the case. | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1397 | STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); | 
|  | 1398 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1399 | STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); | 
|  | 1400 | STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); | 
|  | 1401 | STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); | 
|  | 1402 | STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1403 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1404 | // For the JSEntry handler, we must preserve r0-r4; r5-r6 are available. | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1405 | // We will build up the handler from the bottom by pushing on the stack. | 
|  | 1406 | // Set up the code object (r5) and the state (r6) for pushing. | 
|  | 1407 | unsigned state = | 
|  | 1408 | StackHandler::IndexField::encode(handler_index) | | 
|  | 1409 | StackHandler::KindField::encode(kind); | 
|  | 1410 | mov(r5, Operand(CodeObject())); | 
|  | 1411 | mov(r6, Operand(state)); | 
|  | 1412 |  | 
|  | 1413 | // Push the frame pointer, context, state, and code object. | 
|  | 1414 | if (kind == StackHandler::JS_ENTRY) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1415 | mov(cp, Operand(Smi::FromInt(0)));  // Indicates no context. | 
|  | 1416 | mov(ip, Operand::Zero());  // NULL frame pointer. | 
|  | 1417 | stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | ip.bit()); | 
| Ben Murdoch | 5d4cdbf | 2012-04-11 10:23:59 +0100 | [diff] [blame] | 1418 | } else { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1419 | stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit()); | 
| Ben Murdoch | 592a9fc | 2012-03-05 11:04:45 +0000 | [diff] [blame] | 1420 | } | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1421 |  | 
|  | 1422 | // Link the current handler as the next handler. | 
|  | 1423 | mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 
|  | 1424 | ldr(r5, MemOperand(r6)); | 
|  | 1425 | push(r5); | 
|  | 1426 | // Set this new handler as the current one. | 
|  | 1427 | str(sp, MemOperand(r6)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1428 | } | 
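The STATIC_ASSERTs above pin down the five-word handler record that the push/stm sequence builds. Restated as a struct for readability (illustrative only, lowest address first; not part of the original file):

    struct StackHandlerLayoutSketch {
      uint32_t next;     // kNextOffset    == 0 * kPointerSize
      uint32_t code;     // kCodeOffset    == 1 * kPointerSize
      uint32_t state;    // kStateOffset   == 2 * kPointerSize
      uint32_t context;  // kContextOffset == 3 * kPointerSize
      uint32_t fp;       // kFPOffset      == 4 * kPointerSize
    };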
|  | 1429 |  | 
|  | 1430 |  | 
| Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 1431 | void MacroAssembler::PopTryHandler() { | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1432 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 
| Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 1433 | pop(r1); | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 1434 | mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 
| Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 1435 | add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); | 
|  | 1436 | str(r1, MemOperand(ip)); | 
|  | 1437 | } | 
|  | 1438 |  | 
|  | 1439 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1440 | void MacroAssembler::JumpToHandlerEntry() { | 
|  | 1441 | // Compute the handler entry address and jump to it.  The handler table is | 
|  | 1442 | // a fixed array of (smi-tagged) code offsets. | 
|  | 1443 | // r0 = exception, r1 = code object, r2 = state. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1444 |  | 
|  | 1445 | ConstantPoolUnavailableScope constant_pool_unavailable(this); | 
|  | 1446 | if (FLAG_enable_ool_constant_pool) { | 
|  | 1447 | ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset));  // Constant pool. | 
|  | 1448 | } | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1449 | ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset));  // Handler table. | 
|  | 1450 | add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 
|  | 1451 | mov(r2, Operand(r2, LSR, StackHandler::kKindWidth));  // Handler index. | 
|  | 1452 | ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2));  // Smi-tagged offset. | 
|  | 1453 | add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1454 | add(pc, r1, Operand::SmiUntag(r2));  // Jump | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1455 | } | 
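The computed jump above lands at the code start plus the untagged offset read from the handler table. A sketch of that address arithmetic, assuming the usual 32-bit smi tagging (shift by kSmiTagSize); the helper is hypothetical:

    // Handler entry address formed by JumpToHandlerEntry.
    static uint32_t HandlerEntryAddress(uint32_t tagged_code_object,
                                        int32_t smi_tagged_offset) {
      uint32_t code_start =
          tagged_code_object + Code::kHeaderSize - kHeapObjectTag;
      return code_start + (smi_tagged_offset >> kSmiTagSize);  // SmiUntag.
    }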
|  | 1456 |  | 
|  | 1457 |  | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1458 | void MacroAssembler::Throw(Register value) { | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1459 | // Adjust this code if not the case. | 
|  | 1460 | STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1461 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 
|  | 1462 | STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); | 
|  | 1463 | STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); | 
|  | 1464 | STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); | 
|  | 1465 | STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); | 
|  | 1466 |  | 
|  | 1467 | // The exception is expected in r0. | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1468 | if (!value.is(r0)) { | 
|  | 1469 | mov(r0, value); | 
|  | 1470 | } | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1471 | // Drop the stack pointer to the top of the top handler. | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 1472 | mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1473 | ldr(sp, MemOperand(r3)); | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1474 | // Restore the next handler. | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1475 | pop(r2); | 
|  | 1476 | str(r2, MemOperand(r3)); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1477 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1478 | // Get the code object (r1) and state (r2).  Restore the context and frame | 
|  | 1479 | // pointer. | 
|  | 1480 | ldm(ia_w, sp, r1.bit() | r2.bit() | cp.bit() | fp.bit()); | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1481 |  | 
|  | 1482 | // If the handler is a JS frame, restore the context to the frame. | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1483 | // (kind == ENTRY) == (fp == 0) == (cp == 0), so we could test either fp | 
|  | 1484 | // or cp. | 
|  | 1485 | tst(cp, cp); | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1486 | str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); | 
|  | 1487 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1488 | JumpToHandlerEntry(); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1489 | } | 
|  | 1490 |  | 
|  | 1491 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1492 | void MacroAssembler::ThrowUncatchable(Register value) { | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1493 | // Adjust this code if not the case. | 
| Ben Murdoch | 69a99ed | 2011-11-30 16:03:39 +0000 | [diff] [blame] | 1494 | STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); | 
|  | 1495 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1496 | STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); | 
|  | 1497 | STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); | 
|  | 1498 | STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); | 
|  | 1499 | STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); | 
|  | 1500 |  | 
|  | 1501 | // The exception is expected in r0. | 
| Ben Murdoch | 5d4cdbf | 2012-04-11 10:23:59 +0100 | [diff] [blame] | 1502 | if (!value.is(r0)) { | 
| Ben Murdoch | 592a9fc | 2012-03-05 11:04:45 +0000 | [diff] [blame] | 1503 | mov(r0, value); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1504 | } | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1505 | // Drop the stack pointer to the top of the top stack handler. | 
| Ben Murdoch | 592a9fc | 2012-03-05 11:04:45 +0000 | [diff] [blame] | 1506 | mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 
|  | 1507 | ldr(sp, MemOperand(r3)); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1508 |  | 
| Ben Murdoch | 592a9fc | 2012-03-05 11:04:45 +0000 | [diff] [blame] | 1509 | // Unwind the handlers until the ENTRY handler is found. | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1510 | Label fetch_next, check_kind; | 
|  | 1511 | jmp(&check_kind); | 
|  | 1512 | bind(&fetch_next); | 
|  | 1513 | ldr(sp, MemOperand(sp, StackHandlerConstants::kNextOffset)); | 
| Ben Murdoch | 592a9fc | 2012-03-05 11:04:45 +0000 | [diff] [blame] | 1514 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1515 | bind(&check_kind); | 
|  | 1516 | STATIC_ASSERT(StackHandler::JS_ENTRY == 0); | 
|  | 1517 | ldr(r2, MemOperand(sp, StackHandlerConstants::kStateOffset)); | 
|  | 1518 | tst(r2, Operand(StackHandler::KindField::kMask)); | 
|  | 1519 | b(ne, &fetch_next); | 
|  | 1520 |  | 
|  | 1521 | // Set the top handler address to next handler past the top ENTRY handler. | 
| Ben Murdoch | 592a9fc | 2012-03-05 11:04:45 +0000 | [diff] [blame] | 1522 | pop(r2); | 
|  | 1523 | str(r2, MemOperand(r3)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1524 | // Get the code object (r1) and state (r2).  Clear the context and frame | 
|  | 1525 | // pointer (0 was saved in the handler). | 
|  | 1526 | ldm(ia_w, sp, r1.bit() | r2.bit() | cp.bit() | fp.bit()); | 
| Ben Murdoch | 592a9fc | 2012-03-05 11:04:45 +0000 | [diff] [blame] | 1527 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1528 | JumpToHandlerEntry(); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 1529 | } | 
|  | 1530 |  | 
|  | 1531 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1532 | void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, | 
|  | 1533 | Register scratch, | 
|  | 1534 | Label* miss) { | 
|  | 1535 | Label same_contexts; | 
|  | 1536 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1537 | DCHECK(!holder_reg.is(scratch)); | 
|  | 1538 | DCHECK(!holder_reg.is(ip)); | 
|  | 1539 | DCHECK(!scratch.is(ip)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1540 |  | 
|  | 1541 | // Load current lexical context from the stack frame. | 
|  | 1542 | ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 
|  | 1543 | // In debug mode, make sure the lexical context is set. | 
|  | 1544 | #ifdef DEBUG | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1545 | cmp(scratch, Operand::Zero()); | 
|  | 1546 | Check(ne, kWeShouldNotHaveAnEmptyLexicalContext); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1547 | #endif | 
|  | 1548 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1549 | // Load the native context of the current context. | 
|  | 1550 | int offset = | 
|  | 1551 | Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1552 | ldr(scratch, FieldMemOperand(scratch, offset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1553 | ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1554 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1555 | // Check the context is a native context. | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1556 | if (emit_debug_code()) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1557 | // Cannot use ip as a temporary in this verification code, because ip is | 
|  | 1558 | // clobbered as part of cmp with an object Operand. | 
|  | 1559 | push(holder_reg);  // Temporarily save holder on the stack. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1560 | // Read the first word and compare to the native_context_map. | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1561 | ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1562 | LoadRoot(ip, Heap::kNativeContextMapRootIndex); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1563 | cmp(holder_reg, ip); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1564 | Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1565 | pop(holder_reg);  // Restore holder. | 
|  | 1566 | } | 
|  | 1567 |  | 
|  | 1568 | // Check if both contexts are the same. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1569 | ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1570 | cmp(scratch, Operand(ip)); | 
|  | 1571 | b(eq, &same_contexts); | 
|  | 1572 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1573 | // Check the context is a native context. | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1574 | if (emit_debug_code()) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1575 | // Cannot use ip as a temporary in this verification code, because ip is | 
|  | 1576 | // clobbered as part of cmp with an object Operand. | 
|  | 1577 | push(holder_reg);  // Temporarily save holder on the stack. | 
|  | 1578 | mov(holder_reg, ip);  // Move ip to its holding place. | 
|  | 1579 | LoadRoot(ip, Heap::kNullValueRootIndex); | 
|  | 1580 | cmp(holder_reg, ip); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1581 | Check(ne, kJSGlobalProxyContextShouldNotBeNull); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1582 |  | 
|  | 1583 | ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1584 | LoadRoot(ip, Heap::kNativeContextMapRootIndex); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1585 | cmp(holder_reg, ip); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1586 | Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1587 | // Restore ip is not needed. ip is reloaded below. | 
|  | 1588 | pop(holder_reg);  // Restore holder. | 
|  | 1589 | // Restore ip to holder's context. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1590 | ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1591 | } | 
|  | 1592 |  | 
|  | 1593 | // Check that the security token in the calling global object is | 
|  | 1594 | // compatible with the security token in the receiving global | 
|  | 1595 | // object. | 
|  | 1596 | int token_offset = Context::kHeaderSize + | 
|  | 1597 | Context::SECURITY_TOKEN_INDEX * kPointerSize; | 
|  | 1598 |  | 
|  | 1599 | ldr(scratch, FieldMemOperand(scratch, token_offset)); | 
|  | 1600 | ldr(ip, FieldMemOperand(ip, token_offset)); | 
|  | 1601 | cmp(scratch, Operand(ip)); | 
|  | 1602 | b(ne, miss); | 
|  | 1603 |  | 
|  | 1604 | bind(&same_contexts); | 
|  | 1605 | } | 
|  | 1606 |  | 
|  | 1607 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1608 | // Compute the hash code from the untagged key.  This must be kept in sync with | 
|  | 1609 | // ComputeIntegerHash in utils.h and KeyedLoadGenericStub in | 
|  | 1610 | // code-stubs-hydrogen.cc | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1611 | void MacroAssembler::GetNumberHash(Register t0, Register scratch) { | 
|  | 1612 | // First of all we assign the hash seed to scratch. | 
|  | 1613 | LoadRoot(scratch, Heap::kHashSeedRootIndex); | 
|  | 1614 | SmiUntag(scratch); | 
|  | 1615 |  | 
|  | 1616 | // Xor original key with a seed. | 
|  | 1617 | eor(t0, t0, Operand(scratch)); | 
|  | 1618 |  | 
|  | 1619 | // Compute the hash code from the untagged key.  This must be kept in sync | 
|  | 1620 | // with ComputeIntegerHash in utils.h. | 
|  | 1621 | // | 
|  | 1622 | // hash = ~hash + (hash << 15); | 
|  | 1623 | mvn(scratch, Operand(t0)); | 
|  | 1624 | add(t0, scratch, Operand(t0, LSL, 15)); | 
|  | 1625 | // hash = hash ^ (hash >> 12); | 
|  | 1626 | eor(t0, t0, Operand(t0, LSR, 12)); | 
|  | 1627 | // hash = hash + (hash << 2); | 
|  | 1628 | add(t0, t0, Operand(t0, LSL, 2)); | 
|  | 1629 | // hash = hash ^ (hash >> 4); | 
|  | 1630 | eor(t0, t0, Operand(t0, LSR, 4)); | 
|  | 1631 | // hash = hash * 2057; | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 1632 | mov(scratch, Operand(t0, LSL, 11)); | 
|  | 1633 | add(t0, t0, Operand(t0, LSL, 3)); | 
|  | 1634 | add(t0, t0, scratch); | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1635 | // hash = hash ^ (hash >> 16); | 
|  | 1636 | eor(t0, t0, Operand(t0, LSR, 16)); | 
|  | 1637 | } | 
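For reference, the instruction sequence above computes the same hash as the C++ routine it must stay in sync with. A plain restatement as a sketch (the canonical version is ComputeIntegerHash in src/utils.h):

    static uint32_t SeededIntegerHashSketch(uint32_t key, uint32_t seed) {
      uint32_t hash = key ^ seed;  // Xor original key with a seed.
      hash = ~hash + (hash << 15);
      hash = hash ^ (hash >> 12);
      hash = hash + (hash << 2);
      hash = hash ^ (hash >> 4);
      hash = hash * 2057;  // Equals hash + (hash << 3) + (hash << 11).
      hash = hash ^ (hash >> 16);
      return hash;
    }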
|  | 1638 |  | 
|  | 1639 |  | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1640 | void MacroAssembler::LoadFromNumberDictionary(Label* miss, | 
|  | 1641 | Register elements, | 
|  | 1642 | Register key, | 
|  | 1643 | Register result, | 
|  | 1644 | Register t0, | 
|  | 1645 | Register t1, | 
|  | 1646 | Register t2) { | 
|  | 1647 | // Register use: | 
|  | 1648 | // | 
|  | 1649 | // elements - holds the slow-case elements of the receiver on entry. | 
|  | 1650 | //            Unchanged unless 'result' is the same register. | 
|  | 1651 | // | 
|  | 1652 | // key      - holds the smi key on entry. | 
|  | 1653 | //            Unchanged unless 'result' is the same register. | 
|  | 1654 | // | 
|  | 1655 | // result   - holds the result on exit if the load succeeded. | 
|  | 1656 | //            Allowed to be the same as 'key' or 'elements'. | 
|  | 1657 | //            Unchanged on bailout so 'key' or 'elements' can be used | 
|  | 1658 | //            in further computation. | 
|  | 1659 | // | 
|  | 1660 | // Scratch registers: | 
|  | 1661 | // | 
|  | 1662 | // t0 - holds the untagged key on entry and holds the hash once computed. | 
|  | 1663 | // | 
|  | 1664 | // t1 - used to hold the capacity mask of the dictionary | 
|  | 1665 | // | 
|  | 1666 | // t2 - used for the index into the dictionary. | 
|  | 1667 | Label done; | 
|  | 1668 |  | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1669 | GetNumberHash(t0, t1); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1670 |  | 
|  | 1671 | // Compute the capacity mask. | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1672 | ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1673 | SmiUntag(t1); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1674 | sub(t1, t1, Operand(1)); | 
|  | 1675 |  | 
|  | 1676 | // Generate an unrolled loop that performs a few probes before giving up. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1677 | for (int i = 0; i < kNumberDictionaryProbes; i++) { | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1678 | // Use t2 for index calculations and keep the hash intact in t0. | 
|  | 1679 | mov(t2, t0); | 
|  | 1680 | // Compute the masked index: (hash + i + i * i) & mask. | 
|  | 1681 | if (i > 0) { | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1682 | add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i))); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1683 | } | 
|  | 1684 | and_(t2, t2, Operand(t1)); | 
|  | 1685 |  | 
|  | 1686 | // Scale the index by multiplying by the element size. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1687 | DCHECK(SeededNumberDictionary::kEntrySize == 3); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1688 | add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3 | 
|  | 1689 |  | 
|  | 1690 | // Check if the key is identical to the name. | 
|  | 1691 | add(t2, elements, Operand(t2, LSL, kPointerSizeLog2)); | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1692 | ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset)); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1693 | cmp(key, Operand(ip)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1694 | if (i != kNumberDictionaryProbes - 1) { | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1695 | b(eq, &done); | 
|  | 1696 | } else { | 
|  | 1697 | b(ne, miss); | 
|  | 1698 | } | 
|  | 1699 | } | 
|  | 1700 |  | 
|  | 1701 | bind(&done); | 
|  | 1702 | // Check that the value is a normal property. | 
|  | 1703 | // t2: elements + (index * kPointerSize) | 
|  | 1704 | const int kDetailsOffset = | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1705 | SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize; | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1706 | ldr(t1, FieldMemOperand(t2, kDetailsOffset)); | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 1707 | tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask))); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1708 | b(ne, miss); | 
|  | 1709 |  | 
|  | 1710 | // Get the value at the masked, scaled index and return. | 
|  | 1711 | const int kValueOffset = | 
| Ben Murdoch | c7cc028 | 2012-03-05 14:35:55 +0000 | [diff] [blame] | 1712 | SeededNumberDictionary::kElementsStartOffset + kPointerSize; | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 1713 | ldr(result, FieldMemOperand(t2, kValueOffset)); | 
|  | 1714 | } | 
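The unrolled loop above performs a bounded quadratic probe over a power-of-two capacity with three-word entries. A sketch of a single probe's index computation; the helper is hypothetical and simply mirrors the generated code:

    // Word offset (from the elements start) of the probed entry's key slot.
    static uint32_t ProbedEntryWordOffset(uint32_t hash, int probe,
                                          uint32_t capacity_mask) {
      uint32_t index = hash;
      if (probe > 0) index += SeededNumberDictionary::GetProbeOffset(probe);
      index &= capacity_mask;
      return index * SeededNumberDictionary::kEntrySize;  // 3 words/entry.
    }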
|  | 1715 |  | 
|  | 1716 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1717 | void MacroAssembler::Allocate(int object_size, | 
|  | 1718 | Register result, | 
|  | 1719 | Register scratch1, | 
|  | 1720 | Register scratch2, | 
|  | 1721 | Label* gc_required, | 
|  | 1722 | AllocationFlags flags) { | 
|  | 1723 | DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 
| John Reck | 5913587 | 2010-11-02 12:39:01 -0700 | [diff] [blame] | 1724 | if (!FLAG_inline_new) { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1725 | if (emit_debug_code()) { | 
| John Reck | 5913587 | 2010-11-02 12:39:01 -0700 | [diff] [blame] | 1726 | // Trash the registers to simulate an allocation failure. | 
|  | 1727 | mov(result, Operand(0x7091)); | 
|  | 1728 | mov(scratch1, Operand(0x7191)); | 
|  | 1729 | mov(scratch2, Operand(0x7291)); | 
|  | 1730 | } | 
|  | 1731 | jmp(gc_required); | 
|  | 1732 | return; | 
|  | 1733 | } | 
|  | 1734 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1735 | DCHECK(!result.is(scratch1)); | 
|  | 1736 | DCHECK(!result.is(scratch2)); | 
|  | 1737 | DCHECK(!scratch1.is(scratch2)); | 
|  | 1738 | DCHECK(!scratch1.is(ip)); | 
|  | 1739 | DCHECK(!scratch2.is(ip)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1740 |  | 
| Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 1741 | // Make object size into bytes. | 
|  | 1742 | if ((flags & SIZE_IN_WORDS) != 0) { | 
|  | 1743 | object_size *= kPointerSize; | 
|  | 1744 | } | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1745 | DCHECK_EQ(0, object_size & kObjectAlignmentMask); | 
| Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 1746 |  | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1747 | // Check relative positions of allocation top and limit addresses. | 
|  | 1748 | // The values must be adjacent in memory to allow the use of LDM. | 
|  | 1749 | // Also, assert that the registers are numbered such that the values | 
|  | 1750 | // are loaded in the correct order. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1751 | ExternalReference allocation_top = | 
|  | 1752 | AllocationUtils::GetAllocationTopReference(isolate(), flags); | 
|  | 1753 | ExternalReference allocation_limit = | 
|  | 1754 | AllocationUtils::GetAllocationLimitReference(isolate(), flags); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1755 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1756 | intptr_t top   = | 
|  | 1757 | reinterpret_cast<intptr_t>(allocation_top.address()); | 
|  | 1758 | intptr_t limit = | 
|  | 1759 | reinterpret_cast<intptr_t>(allocation_limit.address()); | 
|  | 1760 | DCHECK((limit - top) == kPointerSize); | 
|  | 1761 | DCHECK(result.code() < ip.code()); | 
|  | 1762 |  | 
|  | 1763 | // Set up allocation top address register. | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1764 | Register topaddr = scratch1; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1765 | mov(topaddr, Operand(allocation_top)); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1766 |  | 
|  | 1767 | // This code stores a temporary value in ip. This is OK, as the code below | 
|  | 1768 | // does not need ip for implicit literal generation. | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1769 | if ((flags & RESULT_CONTAINS_TOP) == 0) { | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1770 | // Load allocation top into result and allocation limit into ip. | 
|  | 1771 | ldm(ia, topaddr, result.bit() | ip.bit()); | 
|  | 1772 | } else { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1773 | if (emit_debug_code()) { | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1774 | // Assert that result actually contains top on entry. ip is used | 
|  | 1775 | // immediately below so this use of ip does not cause difference with | 
|  | 1776 | // respect to register content between debug and release mode. | 
|  | 1777 | ldr(ip, MemOperand(topaddr)); | 
|  | 1778 | cmp(result, ip); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1779 | Check(eq, kUnexpectedAllocationTop); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1780 | } | 
|  | 1781 | // Load allocation limit into ip. Result already contains allocation top. | 
|  | 1782 | ldr(ip, MemOperand(topaddr, limit - top)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1783 | } | 
|  | 1784 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1785 | if ((flags & DOUBLE_ALIGNMENT) != 0) { | 
|  | 1786 | // Align the next allocation. Storing the filler map without checking top is | 
|  | 1787 | // safe in new-space because the limit of the heap is aligned there. | 
|  | 1788 | DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0); | 
|  | 1789 | STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 
|  | 1790 | and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC); | 
|  | 1791 | Label aligned; | 
|  | 1792 | b(eq, &aligned); | 
|  | 1793 | if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) { | 
|  | 1794 | cmp(result, Operand(ip)); | 
|  | 1795 | b(hs, gc_required); | 
|  | 1796 | } | 
|  | 1797 | mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 
|  | 1798 | str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex)); | 
|  | 1799 | bind(&aligned); | 
|  | 1800 | } | 
|  | 1801 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1802 | // Calculate new top and bail out if new space is exhausted. Use result | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1803 | // to calculate the new top. We must preserve the ip register at this | 
|  | 1804 | // point, so we cannot just use add(). | 
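|  |  | // The loop below splits object_size into chunks that each encode as a | 
|  |  | // single ARM immediate (an 8-bit value at an even rotation), so the new | 
|  |  | // top is computed with a short chain of conditional adds. For example, | 
|  |  | // a size of 0x1001 would be added as 0x1 followed by 0x1000 (an | 
|  |  | // illustrative split; the exact chunks depend on the bit pattern). | 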
|  | 1805 | DCHECK(object_size > 0); | 
|  | 1806 | Register source = result; | 
|  | 1807 | Condition cond = al; | 
|  | 1808 | int shift = 0; | 
|  | 1809 | while (object_size != 0) { | 
|  | 1810 | if (((object_size >> shift) & 0x03) == 0) { | 
|  | 1811 | shift += 2; | 
|  | 1812 | } else { | 
|  | 1813 | int bits = object_size & (0xff << shift); | 
|  | 1814 | object_size -= bits; | 
|  | 1815 | shift += 8; | 
|  | 1816 | Operand bits_operand(bits); | 
|  | 1817 | DCHECK(bits_operand.instructions_required(this) == 1); | 
|  | 1818 | add(scratch2, source, bits_operand, SetCC, cond); | 
|  | 1819 | source = scratch2; | 
|  | 1820 | cond = cc; | 
|  | 1821 | } | 
|  | 1822 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1823 | b(cs, gc_required); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1824 | cmp(scratch2, Operand(ip)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1825 | b(hi, gc_required); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1826 | str(scratch2, MemOperand(topaddr)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1827 |  | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1828 | // Tag object if requested. | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1829 | if ((flags & TAG_OBJECT) != 0) { | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1830 | add(result, result, Operand(kHeapObjectTag)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1831 | } | 
|  | 1832 | } | 
|  | 1833 |  | 
|  | 1834 |  | 
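|  |  | // Both Allocate overloads are typically used as in the following sketch | 
|  |  | // (illustrative, not from this file; register choices and the label name | 
|  |  | // are assumptions), where __ is the usual ACCESS_MASM(masm) shorthand: | 
|  |  | // | 
|  |  | //   Label gc_required; | 
|  |  | //   __ Allocate(HeapNumber::kSize, r0, r1, r2, &gc_required, TAG_OBJECT); | 
|  |  | //   // ... store the map and initialize the object's fields ... | 
|  |  | //   __ bind(&gc_required); | 
|  |  | //   // ... fall back to a runtime allocation ... | 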
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1835 | void MacroAssembler::Allocate(Register object_size, | 
|  | 1836 | Register result, | 
|  | 1837 | Register scratch1, | 
|  | 1838 | Register scratch2, | 
|  | 1839 | Label* gc_required, | 
|  | 1840 | AllocationFlags flags) { | 
| John Reck | 5913587 | 2010-11-02 12:39:01 -0700 | [diff] [blame] | 1841 | if (!FLAG_inline_new) { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1842 | if (emit_debug_code()) { | 
| John Reck | 5913587 | 2010-11-02 12:39:01 -0700 | [diff] [blame] | 1843 | // Trash the registers to simulate an allocation failure. | 
|  | 1844 | mov(result, Operand(0x7091)); | 
|  | 1845 | mov(scratch1, Operand(0x7191)); | 
|  | 1846 | mov(scratch2, Operand(0x7291)); | 
|  | 1847 | } | 
|  | 1848 | jmp(gc_required); | 
|  | 1849 | return; | 
|  | 1850 | } | 
|  | 1851 |  | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1852 | // Assert that the register arguments are different and that none of | 
|  | 1853 | // them are ip. ip is used explicitly in the code generated below. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1854 | DCHECK(!result.is(scratch1)); | 
|  | 1855 | DCHECK(!result.is(scratch2)); | 
|  | 1856 | DCHECK(!scratch1.is(scratch2)); | 
|  | 1857 | DCHECK(!object_size.is(ip)); | 
|  | 1858 | DCHECK(!result.is(ip)); | 
|  | 1859 | DCHECK(!scratch1.is(ip)); | 
|  | 1860 | DCHECK(!scratch2.is(ip)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1861 |  | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1862 | // Check relative positions of allocation top and limit addresses. | 
|  | 1863 | // The values must be adjacent in memory to allow the use of LDM. | 
|  | 1864 | // Also, assert that the registers are numbered such that the values | 
|  | 1865 | // are loaded in the correct order. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1866 | ExternalReference allocation_top = | 
|  | 1867 | AllocationUtils::GetAllocationTopReference(isolate(), flags); | 
|  | 1868 | ExternalReference allocation_limit = | 
|  | 1869 | AllocationUtils::GetAllocationLimitReference(isolate(), flags); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1870 | intptr_t top = | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1871 | reinterpret_cast<intptr_t>(allocation_top.address()); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1872 | intptr_t limit = | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1873 | reinterpret_cast<intptr_t>(allocation_limit.address()); | 
|  | 1874 | DCHECK((limit - top) == kPointerSize); | 
|  | 1875 | DCHECK(result.code() < ip.code()); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1876 |  | 
|  | 1877 | // Set up allocation top address. | 
|  | 1878 | Register topaddr = scratch1; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1879 | mov(topaddr, Operand(allocation_top)); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1880 |  | 
|  | 1881 | // This code stores a temporary value in ip. This is OK, as the code below | 
|  | 1882 | // does not need ip for implicit literal generation. | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1883 | if ((flags & RESULT_CONTAINS_TOP) == 0) { | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1884 | // Load allocation top into result and allocation limit into ip. | 
|  | 1885 | ldm(ia, topaddr, result.bit() | ip.bit()); | 
|  | 1886 | } else { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1887 | if (emit_debug_code()) { | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1888 | // Assert that result actually contains top on entry. ip is used | 
|  | 1889 | // immediately below, so this use of ip does not cause a difference in | 
|  | 1890 | // register content between debug and release mode. | 
|  | 1891 | ldr(ip, MemOperand(topaddr)); | 
|  | 1892 | cmp(result, ip); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1893 | Check(eq, kUnexpectedAllocationTop); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1894 | } | 
|  | 1895 | // Load allocation limit into ip. Result already contains allocation top. | 
|  | 1896 | ldr(ip, MemOperand(topaddr, limit - top)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1897 | } | 
|  | 1898 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1899 | if ((flags & DOUBLE_ALIGNMENT) != 0) { | 
|  | 1900 | // Align the next allocation. Storing the filler map without checking top is | 
|  | 1901 | // safe in new-space because the limit of the heap is aligned there. | 
|  | 1902 | DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0); | 
|  | 1903 | DCHECK(kPointerAlignment * 2 == kDoubleAlignment); | 
|  | 1904 | and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC); | 
|  | 1905 | Label aligned; | 
|  | 1906 | b(eq, &aligned); | 
|  | 1907 | if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) { | 
|  | 1908 | cmp(result, Operand(ip)); | 
|  | 1909 | b(hs, gc_required); | 
|  | 1910 | } | 
|  | 1911 | mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 
|  | 1912 | str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex)); | 
|  | 1913 | bind(&aligned); | 
|  | 1914 | } | 
|  | 1915 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1916 | // Calculate new top and bail out if new space is exhausted. Use result | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1917 | // to calculate the new top. Object size may be in words so a shift is | 
|  | 1918 | // required to get the number of bytes. | 
| Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 1919 | if ((flags & SIZE_IN_WORDS) != 0) { | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1920 | add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC); | 
| Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 1921 | } else { | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1922 | add(scratch2, result, Operand(object_size), SetCC); | 
| Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 1923 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 1924 | b(cs, gc_required); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1925 | cmp(scratch2, Operand(ip)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1926 | b(hi, gc_required); | 
|  | 1927 |  | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 1928 | // Update allocation top. result temporarily holds the new top. | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1929 | if (emit_debug_code()) { | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1930 | tst(scratch2, Operand(kObjectAlignmentMask)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1931 | Check(eq, kUnalignedAllocationInNewSpace); | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 1932 | } | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 1933 | str(scratch2, MemOperand(topaddr)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1934 |  | 
|  | 1935 | // Tag object if requested. | 
|  | 1936 | if ((flags & TAG_OBJECT) != 0) { | 
|  | 1937 | add(result, result, Operand(kHeapObjectTag)); | 
|  | 1938 | } | 
|  | 1939 | } | 
|  | 1940 |  | 
|  | 1941 |  | 
|  | 1942 | void MacroAssembler::UndoAllocationInNewSpace(Register object, | 
|  | 1943 | Register scratch) { | 
|  | 1944 | ExternalReference new_space_allocation_top = | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 1945 | ExternalReference::new_space_allocation_top_address(isolate()); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1946 |  | 
|  | 1947 | // Make sure the object has no tag before resetting top. | 
|  | 1948 | and_(object, object, Operand(~kHeapObjectTagMask)); | 
|  | 1949 | #ifdef DEBUG | 
|  | 1950 | // Check that the object being un-allocated is below the current top. | 
|  | 1951 | mov(scratch, Operand(new_space_allocation_top)); | 
|  | 1952 | ldr(scratch, MemOperand(scratch)); | 
|  | 1953 | cmp(object, scratch); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1954 | Check(lt, kUndoAllocationOfNonAllocatedMemory); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1955 | #endif | 
|  | 1956 | // Write the address of the object to un-allocate as the current top. | 
|  | 1957 | mov(scratch, Operand(new_space_allocation_top)); | 
|  | 1958 | str(object, MemOperand(scratch)); | 
|  | 1959 | } | 
|  | 1960 |  | 
|  | 1961 |  | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 1962 | void MacroAssembler::AllocateTwoByteString(Register result, | 
|  | 1963 | Register length, | 
|  | 1964 | Register scratch1, | 
|  | 1965 | Register scratch2, | 
|  | 1966 | Register scratch3, | 
|  | 1967 | Label* gc_required) { | 
|  | 1968 | // Calculate the number of bytes needed for the characters in the string while | 
|  | 1969 | // observing object alignment. | 
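|  |  | // The computed size is equivalent to | 
|  |  | // RoundUp(length * 2 + SeqTwoByteString::kHeaderSize, kObjectAlignment). | 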
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1970 | DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 1971 | mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars. | 
|  | 1972 | add(scratch1, scratch1, | 
|  | 1973 | Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize)); | 
| Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 1974 | and_(scratch1, scratch1, Operand(~kObjectAlignmentMask)); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 1975 |  | 
|  | 1976 | // Allocate two-byte string in new space. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1977 | Allocate(scratch1, | 
|  | 1978 | result, | 
|  | 1979 | scratch2, | 
|  | 1980 | scratch3, | 
|  | 1981 | gc_required, | 
|  | 1982 | TAG_OBJECT); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 1983 |  | 
|  | 1984 | // Set the map, length and hash field. | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 1985 | InitializeNewString(result, | 
|  | 1986 | length, | 
|  | 1987 | Heap::kStringMapRootIndex, | 
|  | 1988 | scratch1, | 
|  | 1989 | scratch2); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 1990 | } | 
|  | 1991 |  | 
|  | 1992 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1993 | void MacroAssembler::AllocateOneByteString(Register result, Register length, | 
|  | 1994 | Register scratch1, Register scratch2, | 
|  | 1995 | Register scratch3, | 
|  | 1996 | Label* gc_required) { | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 1997 | // Calculate the number of bytes needed for the characters in the string while | 
|  | 1998 | // observing object alignment. | 
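|  |  | // The computed size is equivalent to | 
|  |  | // RoundUp(length + SeqOneByteString::kHeaderSize, kObjectAlignment). | 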
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 1999 | DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0); | 
|  | 2000 | DCHECK(kCharSize == 1); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2001 | add(scratch1, length, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2002 | Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize)); | 
| Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 2003 | and_(scratch1, scratch1, Operand(~kObjectAlignmentMask)); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2004 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2005 | // Allocate one-byte string in new space. | 
|  | 2006 | Allocate(scratch1, | 
|  | 2007 | result, | 
|  | 2008 | scratch2, | 
|  | 2009 | scratch3, | 
|  | 2010 | gc_required, | 
|  | 2011 | TAG_OBJECT); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2012 |  | 
|  | 2013 | // Set the map, length and hash field. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2014 | InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex, | 
|  | 2015 | scratch1, scratch2); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2016 | } | 
|  | 2017 |  | 
|  | 2018 |  | 
|  | 2019 | void MacroAssembler::AllocateTwoByteConsString(Register result, | 
|  | 2020 | Register length, | 
|  | 2021 | Register scratch1, | 
|  | 2022 | Register scratch2, | 
|  | 2023 | Label* gc_required) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2024 | Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, | 
|  | 2025 | TAG_OBJECT); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2026 |  | 
|  | 2027 | InitializeNewString(result, | 
|  | 2028 | length, | 
|  | 2029 | Heap::kConsStringMapRootIndex, | 
|  | 2030 | scratch1, | 
|  | 2031 | scratch2); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2032 | } | 
|  | 2033 |  | 
|  | 2034 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2035 | void MacroAssembler::AllocateOneByteConsString(Register result, Register length, | 
|  | 2036 | Register scratch1, | 
|  | 2037 | Register scratch2, | 
|  | 2038 | Label* gc_required) { | 
|  | 2039 | Allocate(ConsString::kSize, | 
|  | 2040 | result, | 
|  | 2041 | scratch1, | 
|  | 2042 | scratch2, | 
|  | 2043 | gc_required, | 
|  | 2044 | TAG_OBJECT); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2045 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2046 | InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex, | 
|  | 2047 | scratch1, scratch2); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2048 | } | 
|  | 2049 |  | 
|  | 2050 |  | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 2051 | void MacroAssembler::AllocateTwoByteSlicedString(Register result, | 
|  | 2052 | Register length, | 
|  | 2053 | Register scratch1, | 
|  | 2054 | Register scratch2, | 
|  | 2055 | Label* gc_required) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2056 | Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, | 
|  | 2057 | TAG_OBJECT); | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 2058 |  | 
|  | 2059 | InitializeNewString(result, | 
|  | 2060 | length, | 
|  | 2061 | Heap::kSlicedStringMapRootIndex, | 
|  | 2062 | scratch1, | 
|  | 2063 | scratch2); | 
|  | 2064 | } | 
|  | 2065 |  | 
|  | 2066 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2067 | void MacroAssembler::AllocateOneByteSlicedString(Register result, | 
|  | 2068 | Register length, | 
|  | 2069 | Register scratch1, | 
|  | 2070 | Register scratch2, | 
|  | 2071 | Label* gc_required) { | 
|  | 2072 | Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, | 
|  | 2073 | TAG_OBJECT); | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 2074 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2075 | InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex, | 
|  | 2076 | scratch1, scratch2); | 
| Ben Murdoch | 589d697 | 2011-11-30 16:04:58 +0000 | [diff] [blame] | 2077 | } | 
|  | 2078 |  | 
|  | 2079 |  | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2080 | void MacroAssembler::CompareObjectType(Register object, | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2081 | Register map, | 
|  | 2082 | Register type_reg, | 
|  | 2083 | InstanceType type) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2084 | const Register temp = type_reg.is(no_reg) ? ip : type_reg; | 
|  | 2085 |  | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2086 | ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2087 | CompareInstanceType(map, temp, type); | 
|  | 2088 | } | 
|  | 2089 |  | 
|  | 2090 |  | 
|  | 2091 | void MacroAssembler::CheckObjectTypeRange(Register object, | 
|  | 2092 | Register map, | 
|  | 2093 | InstanceType min_type, | 
|  | 2094 | InstanceType max_type, | 
|  | 2095 | Label* false_label) { | 
|  | 2096 | STATIC_ASSERT(Map::kInstanceTypeOffset < 4096); | 
|  | 2097 | STATIC_ASSERT(LAST_TYPE < 256); | 
|  | 2098 | ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 
|  | 2099 | ldrb(ip, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 
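|  |  | // Unsigned comparison: (type - min_type) is above (max_type - min_type) | 
|  |  | // exactly when type lies outside [min_type, max_type], so one branch | 
|  |  | // checks both bounds. | 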
|  | 2100 | sub(ip, ip, Operand(min_type)); | 
|  | 2101 | cmp(ip, Operand(max_type - min_type)); | 
|  | 2102 | b(hi, false_label); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2103 | } | 
|  | 2104 |  | 
|  | 2105 |  | 
|  | 2106 | void MacroAssembler::CompareInstanceType(Register map, | 
|  | 2107 | Register type_reg, | 
|  | 2108 | InstanceType type) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2109 | // Registers map and type_reg can be ip. The STATIC_ASSERTs below check | 
|  | 2110 | // that the constants fit directly into the two instructions, so neither | 
|  | 2111 | // instruction needs ip for implicit literal generation. | 
|  | 2112 | STATIC_ASSERT(Map::kInstanceTypeOffset < 4096); | 
|  | 2113 | STATIC_ASSERT(LAST_TYPE < 256); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2114 | ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 
|  | 2115 | cmp(type_reg, Operand(type)); | 
|  | 2116 | } | 
|  | 2117 |  | 
|  | 2118 |  | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 2119 | void MacroAssembler::CompareRoot(Register obj, | 
|  | 2120 | Heap::RootListIndex index) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2121 | DCHECK(!obj.is(ip)); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 2122 | LoadRoot(ip, index); | 
|  | 2123 | cmp(obj, ip); | 
|  | 2124 | } | 
|  | 2125 |  | 
|  | 2126 |  | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 2127 | void MacroAssembler::CheckFastElements(Register map, | 
|  | 2128 | Register scratch, | 
|  | 2129 | Label* fail) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2130 | STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | 
|  | 2131 | STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 
|  | 2132 | STATIC_ASSERT(FAST_ELEMENTS == 2); | 
|  | 2133 | STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 2134 | ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2135 | cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue)); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 2136 | b(hi, fail); | 
|  | 2137 | } | 
|  | 2138 |  | 
|  | 2139 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2140 | void MacroAssembler::CheckFastObjectElements(Register map, | 
|  | 2141 | Register scratch, | 
|  | 2142 | Label* fail) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2143 | STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | 
|  | 2144 | STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 
|  | 2145 | STATIC_ASSERT(FAST_ELEMENTS == 2); | 
|  | 2146 | STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2147 | ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset)); | 
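|  |  | // Reject maps whose elements kind is FAST_SMI_ELEMENTS or | 
|  |  | // FAST_HOLEY_SMI_ELEMENTS (ls) as well as anything past | 
|  |  | // FAST_HOLEY_ELEMENTS (hi); only fast object elements pass. | 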
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2148 | cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2149 | b(ls, fail); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2150 | cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2151 | b(hi, fail); | 
|  | 2152 | } | 
|  | 2153 |  | 
|  | 2154 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2155 | void MacroAssembler::CheckFastSmiElements(Register map, | 
|  | 2156 | Register scratch, | 
|  | 2157 | Label* fail) { | 
|  | 2158 | STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | 
|  | 2159 | STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2160 | ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2161 | cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2162 | b(hi, fail); | 
|  | 2163 | } | 
|  | 2164 |  | 
|  | 2165 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2166 | void MacroAssembler::StoreNumberToDoubleElements( | 
|  | 2167 | Register value_reg, | 
|  | 2168 | Register key_reg, | 
|  | 2169 | Register elements_reg, | 
|  | 2170 | Register scratch1, | 
|  | 2171 | LowDwVfpRegister double_scratch, | 
|  | 2172 | Label* fail, | 
|  | 2173 | int elements_offset) { | 
|  | 2174 | Label smi_value, store; | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2175 |  | 
|  | 2176 | // Handle smi values specially. | 
|  | 2177 | JumpIfSmi(value_reg, &smi_value); | 
|  | 2178 |  | 
|  | 2179 | // Ensure that the object is a heap number. | 
|  | 2180 | CheckMap(value_reg, | 
|  | 2181 | scratch1, | 
|  | 2182 | isolate()->factory()->heap_number_map(), | 
|  | 2183 | fail, | 
|  | 2184 | DONT_DO_SMI_CHECK); | 
|  | 2185 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2186 | vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | 
|  | 2187 | // Force a canonical NaN. | 
|  | 2188 | if (emit_debug_code()) { | 
|  | 2189 | vmrs(ip); | 
|  | 2190 | tst(ip, Operand(kVFPDefaultNaNModeControlBit)); | 
|  | 2191 | Assert(ne, kDefaultNaNModeNotSet); | 
|  | 2192 | } | 
|  | 2193 | VFPCanonicalizeNaN(double_scratch); | 
|  | 2194 | b(&store); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2195 |  | 
|  | 2196 | bind(&smi_value); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2197 | SmiToDouble(double_scratch, value_reg); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2198 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2199 | bind(&store); | 
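|  |  | // DoubleOffsetFromSmiKey scales the smi key straight to a byte offset | 
|  |  | // (untagged index times kDoubleSize) without an explicit untag. | 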
|  | 2200 | add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg)); | 
|  | 2201 | vstr(double_scratch, | 
|  | 2202 | FieldMemOperand(scratch1, | 
|  | 2203 | FixedDoubleArray::kHeaderSize - elements_offset)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2204 | } | 
|  | 2205 |  | 
|  | 2206 |  | 
|  | 2207 | void MacroAssembler::CompareMap(Register obj, | 
|  | 2208 | Register scratch, | 
|  | 2209 | Handle<Map> map, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2210 | Label* early_success) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2211 | ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2212 | CompareMap(scratch, map, early_success); | 
|  | 2213 | } | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2214 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2215 |  | 
|  | 2216 | void MacroAssembler::CompareMap(Register obj_map, | 
|  | 2217 | Handle<Map> map, | 
|  | 2218 | Label* early_success) { | 
|  | 2219 | cmp(obj_map, Operand(map)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2220 | } | 
|  | 2221 |  | 
|  | 2222 |  | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2223 | void MacroAssembler::CheckMap(Register obj, | 
|  | 2224 | Register scratch, | 
|  | 2225 | Handle<Map> map, | 
|  | 2226 | Label* fail, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2227 | SmiCheckType smi_check_type) { | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 2228 | if (smi_check_type == DO_SMI_CHECK) { | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2229 | JumpIfSmi(obj, fail); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2230 | } | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2231 |  | 
|  | 2232 | Label success; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2233 | CompareMap(obj, scratch, map, &success); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2234 | b(ne, fail); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2235 | bind(&success); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2236 | } | 
|  | 2237 |  | 
|  | 2238 |  | 
| Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 2239 | void MacroAssembler::CheckMap(Register obj, | 
|  | 2240 | Register scratch, | 
|  | 2241 | Heap::RootListIndex index, | 
|  | 2242 | Label* fail, | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 2243 | SmiCheckType smi_check_type) { | 
|  | 2244 | if (smi_check_type == DO_SMI_CHECK) { | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2245 | JumpIfSmi(obj, fail); | 
| Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 2246 | } | 
|  | 2247 | ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 
|  | 2248 | LoadRoot(ip, index); | 
|  | 2249 | cmp(scratch, ip); | 
|  | 2250 | b(ne, fail); | 
|  | 2251 | } | 
|  | 2252 |  | 
|  | 2253 |  | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 2254 | void MacroAssembler::DispatchMap(Register obj, | 
|  | 2255 | Register scratch, | 
|  | 2256 | Handle<Map> map, | 
|  | 2257 | Handle<Code> success, | 
|  | 2258 | SmiCheckType smi_check_type) { | 
|  | 2259 | Label fail; | 
|  | 2260 | if (smi_check_type == DO_SMI_CHECK) { | 
|  | 2261 | JumpIfSmi(obj, &fail); | 
|  | 2262 | } | 
|  | 2263 | ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 
|  | 2264 | mov(ip, Operand(map)); | 
|  | 2265 | cmp(scratch, ip); | 
|  | 2266 | Jump(success, RelocInfo::CODE_TARGET, eq); | 
|  | 2267 | bind(&fail); | 
|  | 2268 | } | 
|  | 2269 |  | 
|  | 2270 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2271 | void MacroAssembler::TryGetFunctionPrototype(Register function, | 
|  | 2272 | Register result, | 
|  | 2273 | Register scratch, | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2274 | Label* miss, | 
|  | 2275 | bool miss_on_bound_function) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2276 | Label non_instance; | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2277 | if (miss_on_bound_function) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2278 | // Check that the receiver isn't a smi. | 
|  | 2279 | JumpIfSmi(function, miss); | 
|  | 2280 |  | 
|  | 2281 | // Check that the function really is a function.  Load map into result reg. | 
|  | 2282 | CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE); | 
|  | 2283 | b(ne, miss); | 
|  | 2284 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2285 | ldr(scratch, | 
|  | 2286 | FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 
|  | 2287 | ldr(scratch, | 
|  | 2288 | FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset)); | 
|  | 2289 | tst(scratch, | 
|  | 2290 | Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction))); | 
|  | 2291 | b(ne, miss); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2292 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2293 | // Make sure that the function has an instance prototype. | 
|  | 2294 | ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset)); | 
|  | 2295 | tst(scratch, Operand(1 << Map::kHasNonInstancePrototype)); | 
|  | 2296 | b(ne, &non_instance); | 
|  | 2297 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2298 |  | 
|  | 2299 | // Get the prototype or initial map from the function. | 
|  | 2300 | ldr(result, | 
|  | 2301 | FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 
|  | 2302 |  | 
|  | 2303 | // If the prototype or initial map is the hole, don't return it and | 
|  | 2304 | // simply miss the cache instead. This will allow us to allocate a | 
|  | 2305 | // prototype object on-demand in the runtime system. | 
|  | 2306 | LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 
|  | 2307 | cmp(result, ip); | 
|  | 2308 | b(eq, miss); | 
|  | 2309 |  | 
|  | 2310 | // If the function does not have an initial map, we're done. | 
|  | 2311 | Label done; | 
|  | 2312 | CompareObjectType(result, scratch, scratch, MAP_TYPE); | 
|  | 2313 | b(ne, &done); | 
|  | 2314 |  | 
|  | 2315 | // Get the prototype from the initial map. | 
|  | 2316 | ldr(result, FieldMemOperand(result, Map::kPrototypeOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2317 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2318 | if (miss_on_bound_function) { | 
|  | 2319 | jmp(&done); | 
|  | 2320 |  | 
|  | 2321 | // Non-instance prototype: Fetch prototype from constructor field | 
|  | 2322 | // in initial map. | 
|  | 2323 | bind(&non_instance); | 
|  | 2324 | ldr(result, FieldMemOperand(result, Map::kConstructorOffset)); | 
|  | 2325 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2326 |  | 
|  | 2327 | // All done. | 
|  | 2328 | bind(&done); | 
|  | 2329 | } | 
|  | 2330 |  | 
|  | 2331 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2332 | void MacroAssembler::CallStub(CodeStub* stub, | 
|  | 2333 | TypeFeedbackId ast_id, | 
|  | 2334 | Condition cond) { | 
|  | 2335 | DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs. | 
|  | 2336 | Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2337 | } | 
|  | 2338 |  | 
|  | 2339 |  | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2340 | void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) { | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2341 | Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond); | 
|  | 2342 | } | 
|  | 2343 |  | 
|  | 2344 |  | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2345 | static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 
|  | 2346 | return ref0.address() - ref1.address(); | 
|  | 2347 | } | 
|  | 2348 |  | 
|  | 2349 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2350 | void MacroAssembler::CallApiFunctionAndReturn( | 
|  | 2351 | Register function_address, | 
|  | 2352 | ExternalReference thunk_ref, | 
|  | 2353 | int stack_space, | 
|  | 2354 | MemOperand return_value_operand, | 
|  | 2355 | MemOperand* context_restore_operand) { | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2356 | ExternalReference next_address = | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2357 | ExternalReference::handle_scope_next_address(isolate()); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2358 | const int kNextOffset = 0; | 
|  | 2359 | const int kLimitOffset = AddressOffset( | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2360 | ExternalReference::handle_scope_limit_address(isolate()), | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2361 | next_address); | 
|  | 2362 | const int kLevelOffset = AddressOffset( | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2363 | ExternalReference::handle_scope_level_address(isolate()), | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2364 | next_address); | 
|  | 2365 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2366 | DCHECK(function_address.is(r1) || function_address.is(r2)); | 
|  | 2367 |  | 
|  | 2368 | Label profiler_disabled; | 
|  | 2369 | Label end_profiler_check; | 
|  | 2370 | mov(r9, Operand(ExternalReference::is_profiling_address(isolate()))); | 
|  | 2371 | ldrb(r9, MemOperand(r9, 0)); | 
|  | 2372 | cmp(r9, Operand(0)); | 
|  | 2373 | b(eq, &profiler_disabled); | 
|  | 2374 |  | 
|  | 2375 | // Additional parameter is the address of the actual callback. | 
|  | 2376 | mov(r3, Operand(thunk_ref)); | 
|  | 2377 | jmp(&end_profiler_check); | 
|  | 2378 |  | 
|  | 2379 | bind(&profiler_disabled); | 
|  | 2380 | Move(r3, function_address); | 
|  | 2381 | bind(&end_profiler_check); | 
|  | 2382 |  | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2383 | // Allocate HandleScope in callee-save registers. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2384 | mov(r9, Operand(next_address)); | 
|  | 2385 | ldr(r4, MemOperand(r9, kNextOffset)); | 
|  | 2386 | ldr(r5, MemOperand(r9, kLimitOffset)); | 
|  | 2387 | ldr(r6, MemOperand(r9, kLevelOffset)); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2388 | add(r6, r6, Operand(1)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2389 | str(r6, MemOperand(r9, kLevelOffset)); | 
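|  |  | // r4, r5 and r6 now hold the saved next, limit and level fields of the | 
|  |  | // HandleScope data; they are checked and restored on the way out below. | 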
|  | 2390 |  | 
|  | 2391 | if (FLAG_log_timer_events) { | 
|  | 2392 | FrameScope frame(this, StackFrame::MANUAL); | 
|  | 2393 | PushSafepointRegisters(); | 
|  | 2394 | PrepareCallCFunction(1, r0); | 
|  | 2395 | mov(r0, Operand(ExternalReference::isolate_address(isolate()))); | 
|  | 2396 | CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1); | 
|  | 2397 | PopSafepointRegisters(); | 
|  | 2398 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2399 |  | 
|  | 2400 | // Native call returns to the DirectCEntry stub which redirects to the | 
|  | 2401 | // return address pushed on the stack (it could have moved after GC). | 
|  | 2402 | // DirectCEntry stub itself is generated early and never moves. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2403 | DirectCEntryStub stub(isolate()); | 
|  | 2404 | stub.GenerateCall(this, r3); | 
|  | 2405 |  | 
|  | 2406 | if (FLAG_log_timer_events) { | 
|  | 2407 | FrameScope frame(this, StackFrame::MANUAL); | 
|  | 2408 | PushSafepointRegisters(); | 
|  | 2409 | PrepareCallCFunction(1, r0); | 
|  | 2410 | mov(r0, Operand(ExternalReference::isolate_address(isolate()))); | 
|  | 2411 | CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1); | 
|  | 2412 | PopSafepointRegisters(); | 
|  | 2413 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2414 |  | 
|  | 2415 | Label promote_scheduled_exception; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2416 | Label exception_handled; | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2417 | Label delete_allocated_handles; | 
|  | 2418 | Label leave_exit_frame; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2419 | Label return_value_loaded; | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2420 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2421 | // Load the call result from the ReturnValue slot. | 
|  | 2422 | ldr(r0, return_value_operand); | 
|  | 2423 | bind(&return_value_loaded); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2424 | // No more valid handles (the result handle was the last one). Restore | 
|  | 2425 | // previous handle scope. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2426 | str(r4, MemOperand(r9, kNextOffset)); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2427 | if (emit_debug_code()) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2428 | ldr(r1, MemOperand(r9, kLevelOffset)); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2429 | cmp(r1, r6); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2430 | Check(eq, kUnexpectedLevelAfterReturnFromApiCall); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2431 | } | 
|  | 2432 | sub(r6, r6, Operand(1)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2433 | str(r6, MemOperand(r9, kLevelOffset)); | 
|  | 2434 | ldr(ip, MemOperand(r9, kLimitOffset)); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2435 | cmp(r5, ip); | 
|  | 2436 | b(ne, &delete_allocated_handles); | 
|  | 2437 |  | 
|  | 2438 | // Check if the function scheduled an exception. | 
|  | 2439 | bind(&leave_exit_frame); | 
|  | 2440 | LoadRoot(r4, Heap::kTheHoleValueRootIndex); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2441 | mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate()))); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2442 | ldr(r5, MemOperand(ip)); | 
|  | 2443 | cmp(r4, r5); | 
|  | 2444 | b(ne, &promote_scheduled_exception); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2445 | bind(&exception_handled); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2446 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2447 | bool restore_context = context_restore_operand != NULL; | 
|  | 2448 | if (restore_context) { | 
|  | 2449 | ldr(cp, *context_restore_operand); | 
|  | 2450 | } | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 2451 | // LeaveExitFrame expects unwind space to be in a register. | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2452 | mov(r4, Operand(stack_space)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2453 | LeaveExitFrame(false, r4, !restore_context); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 2454 | mov(pc, lr); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2455 |  | 
|  | 2456 | bind(&promote_scheduled_exception); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2457 | { | 
|  | 2458 | FrameScope frame(this, StackFrame::INTERNAL); | 
|  | 2459 | CallExternalReference( | 
|  | 2460 | ExternalReference(Runtime::kPromoteScheduledException, isolate()), | 
|  | 2461 | 0); | 
|  | 2462 | } | 
|  | 2463 | jmp(&exception_handled); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2464 |  | 
|  | 2465 | // HandleScope limit has changed. Delete allocated extensions. | 
|  | 2466 | bind(&delete_allocated_handles); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2467 | str(r5, MemOperand(r9, kLimitOffset)); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2468 | mov(r4, r0); | 
| Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 2469 | PrepareCallCFunction(1, r5); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2470 | mov(r0, Operand(ExternalReference::isolate_address(isolate()))); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2471 | CallCFunction( | 
| Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 2472 | ExternalReference::delete_handle_scope_extensions(isolate()), 1); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2473 | mov(r0, r4); | 
|  | 2474 | jmp(&leave_exit_frame); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2475 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2476 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2477 |  | 
|  | 2478 | bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2479 | return has_frame_ || !stub->SometimesSetsUpAFrame(); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2480 | } | 
|  | 2481 |  | 
|  | 2482 |  | 
| Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 2483 | void MacroAssembler::IndexFromHash(Register hash, Register index) { | 
|  | 2484 | // If the hash field contains an array index, pick it out. The assert checks | 
|  | 2485 | // that the constant for the maximum number of digits of an array index | 
|  | 2486 | // cached in the hash field and the number of bits reserved for it do not | 
|  | 2487 | // conflict. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2488 | DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) < | 
| Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 2489 | (1 << String::kArrayIndexValueBits)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2490 | DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash); | 
| Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 2491 | } | 
|  | 2492 |  | 
|  | 2493 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2494 | void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) { | 
| Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 2495 | if (CpuFeatures::IsSupported(VFP3)) { | 
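|  |  | // A tagged smi is the value shifted left by one; converting it as a | 
|  |  | // fixed-point number with one fraction bit divides by two, so the smi | 
|  |  | // tag is stripped as part of the conversion. | 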
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2496 | vmov(value.low(), smi); | 
|  | 2497 | vcvt_f64_s32(value, 1); | 
| Iain Merrick | 9ac36c9 | 2010-09-13 15:29:50 +0100 | [diff] [blame] | 2498 | } else { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2499 | SmiUntag(ip, smi); | 
|  | 2500 | vmov(value.low(), ip); | 
|  | 2501 | vcvt_f64_s32(value, value.low()); | 
| Iain Merrick | 9ac36c9 | 2010-09-13 15:29:50 +0100 | [diff] [blame] | 2502 | } | 
|  | 2503 | } | 
|  | 2504 |  | 
|  | 2505 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2506 | void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input, | 
|  | 2507 | LowDwVfpRegister double_scratch) { | 
|  | 2508 | DCHECK(!double_input.is(double_scratch)); | 
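|  |  | // Round-trip through int32: if converting to int32 and back reproduces | 
|  |  | // the input, the double is exactly representable as an int32. The caller | 
|  |  | // reads the outcome from the condition flags set here. | 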
|  | 2509 | vcvt_s32_f64(double_scratch.low(), double_input); | 
|  | 2510 | vcvt_f64_s32(double_scratch, double_scratch.low()); | 
|  | 2511 | VFPCompareAndSetFlags(double_input, double_scratch); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 2512 | } | 
|  | 2513 |  | 
|  | 2514 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2515 | void MacroAssembler::TryDoubleToInt32Exact(Register result, | 
|  | 2516 | DwVfpRegister double_input, | 
|  | 2517 | LowDwVfpRegister double_scratch) { | 
|  | 2518 | DCHECK(!double_input.is(double_scratch)); | 
|  | 2519 | vcvt_s32_f64(double_scratch.low(), double_input); | 
|  | 2520 | vmov(result, double_scratch.low()); | 
|  | 2521 | vcvt_f64_s32(double_scratch, double_scratch.low()); | 
|  | 2522 | VFPCompareAndSetFlags(double_input, double_scratch); | 
|  | 2523 | } | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2524 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2525 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2526 | void MacroAssembler::TryInt32Floor(Register result, | 
|  | 2527 | DwVfpRegister double_input, | 
|  | 2528 | Register input_high, | 
|  | 2529 | LowDwVfpRegister double_scratch, | 
|  | 2530 | Label* done, | 
|  | 2531 | Label* exact) { | 
|  | 2532 | DCHECK(!result.is(input_high)); | 
|  | 2533 | DCHECK(!double_input.is(double_scratch)); | 
|  | 2534 | Label negative, exception; | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2535 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2536 | VmovHigh(input_high, double_input); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2537 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2538 | // Test for NaN and infinities. | 
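|  |  | // A sign-extended exponent field of -1 means all exponent bits are set, | 
|  |  | // i.e. the input is a NaN or an infinity. | 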
|  | 2539 | Sbfx(result, input_high, | 
|  | 2540 | HeapNumber::kExponentShift, HeapNumber::kExponentBits); | 
|  | 2541 | cmp(result, Operand(-1)); | 
|  | 2542 | b(eq, &exception); | 
|  | 2543 | // Test for values that can be exactly represented as a | 
|  | 2544 | // signed 32-bit integer. | 
|  | 2545 | TryDoubleToInt32Exact(result, double_input, double_scratch); | 
|  | 2546 | // If exact, return (result already fetched). | 
|  | 2547 | b(eq, exact); | 
|  | 2548 | cmp(input_high, Operand::Zero()); | 
|  | 2549 | b(mi, &negative); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2550 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2551 | // Input is in ]+0, +inf[. | 
|  | 2552 | // If result equals 0x7fffffff, the input was either out of range or | 
|  | 2553 | // in ]0x7fffffff, 0x80000000[. We ignore this last case, whose floor | 
|  | 2554 | // would still fit into an int32; i.e. we always treat such input as | 
|  | 2555 | // out of range and go to the exception path. | 
|  | 2556 | // If result < 0x7fffffff, go to done, result fetched. | 
|  | 2557 | cmn(result, Operand(1)); | 
|  | 2558 | b(mi, &exception); | 
|  | 2559 | b(done); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2560 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2561 | // Input is in ]-inf, -0[. | 
|  | 2562 | // If x is a non-integer negative number, | 
|  | 2563 | // floor(x) <=> round_to_zero(x) - 1. | 
|  | 2564 | bind(&negative); | 
|  | 2565 | sub(result, result, Operand(1), SetCC); | 
|  | 2566 | // If result is still negative, go to done, result fetched. | 
|  | 2567 | // Else, we had an overflow and we fall through to the exception path. | 
|  | 2568 | b(mi, done); | 
|  | 2569 | bind(&exception); | 
|  | 2570 | } | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2571 |  | 
|  |  |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2572 | void MacroAssembler::TryInlineTruncateDoubleToI(Register result, | 
|  | 2573 | DwVfpRegister double_input, | 
|  | 2574 | Label* done) { | 
|  | 2575 | LowDwVfpRegister double_scratch = kScratchDoubleReg; | 
|  | 2576 | vcvt_s32_f64(double_scratch.low(), double_input); | 
|  | 2577 | vmov(result, double_scratch.low()); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2578 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2579 | // If result is not saturated (0x7fffffff or 0x80000000), we are done. | 
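|  |  | // Subtracting 1 maps the two saturation values onto 0x7ffffffe and | 
|  |  | // 0x7fffffff; every other result becomes signed-less-than 0x7ffffffe, | 
|  |  | // so a single b(lt) sends every non-saturated result to done. | 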
|  | 2580 | sub(ip, result, Operand(1)); | 
|  | 2581 | cmp(ip, Operand(0x7ffffffe)); | 
|  | 2582 | b(lt, done); | 
|  | 2583 | } | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2584 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2585 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2586 | void MacroAssembler::TruncateDoubleToI(Register result, | 
|  | 2587 | DwVfpRegister double_input) { | 
|  | 2588 | Label done; | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2589 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2590 | TryInlineTruncateDoubleToI(result, double_input, &done); | 
|  | 2591 |  | 
|  | 2592 | // If we fell through, the inline version didn't succeed - call the stub. | 
|  | 2593 | push(lr); | 
|  | 2594 | sub(sp, sp, Operand(kDoubleSize));  // Put input on stack. | 
|  | 2595 | vstr(double_input, MemOperand(sp, 0)); | 
|  | 2596 |  | 
|  | 2597 | DoubleToIStub stub(isolate(), sp, result, 0, true, true); | 
|  | 2598 | CallStub(&stub); | 
|  | 2599 |  | 
|  | 2600 | add(sp, sp, Operand(kDoubleSize)); | 
|  | 2601 | pop(lr); | 
|  | 2602 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2603 | bind(&done); | 
|  | 2604 | } | 
|  | 2605 |  | 
|  | 2606 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2607 | void MacroAssembler::TruncateHeapNumberToI(Register result, | 
|  | 2608 | Register object) { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2609 | Label done; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2610 | LowDwVfpRegister double_scratch = kScratchDoubleReg; | 
|  | 2611 | DCHECK(!result.is(object)); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2612 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2613 | vldr(double_scratch, | 
|  | 2614 | MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag)); | 
|  | 2615 | TryInlineTruncateDoubleToI(result, double_scratch, &done); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2616 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2617 | // If we fell through, the inline version didn't succeed - call the stub. | 
|  | 2618 | push(lr); | 
|  | 2619 | DoubleToIStub stub(isolate(), | 
|  | 2620 | object, | 
|  | 2621 | result, | 
|  | 2622 | HeapNumber::kValueOffset - kHeapObjectTag, | 
|  | 2623 | true, | 
|  | 2624 | true); | 
|  | 2625 | CallStub(&stub); | 
|  | 2626 | pop(lr); | 
|  | 2627 |  | 
|  | 2628 | bind(&done); | 
|  | 2629 | } | 
|  | 2630 |  | 
|  | 2631 |  | 
|  | 2632 | void MacroAssembler::TruncateNumberToI(Register object, | 
|  | 2633 | Register result, | 
|  | 2634 | Register heap_number_map, | 
|  | 2635 | Register scratch1, | 
|  | 2636 | Label* not_number) { | 
|  | 2637 | Label done; | 
|  | 2638 | DCHECK(!result.is(object)); | 
|  | 2639 |  | 
|  | 2640 | UntagAndJumpIfSmi(result, object, &done); | 
|  | 2641 | JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number); | 
|  | 2642 | TruncateHeapNumberToI(result, object); | 
|  | 2643 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2644 | bind(&done); | 
|  | 2645 | } | 
|  | 2646 |  | 
|  | 2647 |  | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2648 | void MacroAssembler::GetLeastBitsFromSmi(Register dst, | 
|  | 2649 | Register src, | 
|  | 2650 | int num_least_bits) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2651 | if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) { | 
| Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 2652 | ubfx(dst, src, kSmiTagSize, num_least_bits); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2653 | } else { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2654 | SmiUntag(dst, src); | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2655 | and_(dst, dst, Operand((1 << num_least_bits) - 1)); | 
|  | 2656 | } | 
|  | 2657 | } | 
|  | 2658 |  | 
|  | 2659 |  | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2660 | void MacroAssembler::GetLeastBitsFromInt32(Register dst, | 
|  | 2661 | Register src, | 
|  | 2662 | int num_least_bits) { | 
|  | 2663 | and_(dst, src, Operand((1 << num_least_bits) - 1)); | 
|  | 2664 | } | 
|  | 2665 |  | 
|  | 2666 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2667 | void MacroAssembler::CallRuntime(const Runtime::Function* f, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2668 | int num_arguments, | 
|  | 2669 | SaveFPRegsMode save_doubles) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2670 | // All parameters are on the stack.  r0 has the return value after the call. | 
|  | 2671 |  | 
|  | 2672 | // If the expected number of arguments of the runtime function is | 
|  | 2673 | // constant, we check that the actual number of arguments match the | 
|  | 2674 | // expectation. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2675 | CHECK(f->nargs < 0 || f->nargs == num_arguments); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2676 |  | 
| Leon Clarke | 4515c47 | 2010-02-03 11:58:03 +0000 | [diff] [blame] | 2677 | // TODO(1236192): Most runtime routines don't need the number of | 
|  | 2678 | // arguments passed in because it is constant. At some point we | 
|  | 2679 | // should remove this need and make the runtime routine entry code | 
|  | 2680 | // smarter. | 
|  | 2681 | mov(r0, Operand(num_arguments)); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2682 | mov(r1, Operand(ExternalReference(f, isolate()))); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2683 | CEntryStub stub(isolate(), 1, save_doubles); | 
| Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 2684 | CallStub(&stub); | 
|  | 2685 | } | 
|  | 2686 |  | 
|  | 2687 |  | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 2688 | void MacroAssembler::CallExternalReference(const ExternalReference& ext, | 
|  | 2689 | int num_arguments) { | 
|  | 2690 | mov(r0, Operand(num_arguments)); | 
|  | 2691 | mov(r1, Operand(ext)); | 
|  | 2692 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2693 | CEntryStub stub(isolate(), 1); | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 2694 | CallStub(&stub); | 
|  | 2695 | } | 
|  | 2696 |  | 
|  | 2697 |  | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2698 | void MacroAssembler::TailCallExternalReference(const ExternalReference& ext, | 
|  | 2699 | int num_arguments, | 
|  | 2700 | int result_size) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2701 | // TODO(1236192): Most runtime routines don't need the number of | 
|  | 2702 | // arguments passed in because it is constant. At some point we | 
|  | 2703 | // should remove this need and make the runtime routine entry code | 
|  | 2704 | // smarter. | 
|  | 2705 | mov(r0, Operand(num_arguments)); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2706 | JumpToExternalReference(ext); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2707 | } | 
|  | 2708 |  | 
|  | 2709 |  | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2710 | void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, | 
|  | 2711 | int num_arguments, | 
|  | 2712 | int result_size) { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2713 | TailCallExternalReference(ExternalReference(fid, isolate()), | 
|  | 2714 | num_arguments, | 
|  | 2715 | result_size); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2716 | } | 
|  | 2717 |  | 
|  | 2718 |  | 
|  | 2719 | void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2720 | #if defined(__thumb__) | 
|  | 2721 | // Thumb mode builtin. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2722 | DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2723 | #endif | 
|  | 2724 | mov(r1, Operand(builtin)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2725 | CEntryStub stub(isolate(), 1); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2726 | Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 
|  | 2727 | } | 
|  | 2728 |  | 
|  | 2729 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2730 | void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 2731 | InvokeFlag flag, | 
|  | 2732 | const CallWrapper& call_wrapper) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2733 | // You can't call a builtin without a valid frame. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2734 | DCHECK(flag == JUMP_FUNCTION || has_frame()); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2735 |  | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 2736 | GetBuiltinEntry(r2, id); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 2737 | if (flag == CALL_FUNCTION) { | 
|  | 2738 | call_wrapper.BeforeCall(CallSize(r2)); | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 2739 | Call(r2); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 2740 | call_wrapper.AfterCall(); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2741 | } else { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2742 | DCHECK(flag == JUMP_FUNCTION); | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 2743 | Jump(r2); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2744 | } | 
|  | 2745 | } | 
|  | 2746 |  | 
|  | 2747 |  | 
| Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 2748 | void MacroAssembler::GetBuiltinFunction(Register target, | 
|  | 2749 | Builtins::JavaScript id) { | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2750 | // Load the builtins object into target register. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2751 | ldr(target, | 
|  | 2752 | MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2753 | ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset)); | 
| Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 2754 | // Load the JavaScript builtin function from the builtins object. | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 2755 | ldr(target, FieldMemOperand(target, | 
| Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 2756 | JSBuiltinsObject::OffsetOfFunctionWithId(id))); | 
|  | 2757 | } | 
|  | 2758 |  | 
|  | 2759 |  | 
|  | 2760 | void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2761 | DCHECK(!target.is(r1)); | 
| Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 2762 | GetBuiltinFunction(r1, id); | 
|  | 2763 | // Load the code entry point from the builtins object. | 
|  | 2764 | ldr(target, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2765 | } | 
|  | 2766 |  | 
|  | 2767 |  | 
|  | 2768 | void MacroAssembler::SetCounter(StatsCounter* counter, int value, | 
|  | 2769 | Register scratch1, Register scratch2) { | 
|  | 2770 | if (FLAG_native_code_counters && counter->Enabled()) { | 
|  | 2771 | mov(scratch1, Operand(value)); | 
|  | 2772 | mov(scratch2, Operand(ExternalReference(counter))); | 
|  | 2773 | str(scratch1, MemOperand(scratch2)); | 
|  | 2774 | } | 
|  | 2775 | } | 
|  | 2776 |  | 
|  | 2777 |  | 
|  | 2778 | void MacroAssembler::IncrementCounter(StatsCounter* counter, int value, | 
|  | 2779 | Register scratch1, Register scratch2) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2780 | DCHECK(value > 0); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2781 | if (FLAG_native_code_counters && counter->Enabled()) { | 
|  | 2782 | mov(scratch2, Operand(ExternalReference(counter))); | 
|  | 2783 | ldr(scratch1, MemOperand(scratch2)); | 
|  | 2784 | add(scratch1, scratch1, Operand(value)); | 
|  | 2785 | str(scratch1, MemOperand(scratch2)); | 
|  | 2786 | } | 
|  | 2787 | } | 
|  | 2788 |  | 
|  | 2789 |  | 
|  | 2790 | void MacroAssembler::DecrementCounter(StatsCounter* counter, int value, | 
|  | 2791 | Register scratch1, Register scratch2) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2792 | DCHECK(value > 0); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2793 | if (FLAG_native_code_counters && counter->Enabled()) { | 
|  | 2794 | mov(scratch2, Operand(ExternalReference(counter))); | 
|  | 2795 | ldr(scratch1, MemOperand(scratch2)); | 
|  | 2796 | sub(scratch1, scratch1, Operand(value)); | 
|  | 2797 | str(scratch1, MemOperand(scratch2)); | 
|  | 2798 | } | 
|  | 2799 | } | 
|  | 2800 |  | 
|  | 2801 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2802 | void MacroAssembler::Assert(Condition cond, BailoutReason reason) { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2803 | if (emit_debug_code()) | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2804 | Check(cond, reason); | 
| Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 2805 | } | 
|  | 2806 |  | 
|  | 2807 |  | 
| Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 2808 | void MacroAssembler::AssertFastElements(Register elements) { | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2809 | if (emit_debug_code()) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2810 | DCHECK(!elements.is(ip)); | 
| Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 2811 | Label ok; | 
|  | 2812 | push(elements); | 
|  | 2813 | ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset)); | 
|  | 2814 | LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | 
|  | 2815 | cmp(elements, ip); | 
|  | 2816 | b(eq, &ok); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 2817 | LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex); | 
|  | 2818 | cmp(elements, ip); | 
|  | 2819 | b(eq, &ok); | 
| Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 2820 | LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex); | 
|  | 2821 | cmp(elements, ip); | 
|  | 2822 | b(eq, &ok); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2823 | Abort(kJSObjectWithFastElementsMapHasSlowElements); | 
| Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 2824 | bind(&ok); | 
|  | 2825 | pop(elements); | 
|  | 2826 | } | 
|  | 2827 | } | 
|  | 2828 |  | 
|  | 2829 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2830 | void MacroAssembler::Check(Condition cond, BailoutReason reason) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2831 | Label L; | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2832 | b(cond, &L); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2833 | Abort(reason); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2834 | // will not return here | 
|  | 2835 | bind(&L); | 
|  | 2836 | } | 
|  | 2837 |  | 
|  | 2838 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2839 | void MacroAssembler::Abort(BailoutReason reason) { | 
| Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 2840 | Label abort_start; | 
|  | 2841 | bind(&abort_start); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2842 | #ifdef DEBUG | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2843 | const char* msg = GetBailoutReason(reason); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2844 | if (msg != NULL) { | 
|  | 2845 | RecordComment("Abort message: "); | 
|  | 2846 | RecordComment(msg); | 
|  | 2847 | } | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2848 |  | 
|  | 2849 | if (FLAG_trap_on_abort) { | 
|  | 2850 | stop(msg); | 
|  | 2851 | return; | 
|  | 2852 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2853 | #endif | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2854 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2855 | mov(r0, Operand(Smi::FromInt(reason))); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2856 | push(r0); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2857 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2858 | // Disable stub call restrictions to always allow calls to abort. | 
|  | 2859 | if (!has_frame_) { | 
|  | 2860 | // We don't actually want to generate a pile of code for this, so just | 
|  | 2861 | // claim there is a stack frame, without generating one. | 
|  | 2862 | FrameScope scope(this, StackFrame::NONE); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2863 | CallRuntime(Runtime::kAbort, 1); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2864 | } else { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2865 | CallRuntime(Runtime::kAbort, 1); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2866 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2867 | // will not return here | 
| Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 2868 | if (is_const_pool_blocked()) { | 
|  | 2869 | // If the calling code cares about the exact number of | 
|  | 2870 | // instructions generated, we insert padding here to keep the size | 
|  | 2871 | // of the Abort macro constant. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2872 | static const int kExpectedAbortInstructions = 7; | 
| Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 2873 | int abort_instructions = InstructionsGeneratedSince(&abort_start); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2874 | DCHECK(abort_instructions <= kExpectedAbortInstructions); | 
| Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 2875 | while (abort_instructions++ < kExpectedAbortInstructions) { | 
|  | 2876 | nop(); | 
|  | 2877 | } | 
|  | 2878 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2879 | } | 
|  | 2880 |  | 
|  | 2881 |  | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2882 | void MacroAssembler::LoadContext(Register dst, int context_chain_length) { | 
|  | 2883 | if (context_chain_length > 0) { | 
|  | 2884 | // Move up the chain of contexts to the context containing the slot. | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 2885 | ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2886 | for (int i = 1; i < context_chain_length; i++) { | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 2887 | ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2888 | } | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 2889 | } else { | 
|  | 2890 | // Slot is in the current function context.  Move it into the | 
|  | 2891 | // destination register in case we store into it (the write barrier | 
|  | 2892 | // cannot be allowed to destroy the context in cp). | 
|  | 2893 | mov(dst, cp); | 
|  | 2894 | } | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2895 | } | 
|  | 2896 |  | 
|  | 2897 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2898 | void MacroAssembler::LoadTransitionedArrayMapConditional( | 
|  | 2899 | ElementsKind expected_kind, | 
|  | 2900 | ElementsKind transitioned_kind, | 
|  | 2901 | Register map_in_out, | 
|  | 2902 | Register scratch, | 
|  | 2903 | Label* no_map_match) { | 
|  | 2904 | // Load the global or builtins object from the current context. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2905 | ldr(scratch, | 
|  | 2906 | MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 
|  | 2907 | ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2908 |  | 
|  | 2909 | // Check that the function's map is the same as the expected cached map. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2910 | ldr(scratch, | 
|  | 2911 | MemOperand(scratch, | 
|  | 2912 | Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX))); | 
|  | 2913 | size_t offset = expected_kind * kPointerSize + | 
|  | 2914 | FixedArrayBase::kHeaderSize; | 
|  | 2915 | ldr(ip, FieldMemOperand(scratch, offset)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2916 | cmp(map_in_out, ip); | 
|  | 2917 | b(ne, no_map_match); | 
|  | 2918 |  | 
|  | 2919 | // Use the transitioned cached map. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2920 | offset = transitioned_kind * kPointerSize + | 
|  | 2921 | FixedArrayBase::kHeaderSize; | 
|  | 2922 | ldr(map_in_out, FieldMemOperand(scratch, offset)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2923 | } | 
|  | 2924 |  | 
|  | 2925 |  | 
| Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 2926 | void MacroAssembler::LoadGlobalFunction(int index, Register function) { | 
|  | 2927 | // Load the global or builtins object from the current context. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2928 | ldr(function, | 
|  | 2929 | MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 
|  | 2930 | // Load the native context from the global or builtins object. | 
| Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 2931 | ldr(function, FieldMemOperand(function, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2932 | GlobalObject::kNativeContextOffset)); | 
|  | 2933 | // Load the function from the native context. | 
| Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 2934 | ldr(function, MemOperand(function, Context::SlotOffset(index))); | 
|  | 2935 | } | 
|  | 2936 |  | 
|  | 2937 |  | 
|  | 2938 | void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 
|  | 2939 | Register map, | 
|  | 2940 | Register scratch) { | 
|  | 2941 | // Load the initial map. The global functions all have initial maps. | 
|  | 2942 | ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2943 | if (emit_debug_code()) { | 
| Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 2944 | Label ok, fail; | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 2945 | CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK); | 
| Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 2946 | b(&ok); | 
|  | 2947 | bind(&fail); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2948 | Abort(kGlobalFunctionsMustHaveInitialMap); | 
| Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 2949 | bind(&ok); | 
|  | 2950 | } | 
|  | 2951 | } | 
|  | 2952 |  | 
|  | 2953 |  | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2954 | void MacroAssembler::JumpIfNotPowerOfTwoOrZero( | 
|  | 2955 | Register reg, | 
|  | 2956 | Register scratch, | 
|  | 2957 | Label* not_power_of_two_or_zero) { | 
|  | 2958 | sub(scratch, reg, Operand(1), SetCC); | 
|  | 2959 | b(mi, not_power_of_two_or_zero); | 
|  | 2960 | tst(scratch, reg); | 
|  | 2961 | b(ne, not_power_of_two_or_zero); | 
|  | 2962 | } | 
|  | 2963 |  | 
|  | 2964 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 2965 | void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg( | 
|  | 2966 | Register reg, | 
|  | 2967 | Register scratch, | 
|  | 2968 | Label* zero_and_neg, | 
|  | 2969 | Label* not_power_of_two) { | 
|  | 2970 | sub(scratch, reg, Operand(1), SetCC); | 
|  | 2971 | b(mi, zero_and_neg); | 
|  | 2972 | tst(scratch, reg); | 
|  | 2973 | b(ne, not_power_of_two); | 
|  | 2974 | } | 
|  | 2975 |  | 
|  | 2976 |  | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2977 | void MacroAssembler::JumpIfNotBothSmi(Register reg1, | 
|  | 2978 | Register reg2, | 
|  | 2979 | Label* on_not_both_smi) { | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 2980 | STATIC_ASSERT(kSmiTag == 0); | 
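|  |  | // The second tst is predicated on eq, so after the pair ne means that at | 
|  |  | // least one of the two registers has its tag bit set (is not a smi). | 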
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2981 | tst(reg1, Operand(kSmiTagMask)); | 
|  | 2982 | tst(reg2, Operand(kSmiTagMask), eq); | 
|  | 2983 | b(ne, on_not_both_smi); | 
|  | 2984 | } | 
|  | 2985 |  | 
|  | 2986 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2987 | void MacroAssembler::UntagAndJumpIfSmi( | 
|  | 2988 | Register dst, Register src, Label* smi_case) { | 
|  | 2989 | STATIC_ASSERT(kSmiTag == 0); | 
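|  |  | // SmiUntag with SetCC shifts the value right by one, moving the tag bit | 
|  |  | // into the carry flag: carry clear for a smi, set otherwise. | 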
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2990 | SmiUntag(dst, src, SetCC); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2991 | b(cc, smi_case);  // Shifter carry is not set for a smi. | 
|  | 2992 | } | 
|  | 2993 |  | 
|  | 2994 |  | 
|  | 2995 | void MacroAssembler::UntagAndJumpIfNotSmi( | 
|  | 2996 | Register dst, Register src, Label* non_smi_case) { | 
|  | 2997 | STATIC_ASSERT(kSmiTag == 0); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 2998 | SmiUntag(dst, src, SetCC); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 2999 | b(cs, non_smi_case);  // Shifter carry is set for a non-smi. | 
|  | 3000 | } | 
|  | 3001 |  | 
|  | 3002 |  | 
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 3003 | void MacroAssembler::JumpIfEitherSmi(Register reg1, | 
|  | 3004 | Register reg2, | 
|  | 3005 | Label* on_either_smi) { | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3006 | STATIC_ASSERT(kSmiTag == 0); | 
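|  |  | // The second tst executes only if reg1 is not a smi (ne); afterwards eq | 
|  |  | // means that at least one of the two registers is a smi. | 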
| Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 3007 | tst(reg1, Operand(kSmiTagMask)); | 
|  | 3008 | tst(reg2, Operand(kSmiTagMask), ne); | 
|  | 3009 | b(eq, on_either_smi); | 
|  | 3010 | } | 
|  | 3011 |  | 
|  | 3012 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3013 | void MacroAssembler::AssertNotSmi(Register object) { | 
|  | 3014 | if (emit_debug_code()) { | 
|  | 3015 | STATIC_ASSERT(kSmiTag == 0); | 
|  | 3016 | tst(object, Operand(kSmiTagMask)); | 
|  | 3017 | Check(ne, kOperandIsASmi); | 
|  | 3018 | } | 
| Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 3019 | } | 
|  | 3020 |  | 
|  | 3021 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3022 | void MacroAssembler::AssertSmi(Register object) { | 
|  | 3023 | if (emit_debug_code()) { | 
|  | 3024 | STATIC_ASSERT(kSmiTag == 0); | 
|  | 3025 | tst(object, Operand(kSmiTagMask)); | 
|  | 3026 | Check(eq, kOperandIsNotSmi); | 
|  | 3027 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3028 | } | 
|  | 3029 |  | 
|  | 3030 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3031 | void MacroAssembler::AssertString(Register object) { | 
|  | 3032 | if (emit_debug_code()) { | 
|  | 3033 | STATIC_ASSERT(kSmiTag == 0); | 
|  | 3034 | tst(object, Operand(kSmiTagMask)); | 
|  | 3035 | Check(ne, kOperandIsASmiAndNotAString); | 
|  | 3036 | push(object); | 
|  | 3037 | ldr(object, FieldMemOperand(object, HeapObject::kMapOffset)); | 
|  | 3038 | CompareInstanceType(object, object, FIRST_NONSTRING_TYPE); | 
|  | 3039 | pop(object); | 
|  | 3040 | Check(lo, kOperandIsNotAString); | 
|  | 3041 | } | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3042 | } | 
|  | 3043 |  | 
|  | 3044 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3045 | void MacroAssembler::AssertName(Register object) { | 
|  | 3046 | if (emit_debug_code()) { | 
|  | 3047 | STATIC_ASSERT(kSmiTag == 0); | 
|  | 3048 | tst(object, Operand(kSmiTagMask)); | 
|  | 3049 | Check(ne, kOperandIsASmiAndNotAName); | 
|  | 3050 | push(object); | 
|  | 3051 | ldr(object, FieldMemOperand(object, HeapObject::kMapOffset)); | 
|  | 3052 | CompareInstanceType(object, object, LAST_NAME_TYPE); | 
|  | 3053 | pop(object); | 
|  | 3054 | Check(le, kOperandIsNotAName); | 
|  | 3055 | } | 
|  | 3056 | } | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3057 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3058 |  | 
|  | 3059 | void MacroAssembler::AssertUndefinedOrAllocationSite(Register object, | 
|  | 3060 | Register scratch) { | 
|  | 3061 | if (emit_debug_code()) { | 
|  | 3062 | Label done_checking; | 
|  | 3063 | AssertNotSmi(object); | 
|  | 3064 | CompareRoot(object, Heap::kUndefinedValueRootIndex); | 
|  | 3065 | b(eq, &done_checking); | 
|  | 3066 | ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 
|  | 3067 | CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex); | 
|  | 3068 | Assert(eq, kExpectedUndefinedOrCell); | 
|  | 3069 | bind(&done_checking); | 
|  | 3070 | } | 
|  | 3071 | } | 
|  | 3072 |  | 
|  | 3073 |  | 
|  | 3074 | void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) { | 
|  | 3075 | if (emit_debug_code()) { | 
|  | 3076 | CompareRoot(reg, index); | 
|  | 3077 | Check(eq, kHeapNumberMapRegisterClobbered); | 
|  | 3078 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3079 | } | 
|  | 3080 |  | 
|  | 3081 |  | 
|  | 3082 | void MacroAssembler::JumpIfNotHeapNumber(Register object, | 
|  | 3083 | Register heap_number_map, | 
|  | 3084 | Register scratch, | 
|  | 3085 | Label* on_not_heap_number) { | 
|  | 3086 | ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3087 | AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3088 | cmp(scratch, heap_number_map); | 
|  | 3089 | b(ne, on_not_heap_number); | 
|  | 3090 | } | 
|  | 3091 |  | 
|  | 3092 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3093 | void MacroAssembler::LookupNumberStringCache(Register object, | 
|  | 3094 | Register result, | 
|  | 3095 | Register scratch1, | 
|  | 3096 | Register scratch2, | 
|  | 3097 | Register scratch3, | 
|  | 3098 | Label* not_found) { | 
|  | 3099 | // Use of registers. Register result is used as a temporary. | 
|  | 3100 | Register number_string_cache = result; | 
|  | 3101 | Register mask = scratch3; | 
|  | 3102 |  | 
|  | 3103 | // Load the number string cache. | 
|  | 3104 | LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); | 
|  | 3105 |  | 
|  | 3106 | // Make the hash mask from the length of the number string cache. It | 
|  | 3107 | // contains two elements (number and string) for each cache entry. | 
|  | 3108 | ldr(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset)); | 
|  | 3109 | // Divide length by two (length is a smi). | 
|  | 3110 | mov(mask, Operand(mask, ASR, kSmiTagSize + 1)); | 
|  | 3111 | sub(mask, mask, Operand(1));  // Make mask. | 
|  | 3112 |  | 
|  | 3113 | // Calculate the entry in the number string cache. The hash value in the | 
|  | 3114 | // number string cache for smis is just the smi value, and the hash for | 
|  | 3115 | // doubles is the xor of the upper and lower words. See | 
|  | 3116 | // Heap::GetNumberStringCache. | 
|  | 3117 | Label is_smi; | 
|  | 3118 | Label load_result_from_cache; | 
|  | 3119 | JumpIfSmi(object, &is_smi); | 
|  | 3120 | CheckMap(object, | 
|  | 3121 | scratch1, | 
|  | 3122 | Heap::kHeapNumberMapRootIndex, | 
|  | 3123 | not_found, | 
|  | 3124 | DONT_DO_SMI_CHECK); | 
|  | 3125 |  | 
|  | 3126 | STATIC_ASSERT(8 == kDoubleSize); | 
|  | 3127 | add(scratch1, | 
|  | 3128 | object, | 
|  | 3129 | Operand(HeapNumber::kValueOffset - kHeapObjectTag)); | 
|  | 3130 | ldm(ia, scratch1, scratch1.bit() | scratch2.bit()); | 
|  | 3131 | eor(scratch1, scratch1, Operand(scratch2)); | 
|  | 3132 | and_(scratch1, scratch1, Operand(mask)); | 
|  | 3133 |  | 
|  | 3134 | // Calculate address of entry in string cache: each entry consists | 
|  | 3135 | // of two pointer sized fields. | 
|  | 3136 | add(scratch1, | 
|  | 3137 | number_string_cache, | 
|  | 3138 | Operand(scratch1, LSL, kPointerSizeLog2 + 1)); | 
|  | 3139 |  | 
|  | 3140 | Register probe = mask; | 
|  | 3141 | ldr(probe, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); | 
|  | 3142 | JumpIfSmi(probe, not_found); | 
|  | 3143 | sub(scratch2, object, Operand(kHeapObjectTag)); | 
|  | 3144 | vldr(d0, scratch2, HeapNumber::kValueOffset); | 
|  | 3145 | sub(probe, probe, Operand(kHeapObjectTag)); | 
|  | 3146 | vldr(d1, probe, HeapNumber::kValueOffset); | 
|  | 3147 | VFPCompareAndSetFlags(d0, d1); | 
|  | 3148 | b(ne, not_found);  // The cache did not contain this value. | 
|  | 3149 | b(&load_result_from_cache); | 
|  | 3150 |  | 
|  | 3151 | bind(&is_smi); | 
|  | 3152 | Register scratch = scratch1; | 
|  | 3153 | and_(scratch, mask, Operand(object, ASR, 1)); | 
|  | 3154 | // Calculate address of entry in string cache: each entry consists | 
|  | 3155 | // of two pointer sized fields. | 
|  | 3156 | add(scratch, | 
|  | 3157 | number_string_cache, | 
|  | 3158 | Operand(scratch, LSL, kPointerSizeLog2 + 1)); | 
|  | 3159 |  | 
|  | 3160 | // Check if the entry is the smi we are looking for. | 
|  | 3161 | ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize)); | 
|  | 3162 | cmp(object, probe); | 
|  | 3163 | b(ne, not_found); | 
|  | 3164 |  | 
|  | 3165 | // Get the result from the cache. | 
|  | 3166 | bind(&load_result_from_cache); | 
|  | 3167 | ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize)); | 
|  | 3168 | IncrementCounter(isolate()->counters()->number_to_string_native(), | 
|  | 3169 | 1, | 
|  | 3170 | scratch1, | 
|  | 3171 | scratch2); | 
|  | 3172 | } | 
|  | 3173 |  | 
|  | 3174 |  | 
|  | 3175 | void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings( | 
|  | 3176 | Register first, Register second, Register scratch1, Register scratch2, | 
| Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 3177 | Label* failure) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3178 | // Test that both first and second are sequential one-byte strings. | 
| Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 3179 | // Assume that they are non-smis. | 
|  | 3180 | ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset)); | 
|  | 3181 | ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset)); | 
|  | 3182 | ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 
|  | 3183 | ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3184 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3185 | JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1, | 
|  | 3186 | scratch2, failure); | 
| Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 3187 | } | 
|  | 3188 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3189 | void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first, | 
|  | 3190 | Register second, | 
|  | 3191 | Register scratch1, | 
|  | 3192 | Register scratch2, | 
|  | 3193 | Label* failure) { | 
| Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 3194 | // Check that neither is a smi. | 
| Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 3195 | and_(scratch1, first, Operand(second)); | 
| Ben Murdoch | 3fb3ca8 | 2011-12-02 17:19:32 +0000 | [diff] [blame] | 3196 | JumpIfSmi(scratch1, failure); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3197 | JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1, | 
|  | 3198 | scratch2, failure); | 
|  | 3199 | } | 
|  | 3200 |  | 
|  | 3201 |  | 
|  | 3202 | void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg, | 
|  | 3203 | Label* not_unique_name) { | 
|  | 3204 | STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); | 
|  | 3205 | Label succeed; | 
|  | 3206 | tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask)); | 
|  | 3207 | b(eq, &succeed); | 
|  | 3208 | cmp(reg, Operand(SYMBOL_TYPE)); | 
|  | 3209 | b(ne, not_unique_name); | 
|  | 3210 |  | 
|  | 3211 | bind(&succeed); | 
| Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 3212 | } | 
|  | 3213 |  | 
| Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3214 |  | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3215 | // Allocates a heap number or jumps to the gc_required label if the young | 
|  | 3216 | // space is full and a scavenge is needed. | 
|  | 3217 | void MacroAssembler::AllocateHeapNumber(Register result, | 
|  | 3218 | Register scratch1, | 
|  | 3219 | Register scratch2, | 
| Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 3220 | Register heap_number_map, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3221 | Label* gc_required, | 
|  | 3222 | TaggingMode tagging_mode, | 
|  | 3223 | MutableMode mode) { | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3224 | // Allocate an object in the heap for the heap number and tag it as a heap | 
|  | 3225 | // object. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3226 | Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, | 
|  | 3227 | tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS); | 
|  | 3228 |  | 
|  | 3229 | Heap::RootListIndex map_index = mode == MUTABLE | 
|  | 3230 | ? Heap::kMutableHeapNumberMapRootIndex | 
|  | 3231 | : Heap::kHeapNumberMapRootIndex; | 
|  | 3232 | AssertIsRoot(heap_number_map, map_index); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3233 |  | 
| Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 3234 | // Store heap number map in the allocated object. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3235 | if (tagging_mode == TAG_RESULT) { | 
|  | 3236 | str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); | 
|  | 3237 | } else { | 
|  | 3238 | str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); | 
|  | 3239 | } | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3240 | } | 
|  | 3241 |  | 
|  | 3242 |  | 
| Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3243 | void MacroAssembler::AllocateHeapNumberWithValue(Register result, | 
|  | 3244 | DwVfpRegister value, | 
|  | 3245 | Register scratch1, | 
|  | 3246 | Register scratch2, | 
|  | 3247 | Register heap_number_map, | 
|  | 3248 | Label* gc_required) { | 
|  | 3249 | AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required); | 
|  | 3250 | sub(scratch1, result, Operand(kHeapObjectTag)); | 
|  | 3251 | vstr(value, scratch1, HeapNumber::kValueOffset); | 
|  | 3252 | } | 
|  | 3253 |  | 
|  | 3254 |  | 
| Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 3255 | // Copies a fixed number of fields of heap objects from src to dst. | 
|  | 3256 | void MacroAssembler::CopyFields(Register dst, | 
|  | 3257 | Register src, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3258 | LowDwVfpRegister double_scratch, | 
| Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 3259 | int field_count) { | 
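|  |  | // field_count is given in pointer-size words; pairs of words are copied | 
|  |  | // through double_scratch, with a single-word tail handled at the end. | 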
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3260 | int double_count = field_count / (DwVfpRegister::kSizeInBytes / kPointerSize); | 
|  | 3261 | for (int i = 0; i < double_count; i++) { | 
|  | 3262 | vldr(double_scratch, FieldMemOperand(src, i * DwVfpRegister::kSizeInBytes)); | 
|  | 3263 | vstr(double_scratch, FieldMemOperand(dst, i * DwVfpRegister::kSizeInBytes)); | 
| Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 3264 | } | 
| Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 3265 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3266 | STATIC_ASSERT(SwVfpRegister::kSizeInBytes == kPointerSize); | 
|  | 3267 | STATIC_ASSERT(2 * SwVfpRegister::kSizeInBytes == DwVfpRegister::kSizeInBytes); | 
|  | 3268 |  | 
|  | 3269 | int remain = field_count % (DwVfpRegister::kSizeInBytes / kPointerSize); | 
|  | 3270 | if (remain != 0) { | 
|  | 3271 | vldr(double_scratch.low(), | 
|  | 3272 | FieldMemOperand(src, (field_count - 1) * kPointerSize)); | 
|  | 3273 | vstr(double_scratch.low(), | 
|  | 3274 | FieldMemOperand(dst, (field_count - 1) * kPointerSize)); | 
| Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 3275 | } | 
|  | 3276 | } | 
|  | 3277 |  | 
|  | 3278 |  | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3279 | void MacroAssembler::CopyBytes(Register src, | 
|  | 3280 | Register dst, | 
|  | 3281 | Register length, | 
|  | 3282 | Register scratch) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3283 | Label align_loop_1, word_loop, byte_loop, byte_loop_1, done; | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3284 |  | 
|  | 3285 | // Align src before copying in word size chunks. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3286 | cmp(length, Operand(kPointerSize)); | 
|  | 3287 | b(le, &byte_loop); | 
|  | 3288 |  | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3289 | bind(&align_loop_1); | 
|  | 3290 | tst(src, Operand(kPointerSize - 1)); | 
|  | 3291 | b(eq, &word_loop); | 
|  | 3292 | ldrb(scratch, MemOperand(src, 1, PostIndex)); | 
|  | 3293 | strb(scratch, MemOperand(dst, 1, PostIndex)); | 
|  | 3294 | sub(length, length, Operand(1), SetCC); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3295 | b(&align_loop_1); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3296 | // Copy bytes in word size chunks. | 
|  | 3297 | bind(&word_loop); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3298 | if (emit_debug_code()) { | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3299 | tst(src, Operand(kPointerSize - 1)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3300 | Assert(eq, kExpectingAlignmentForCopyBytes); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3301 | } | 
|  | 3302 | cmp(length, Operand(kPointerSize)); | 
|  | 3303 | b(lt, &byte_loop); | 
|  | 3304 | ldr(scratch, MemOperand(src, kPointerSize, PostIndex)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3305 | if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) { | 
|  | 3306 | str(scratch, MemOperand(dst, kPointerSize, PostIndex)); | 
|  | 3307 | } else { | 
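|  |  | // Unaligned word stores are not available, so store the loaded word one | 
|  |  | // byte at a time, least significant byte first. | 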
|  | 3308 | strb(scratch, MemOperand(dst, 1, PostIndex)); | 
|  | 3309 | mov(scratch, Operand(scratch, LSR, 8)); | 
|  | 3310 | strb(scratch, MemOperand(dst, 1, PostIndex)); | 
|  | 3311 | mov(scratch, Operand(scratch, LSR, 8)); | 
|  | 3312 | strb(scratch, MemOperand(dst, 1, PostIndex)); | 
|  | 3313 | mov(scratch, Operand(scratch, LSR, 8)); | 
|  | 3314 | strb(scratch, MemOperand(dst, 1, PostIndex)); | 
|  | 3315 | } | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3316 | sub(length, length, Operand(kPointerSize)); | 
|  | 3317 | b(&word_loop); | 
|  | 3318 |  | 
|  | 3319 | // Copy the last bytes if any left. | 
|  | 3320 | bind(&byte_loop); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3321 | cmp(length, Operand::Zero()); | 
| Ben Murdoch | e0cee9b | 2011-05-25 10:26:03 +0100 | [diff] [blame] | 3322 | b(eq, &done); | 
|  | 3323 | bind(&byte_loop_1); | 
|  | 3324 | ldrb(scratch, MemOperand(src, 1, PostIndex)); | 
|  | 3325 | strb(scratch, MemOperand(dst, 1, PostIndex)); | 
|  | 3326 | sub(length, length, Operand(1), SetCC); | 
|  | 3327 | b(ne, &byte_loop_1); | 
|  | 3328 | bind(&done); | 
|  | 3329 | } | 
|  | 3330 |  | 
|  | 3331 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3332 | void MacroAssembler::InitializeFieldsWithFiller(Register start_offset, | 
|  | 3333 | Register end_offset, | 
|  | 3334 | Register filler) { | 
|  | 3335 | Label loop, entry; | 
|  | 3336 | b(&entry); | 
|  | 3337 | bind(&loop); | 
|  | 3338 | str(filler, MemOperand(start_offset, kPointerSize, PostIndex)); | 
|  | 3339 | bind(&entry); | 
|  | 3340 | cmp(start_offset, end_offset); | 
|  | 3341 | b(lt, &loop); | 
|  | 3342 | } | 
|  | 3343 |  | 
|  | 3344 |  | 
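|  |  | // Leaves the condition flags set: ne if d16-d31 are available (VFP32DREGS), | 
|  |  | // eq if only d0-d15 are present. Callers predicate on these flags. | 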
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3345 | void MacroAssembler::CheckFor32DRegs(Register scratch) { | 
|  | 3346 | mov(scratch, Operand(ExternalReference::cpu_features())); | 
|  | 3347 | ldr(scratch, MemOperand(scratch)); | 
|  | 3348 | tst(scratch, Operand(1u << VFP32DREGS)); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3349 | } | 
|  | 3350 |  | 
|  | 3351 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3352 | void MacroAssembler::SaveFPRegs(Register location, Register scratch) { | 
|  | 3353 | CheckFor32DRegs(scratch); | 
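|  |  | // The vstm for d16-d31 only executes when 32 D-registers are available; | 
|  |  | // otherwise the sub skips the same amount of space, so the reserved area | 
|  |  | // is always 32 doubles regardless of the CPU. | 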
|  | 3354 | vstm(db_w, location, d16, d31, ne); | 
|  | 3355 | sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq); | 
|  | 3356 | vstm(db_w, location, d0, d15); | 
|  | 3357 | } | 
|  | 3358 |  | 
|  | 3359 |  | 
|  | 3360 | void MacroAssembler::RestoreFPRegs(Register location, Register scratch) { | 
|  | 3361 | CheckFor32DRegs(scratch); | 
|  | 3362 | vldm(ia_w, location, d0, d15); | 
|  | 3363 | vldm(ia_w, location, d16, d31, ne); | 
|  | 3364 | add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq); | 
|  | 3365 | } | 
|  | 3366 |  | 
|  | 3367 |  | 
|  | 3368 | void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte( | 
|  | 3369 | Register first, Register second, Register scratch1, Register scratch2, | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3370 | Label* failure) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3371 | const int kFlatOneByteStringMask = | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3372 | kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3373 | const int kFlatOneByteStringTag = | 
|  | 3374 | kStringTag | kOneByteStringTag | kSeqStringTag; | 
|  | 3375 | and_(scratch1, first, Operand(kFlatOneByteStringMask)); | 
|  | 3376 | and_(scratch2, second, Operand(kFlatOneByteStringMask)); | 
|  | 3377 | cmp(scratch1, Operand(kFlatOneByteStringTag)); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3378 | // Ignore second test if first test failed. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3379 | cmp(scratch2, Operand(kFlatOneByteStringTag), eq); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3380 | b(ne, failure); | 
|  | 3381 | } | 
|  | 3382 |  | 
|  | 3383 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3384 | void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type, | 
|  | 3385 | Register scratch, | 
|  | 3386 | Label* failure) { | 
|  | 3387 | const int kFlatOneByteStringMask = | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3388 | kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask; | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3389 | const int kFlatOneByteStringTag = | 
|  | 3390 | kStringTag | kOneByteStringTag | kSeqStringTag; | 
|  | 3391 | and_(scratch, type, Operand(kFlatOneByteStringMask)); | 
|  | 3392 | cmp(scratch, Operand(kFlatOneByteStringTag)); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3393 | b(ne, failure); | 
|  | 3394 | } | 
|  | 3395 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3396 | static const int kRegisterPassedArguments = 4; | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3397 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3398 |  | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3399 | int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments, | 
|  | 3400 | int num_double_arguments) { | 
|  | 3401 | int stack_passed_words = 0; | 
|  | 3402 | if (use_eabi_hardfloat()) { | 
|  | 3403 | // In the hard floating point calling convention, we can use | 
|  | 3404 | // all double registers to pass doubles. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3405 | if (num_double_arguments > DoubleRegister::NumRegisters()) { | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3406 | stack_passed_words += | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3407 | 2 * (num_double_arguments - DoubleRegister::NumRegisters()); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3408 | } | 
|  | 3409 | } else { | 
|  | 3410 | // In the soft floating point calling convention, every double | 
|  | 3411 | // argument is passed using two registers. | 
|  | 3412 | num_reg_arguments += 2 * num_double_arguments; | 
|  | 3413 | } | 
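|  |  | // For example, with the soft-float convention 2 integer and 2 double | 
|  |  | // arguments count as 2 + 2 * 2 = 6 register-sized arguments. | 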
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3414 | // Up to four simple arguments are passed in registers r0..r3. | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3415 | if (num_reg_arguments > kRegisterPassedArguments) { | 
|  | 3416 | stack_passed_words += num_reg_arguments - kRegisterPassedArguments; | 
|  | 3417 | } | 
|  | 3418 | return stack_passed_words; | 
|  | 3419 | } | 
|  | 3420 |  | 
|  | 3421 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3422 | void MacroAssembler::EmitSeqStringSetCharCheck(Register string, | 
|  | 3423 | Register index, | 
|  | 3424 | Register value, | 
|  | 3425 | uint32_t encoding_mask) { | 
|  | 3426 | Label is_object; | 
|  | 3427 | SmiTst(string); | 
|  | 3428 | Check(ne, kNonObject); | 
|  | 3429 |  | 
|  | 3430 | ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset)); | 
|  | 3431 | ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset)); | 
|  | 3432 |  | 
|  | 3433 | and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask)); | 
|  | 3434 | cmp(ip, Operand(encoding_mask)); | 
|  | 3435 | Check(eq, kUnexpectedStringType); | 
|  | 3436 |  | 
|  | 3437 | // The index is assumed to come in untagged. Tag it to compare with the | 
|  | 3438 | // string length without using a temp register; it is restored at the end of | 
|  | 3439 | // this function. | 
|  | 3440 | Label index_tag_ok, index_tag_bad; | 
|  | 3441 | TrySmiTag(index, index, &index_tag_bad); | 
|  | 3442 | b(&index_tag_ok); | 
|  | 3443 | bind(&index_tag_bad); | 
|  | 3444 | Abort(kIndexIsTooLarge); | 
|  | 3445 | bind(&index_tag_ok); | 
|  | 3446 |  | 
|  | 3447 | ldr(ip, FieldMemOperand(string, String::kLengthOffset)); | 
|  | 3448 | cmp(index, ip); | 
|  | 3449 | Check(lt, kIndexIsTooLarge); | 
|  | 3450 |  | 
|  | 3451 | cmp(index, Operand(Smi::FromInt(0))); | 
|  | 3452 | Check(ge, kIndexIsNegative); | 
|  | 3453 |  | 
|  | 3454 | SmiUntag(index, index); | 
|  | 3455 | } | 
|  | 3456 |  | 
|  | 3457 |  | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3458 | void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 
|  | 3459 | int num_double_arguments, | 
|  | 3460 | Register scratch) { | 
|  | 3461 | int frame_alignment = ActivationFrameAlignment(); | 
|  | 3462 | int stack_passed_arguments = CalculateStackPassedWords( | 
|  | 3463 | num_reg_arguments, num_double_arguments); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3464 | if (frame_alignment > kPointerSize) { | 
|  | 3465 | // Make the stack end at alignment and make room for the stack-passed | 
|  | 3466 | // arguments and the original value of sp. | 
|  | 3467 | mov(scratch, sp); | 
|  | 3468 | sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3469 | DCHECK(base::bits::IsPowerOfTwo32(frame_alignment)); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3470 | and_(sp, sp, Operand(-frame_alignment)); | 
|  | 3471 | str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 
|  | 3472 | } else { | 
|  | 3473 | sub(sp, sp, Operand(stack_passed_arguments * kPointerSize)); | 
|  | 3474 | } | 
|  | 3475 | } | 
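|  |  | // A matching CallCFunction with the same argument counts must follow; it | 
|  |  | // undoes the stack adjustment made here (see CallCFunctionHelper below). | 
|  |  | // Illustrative use (names are placeholders): | 
|  |  | //   PrepareCallCFunction(1, 0, scratch); | 
|  |  | //   mov(r0, Operand(some_value)); | 
|  |  | //   CallCFunction(some_external_reference, 1); | 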
|  | 3476 |  | 
|  | 3477 |  | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3478 | void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 
|  | 3479 | Register scratch) { | 
|  | 3480 | PrepareCallCFunction(num_reg_arguments, 0, scratch); | 
|  | 3481 | } | 
|  | 3482 |  | 
|  | 3483 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3484 | void MacroAssembler::MovToFloatParameter(DwVfpRegister src) { | 
|  | 3485 | DCHECK(src.is(d0)); | 
|  | 3486 | if (!use_eabi_hardfloat()) { | 
|  | 3487 | vmov(r0, r1, src); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3488 | } | 
|  | 3489 | } | 
|  | 3490 |  | 
|  | 3491 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3492 | // On ARM this is just a synonym to make the purpose clear. | 
|  | 3493 | void MacroAssembler::MovToFloatResult(DwVfpRegister src) { | 
|  | 3494 | MovToFloatParameter(src); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3495 | } | 
|  | 3496 |  | 
|  | 3497 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3498 | void MacroAssembler::MovToFloatParameters(DwVfpRegister src1, | 
|  | 3499 | DwVfpRegister src2) { | 
|  | 3500 | DCHECK(src1.is(d0)); | 
|  | 3501 | DCHECK(src2.is(d1)); | 
|  | 3502 | if (!use_eabi_hardfloat()) { | 
|  | 3503 | vmov(r0, r1, src1); | 
|  | 3504 | vmov(r2, r3, src2); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3505 | } | 
|  | 3506 | } | 
|  | 3507 |  | 
|  | 3508 |  | 
|  | 3509 | void MacroAssembler::CallCFunction(ExternalReference function, | 
|  | 3510 | int num_reg_arguments, | 
|  | 3511 | int num_double_arguments) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3512 | mov(ip, Operand(function)); | 
|  | 3513 | CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3514 | } | 
|  | 3515 |  | 
|  | 3516 |  | 
|  | 3517 | void MacroAssembler::CallCFunction(Register function, | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3518 | int num_reg_arguments, | 
|  | 3519 | int num_double_arguments) { | 
|  | 3520 | CallCFunctionHelper(function, num_reg_arguments, num_double_arguments); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3521 | } | 
|  | 3522 |  | 
|  | 3523 |  | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3524 | void MacroAssembler::CallCFunction(ExternalReference function, | 
|  | 3525 | int num_arguments) { | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3526 | CallCFunction(function, num_arguments, 0); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3527 | } | 
|  | 3528 |  | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3529 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3530 | void MacroAssembler::CallCFunction(Register function, | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3531 | int num_arguments) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3532 | CallCFunction(function, num_arguments, 0); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3533 | } | 
|  | 3534 |  | 
|  | 3535 |  | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3536 | void MacroAssembler::CallCFunctionHelper(Register function, | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3537 | int num_reg_arguments, | 
|  | 3538 | int num_double_arguments) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3539 | DCHECK(has_frame()); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3540 | // Make sure that the stack is aligned before calling a C function unless | 
|  | 3541 | // running in the simulator. The simulator has its own alignment check which | 
|  | 3542 | // provides more information. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3543 | #if V8_HOST_ARCH_ARM | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3544 | if (emit_debug_code()) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3545 | int frame_alignment = base::OS::ActivationFrameAlignment(); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3546 | int frame_alignment_mask = frame_alignment - 1; | 
|  | 3547 | if (frame_alignment > kPointerSize) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3548 | DCHECK(base::bits::IsPowerOfTwo32(frame_alignment)); | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3549 | Label alignment_as_expected; | 
|  | 3550 | tst(sp, Operand(frame_alignment_mask)); | 
|  | 3551 | b(eq, &alignment_as_expected); | 
|  | 3552 | // Don't use Check here, as it will call Runtime_Abort, possibly | 
|  | 3553 | // re-entering here. | 
|  | 3554 | stop("Unexpected alignment"); | 
|  | 3555 | bind(&alignment_as_expected); | 
|  | 3556 | } | 
|  | 3557 | } | 
|  | 3558 | #endif | 
|  | 3559 |  | 
|  | 3560 | // Just call directly. The function called cannot cause a GC, or | 
|  | 3561 | // allow preemption, so the return address in the link register | 
|  | 3562 | // stays correct. | 
|  | 3563 | Call(function); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3564 | int stack_passed_arguments = CalculateStackPassedWords( | 
|  | 3565 | num_reg_arguments, num_double_arguments); | 
|  | 3566 | if (ActivationFrameAlignment() > kPointerSize) { | 
| Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3567 | ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 
|  | 3568 | } else { | 
|  | 3569 | add(sp, sp, Operand(stack_passed_arguments * kPointerSize)); | 
|  | 3570 | } | 
|  | 3571 | } | 
|  | 3572 |  | 
|  | 3573 |  | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3574 | void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3575 | Register result, | 
|  | 3576 | Register scratch) { | 
|  | 3577 | Label small_constant_pool_load, load_result; | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3578 | ldr(result, MemOperand(ldr_location)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3579 |  | 
|  | 3580 | if (FLAG_enable_ool_constant_pool) { | 
|  | 3581 | // Check if this is an extended constant pool load. | 
|  | 3582 | and_(scratch, result, Operand(GetConsantPoolLoadMask())); | 
|  | 3583 | teq(scratch, Operand(GetConsantPoolLoadPattern())); | 
|  | 3584 | b(eq, &small_constant_pool_load); | 
|  | 3585 | if (emit_debug_code()) { | 
|  | 3586 | // Check that the instruction sequence is: | 
|  | 3587 | //   movw reg, #offset_low | 
|  | 3588 | //   movt reg, #offset_high | 
|  | 3589 | //   ldr reg, [pp, reg] | 
|  | 3590 | Instr patterns[] = {GetMovWPattern(), GetMovTPattern(), | 
|  | 3591 | GetLdrPpRegOffsetPattern()}; | 
|  | 3592 | for (int i = 0; i < 3; i++) { | 
|  | 3593 | ldr(result, MemOperand(ldr_location, i * kInstrSize)); | 
|  | 3594 | and_(result, result, Operand(patterns[i])); | 
|  | 3595 | cmp(result, Operand(patterns[i])); | 
|  | 3596 | Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); | 
|  | 3597 | } | 
|  | 3598 | // Result was clobbered. Restore it. | 
|  | 3599 | ldr(result, MemOperand(ldr_location)); | 
|  | 3600 | } | 
|  | 3601 |  | 
|  | 3602 | // Get the offset into the constant pool.  First extract movw immediate into | 
|  | 3603 | // result. | 
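|  |  | // (movw/movt encode their 16-bit immediate as imm4:imm12, in instruction | 
|  |  | // bits 19:16 and 11:0, which is what the masking below reassembles.) | 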
|  | 3604 | and_(scratch, result, Operand(0xfff)); | 
|  | 3605 | mov(ip, Operand(result, LSR, 4)); | 
|  | 3606 | and_(ip, ip, Operand(0xf000)); | 
|  | 3607 | orr(result, scratch, Operand(ip)); | 
|  | 3608 | // Then extract movt immediate and or into result. | 
|  | 3609 | ldr(scratch, MemOperand(ldr_location, kInstrSize)); | 
|  | 3610 | and_(ip, scratch, Operand(0xf0000)); | 
|  | 3611 | orr(result, result, Operand(ip, LSL, 12)); | 
|  | 3612 | and_(scratch, scratch, Operand(0xfff)); | 
|  | 3613 | orr(result, result, Operand(scratch, LSL, 16)); | 
|  | 3614 |  | 
|  | 3615 | b(&load_result); | 
|  | 3616 | } | 
|  | 3617 |  | 
|  | 3618 | bind(&small_constant_pool_load); | 
| Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 3619 | if (emit_debug_code()) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3620 | // Check that the instruction is an ldr reg, [<pc or pp> + offset]. | 
|  | 3621 | and_(result, result, Operand(GetConsantPoolLoadPattern())); | 
|  | 3622 | cmp(result, Operand(GetConsantPoolLoadPattern())); | 
|  | 3623 | Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3624 | // Result was clobbered. Restore it. | 
|  | 3625 | ldr(result, MemOperand(ldr_location)); | 
|  | 3626 | } | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3627 |  | 
|  | 3628 | // Get the offset into the constant pool. | 
|  | 3629 | const uint32_t kLdrOffsetMask = (1 << 12) - 1; | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3630 | and_(result, result, Operand(kLdrOffsetMask)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3631 |  | 
|  | 3632 | bind(&load_result); | 
|  | 3633 | // Get the address of the constant. | 
|  | 3634 | if (FLAG_enable_ool_constant_pool) { | 
|  | 3635 | add(result, pp, Operand(result)); | 
|  | 3636 | } else { | 
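|  |  | // A pc-relative ldr reads pc as the instruction address plus | 
|  |  | // Instruction::kPCReadOffset (8 on ARM), hence the extra adjustment. | 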
|  | 3637 | add(result, ldr_location, Operand(result)); | 
|  | 3638 | add(result, result, Operand(Instruction::kPCReadOffset)); | 
|  | 3639 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 3640 | } | 
|  | 3641 |  | 
|  | 3642 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3643 | void MacroAssembler::CheckPageFlag( | 
|  | 3644 | Register object, | 
|  | 3645 | Register scratch, | 
|  | 3646 | int mask, | 
|  | 3647 | Condition cc, | 
|  | 3648 | Label* condition_met) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3649 | Bfc(scratch, object, 0, kPageSizeBits); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3650 | ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset)); | 
|  | 3651 | tst(scratch, Operand(mask)); | 
|  | 3652 | b(cc, condition_met); | 
|  | 3653 | } | 
|  | 3654 |  | 
|  | 3655 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3656 | void MacroAssembler::CheckMapDeprecated(Handle<Map> map, | 
|  | 3657 | Register scratch, | 
|  | 3658 | Label* if_deprecated) { | 
|  | 3659 | if (map->CanBeDeprecated()) { | 
|  | 3660 | mov(scratch, Operand(map)); | 
|  | 3661 | ldr(scratch, FieldMemOperand(scratch, Map::kBitField3Offset)); | 
|  | 3662 | tst(scratch, Operand(Map::Deprecated::kMask)); | 
|  | 3663 | b(ne, if_deprecated); | 
|  | 3664 | } | 
|  | 3665 | } | 
|  | 3666 |  | 
|  | 3667 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3668 | void MacroAssembler::JumpIfBlack(Register object, | 
|  | 3669 | Register scratch0, | 
|  | 3670 | Register scratch1, | 
|  | 3671 | Label* on_black) { | 
|  | 3672 | HasColor(object, scratch0, scratch1, on_black, 1, 0);  // kBlackBitPattern. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3673 | DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3674 | } | 
|  | 3675 |  | 
|  | 3676 |  | 
|  | 3677 | void MacroAssembler::HasColor(Register object, | 
|  | 3678 | Register bitmap_scratch, | 
|  | 3679 | Register mask_scratch, | 
|  | 3680 | Label* has_color, | 
|  | 3681 | int first_bit, | 
|  | 3682 | int second_bit) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3683 | DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3684 |  | 
|  | 3685 | GetMarkBits(object, bitmap_scratch, mask_scratch); | 
|  | 3686 |  | 
|  | 3687 | Label other_color, word_boundary; | 
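|  |  | // The mark consists of two consecutive bitmap bits. Test the first bit, then | 
|  |  | // shift the mask left by one (by adding it to itself) and test the second; | 
|  |  | // the word_boundary path handles a mask that wraps into the next bitmap cell. | 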
|  | 3688 | ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); | 
|  | 3689 | tst(ip, Operand(mask_scratch)); | 
|  | 3690 | b(first_bit == 1 ? eq : ne, &other_color); | 
|  | 3691 | // Shift left 1 by adding. | 
|  | 3692 | add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC); | 
|  | 3693 | b(eq, &word_boundary); | 
|  | 3694 | tst(ip, Operand(mask_scratch)); | 
|  | 3695 | b(second_bit == 1 ? ne : eq, has_color); | 
|  | 3696 | jmp(&other_color); | 
|  | 3697 |  | 
|  | 3698 | bind(&word_boundary); | 
|  | 3699 | ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize)); | 
|  | 3700 | tst(ip, Operand(1)); | 
|  | 3701 | b(second_bit == 1 ? ne : eq, has_color); | 
|  | 3702 | bind(&other_color); | 
|  | 3703 | } | 
|  | 3704 |  | 
|  | 3705 |  | 
|  | 3706 | // Detect some, but not all, common pointer-free objects.  This is used by the | 
|  | 3707 | // incremental write barrier which doesn't care about oddballs (they are always | 
|  | 3708 | // marked black immediately so this code is not hit). | 
|  | 3709 | void MacroAssembler::JumpIfDataObject(Register value, | 
|  | 3710 | Register scratch, | 
|  | 3711 | Label* not_data_object) { | 
|  | 3712 | Label is_data_object; | 
|  | 3713 | ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset)); | 
|  | 3714 | CompareRoot(scratch, Heap::kHeapNumberMapRootIndex); | 
|  | 3715 | b(eq, &is_data_object); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3716 | DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1); | 
|  | 3717 | DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3718 | // If it's a string and it's not an indirect (cons or sliced) string then it's | 
|  | 3719 | // an object containing no GC pointers. | 
|  | 3720 | ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 
|  | 3721 | tst(scratch, Operand(kIsIndirectStringMask | kIsNotStringMask)); | 
|  | 3722 | b(ne, not_data_object); | 
|  | 3723 | bind(&is_data_object); | 
|  | 3724 | } | 
|  | 3725 |  | 
|  | 3726 |  | 
|  | 3727 | void MacroAssembler::GetMarkBits(Register addr_reg, | 
|  | 3728 | Register bitmap_reg, | 
|  | 3729 | Register mask_reg) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3730 | DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg)); | 
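|  |  | // On exit, bitmap_reg holds the page start plus the byte offset of the | 
|  |  | // bitmap cell covering addr_reg (callers add MemoryChunk::kHeaderSize when | 
|  |  | // loading the cell), and mask_reg holds a single bit, 1 << (index of the | 
|  |  | // word within that cell). | 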
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3731 | and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask)); | 
|  | 3732 | Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2); | 
|  | 3733 | const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2; | 
|  | 3734 | Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits); | 
|  | 3735 | add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2)); | 
|  | 3736 | mov(ip, Operand(1)); | 
|  | 3737 | mov(mask_reg, Operand(ip, LSL, mask_reg)); | 
|  | 3738 | } | 
|  | 3739 |  | 
|  | 3740 |  | 
|  | 3741 | void MacroAssembler::EnsureNotWhite( | 
|  | 3742 | Register value, | 
|  | 3743 | Register bitmap_scratch, | 
|  | 3744 | Register mask_scratch, | 
|  | 3745 | Register load_scratch, | 
|  | 3746 | Label* value_is_white_and_not_data) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3747 | DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3748 | GetMarkBits(value, bitmap_scratch, mask_scratch); | 
|  | 3749 |  | 
|  | 3750 | // If the value is black or grey we don't need to do anything. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3751 | DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); | 
|  | 3752 | DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0); | 
|  | 3753 | DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0); | 
|  | 3754 | DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3755 |  | 
|  | 3756 | Label done; | 
|  | 3757 |  | 
|  | 3758 | // Since both black and grey have a 1 in the first position and white does | 
|  | 3759 | // not have a 1 there we only need to check one bit. | 
|  | 3760 | ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); | 
|  | 3761 | tst(mask_scratch, load_scratch); | 
|  | 3762 | b(ne, &done); | 
|  | 3763 |  | 
|  | 3764 | if (emit_debug_code()) { | 
|  | 3765 | // Check for impossible bit pattern. | 
|  | 3766 | Label ok; | 
|  | 3767 | // LSL may overflow, making the check conservative. | 
|  | 3768 | tst(load_scratch, Operand(mask_scratch, LSL, 1)); | 
|  | 3769 | b(eq, &ok); | 
|  | 3770 | stop("Impossible marking bit pattern"); | 
|  | 3771 | bind(&ok); | 
|  | 3772 | } | 
|  | 3773 |  | 
|  | 3774 | // Value is white.  We check whether it is data that doesn't need scanning. | 
|  | 3775 | // Currently only checks for HeapNumber and non-cons strings. | 
|  | 3776 | Register map = load_scratch;  // Holds map while checking type. | 
|  | 3777 | Register length = load_scratch;  // Holds length of object after testing type. | 
|  | 3778 | Label is_data_object; | 
|  | 3779 |  | 
|  | 3780 | // Check for heap-number | 
|  | 3781 | ldr(map, FieldMemOperand(value, HeapObject::kMapOffset)); | 
|  | 3782 | CompareRoot(map, Heap::kHeapNumberMapRootIndex); | 
|  | 3783 | mov(length, Operand(HeapNumber::kSize), LeaveCC, eq); | 
|  | 3784 | b(eq, &is_data_object); | 
|  | 3785 |  | 
|  | 3786 | // Check for strings. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3787 | DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1); | 
|  | 3788 | DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3789 | // If it's a string and it's not an indirect (cons or sliced) string then it's | 
|  | 3790 | // an object containing no GC pointers. | 
|  | 3791 | Register instance_type = load_scratch; | 
|  | 3792 | ldrb(instance_type, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 
|  | 3793 | tst(instance_type, Operand(kIsIndirectStringMask | kIsNotStringMask)); | 
|  | 3794 | b(ne, value_is_white_and_not_data); | 
|  | 3795 | // It's a non-indirect (non-cons and non-slice) string. | 
|  | 3796 | // If it's external, the length is just ExternalString::kSize. | 
|  | 3797 | // Otherwise it's String::kHeaderSize + string->length() * (1 or 2). | 
|  | 3798 | // External strings are the only ones with the kExternalStringTag bit | 
|  | 3799 | // set. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3800 | DCHECK_EQ(0, kSeqStringTag & kExternalStringTag); | 
|  | 3801 | DCHECK_EQ(0, kConsStringTag & kExternalStringTag); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3802 | tst(instance_type, Operand(kExternalStringTag)); | 
|  | 3803 | mov(length, Operand(ExternalString::kSize), LeaveCC, ne); | 
|  | 3804 | b(ne, &is_data_object); | 
|  | 3805 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3806 | // Sequential string, either Latin1 or UC16. | 
|  | 3807 | // For Latin1 (char-size of 1) we shift the smi tag away to get the length. | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3808 | // For UC16 (char-size of 2) we just leave the smi tag in place, thereby | 
|  | 3809 | // getting the length multiplied by 2. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3810 | DCHECK(kOneByteStringTag == 4 && kStringEncodingMask == 4); | 
|  | 3811 | DCHECK(kSmiTag == 0 && kSmiTagSize == 1); | 
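|  |  | // In effect: length = align(SeqString::kHeaderSize + chars * char_size, | 
|  |  | //                            kObjectAlignment), where the smi-tagged length | 
|  |  | // already equals chars * 2 and is halved for one-byte strings. | 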
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3812 | ldr(ip, FieldMemOperand(value, String::kLengthOffset)); | 
|  | 3813 | tst(instance_type, Operand(kStringEncodingMask)); | 
|  | 3814 | mov(ip, Operand(ip, LSR, 1), LeaveCC, ne); | 
|  | 3815 | add(length, ip, Operand(SeqString::kHeaderSize + kObjectAlignmentMask)); | 
|  | 3816 | and_(length, length, Operand(~kObjectAlignmentMask)); | 
|  | 3817 |  | 
|  | 3818 | bind(&is_data_object); | 
|  | 3819 | // Value is a data object, and it is white.  Mark it black.  Since we know | 
|  | 3820 | // that the object is white we can make it black by flipping one bit. | 
|  | 3821 | ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); | 
|  | 3822 | orr(ip, ip, Operand(mask_scratch)); | 
|  | 3823 | str(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); | 
|  | 3824 |  | 
|  | 3825 | and_(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask)); | 
|  | 3826 | ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); | 
|  | 3827 | add(ip, ip, Operand(length)); | 
|  | 3828 | str(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); | 
|  | 3829 |  | 
|  | 3830 | bind(&done); | 
|  | 3831 | } | 
|  | 3832 |  | 
|  | 3833 |  | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3834 | void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { | 
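|  |  | // USAT saturates the signed input to the unsigned range [0, 255]. | 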
|  | 3835 | Usat(output_reg, 8, Operand(input_reg)); | 
|  | 3836 | } | 
|  | 3837 |  | 
|  | 3838 |  | 
|  | 3839 | void MacroAssembler::ClampDoubleToUint8(Register result_reg, | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3840 | DwVfpRegister input_reg, | 
|  | 3841 | LowDwVfpRegister double_scratch) { | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3842 | Label done; | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3843 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3844 | // Handle inputs >= 255 (including +infinity). | 
|  | 3845 | Vmov(double_scratch, 255.0, result_reg); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3846 | mov(result_reg, Operand(255)); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3847 | VFPCompareAndSetFlags(input_reg, double_scratch); | 
|  | 3848 | b(ge, &done); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3849 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3850 | // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest | 
|  | 3851 | // rounding mode will provide the correct result. | 
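|  |  | // (Out-of-range negative inputs, and NaN, saturate to 0 in the unsigned | 
|  |  | // conversion, which is exactly the clamp behavior wanted here.) | 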
|  | 3852 | vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding); | 
|  | 3853 | vmov(result_reg, double_scratch.low()); | 
|  | 3854 |  | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3855 | bind(&done); | 
|  | 3856 | } | 
|  | 3857 |  | 
|  | 3858 |  | 
|  | 3859 | void MacroAssembler::LoadInstanceDescriptors(Register map, | 
|  | 3860 | Register descriptors) { | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3861 | ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset)); | 
|  | 3862 | } | 
|  | 3863 |  | 
|  | 3864 |  | 
|  | 3865 | void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) { | 
|  | 3866 | ldr(dst, FieldMemOperand(map, Map::kBitField3Offset)); | 
|  | 3867 | DecodeField<Map::NumberOfOwnDescriptorsBits>(dst); | 
|  | 3868 | } | 
|  | 3869 |  | 
|  | 3870 |  | 
|  | 3871 | void MacroAssembler::EnumLength(Register dst, Register map) { | 
|  | 3872 | STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); | 
|  | 3873 | ldr(dst, FieldMemOperand(map, Map::kBitField3Offset)); | 
|  | 3874 | and_(dst, dst, Operand(Map::EnumLengthBits::kMask)); | 
|  | 3875 | SmiTag(dst); | 
| Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame] | 3876 | } | 
|  | 3877 |  | 
|  | 3878 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3879 | void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) { | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3880 | Register empty_fixed_array_value = r6; | 
|  | 3881 | LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3882 | Label next, start; | 
|  | 3883 | mov(r2, r0); | 
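|  |  | // r2 walks the prototype chain, starting with the object passed in r0; below, | 
|  |  | // r1 holds the current object's map and r3 the map's enum length. | 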
|  | 3884 |  | 
|  | 3885 | // Check if the enum length field is properly initialized, indicating that | 
|  | 3886 | // there is an enum cache. | 
|  | 3887 | ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); | 
|  | 3888 |  | 
|  | 3889 | EnumLength(r3, r1); | 
|  | 3890 | cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel))); | 
|  | 3891 | b(eq, call_runtime); | 
|  | 3892 |  | 
|  | 3893 | jmp(&start); | 
|  | 3894 |  | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3895 | bind(&next); | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3896 | ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3897 |  | 
|  | 3898 | // For all objects but the receiver, check that the cache is empty. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3899 | EnumLength(r3, r1); | 
|  | 3900 | cmp(r3, Operand(Smi::FromInt(0))); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3901 | b(ne, call_runtime); | 
|  | 3902 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3903 | bind(&start); | 
|  | 3904 |  | 
|  | 3905 | // Check that there are no elements. Register r2 contains the current JS | 
|  | 3906 | // object we've reached through the prototype chain. | 
|  | 3907 | Label no_elements; | 
|  | 3908 | ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset)); | 
|  | 3909 | cmp(r2, empty_fixed_array_value); | 
|  | 3910 | b(eq, &no_elements); | 
|  | 3911 |  | 
|  | 3912 | // Second chance, the object may be using the empty slow element dictionary. | 
|  | 3913 | CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex); | 
|  | 3914 | b(ne, call_runtime); | 
|  | 3915 |  | 
|  | 3916 | bind(&no_elements); | 
|  | 3917 | ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); | 
|  | 3918 | cmp(r2, null_value); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3919 | b(ne, &next); | 
|  | 3920 | } | 
|  | 3921 |  | 
|  | 3922 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3923 | void MacroAssembler::TestJSArrayForAllocationMemento( | 
|  | 3924 | Register receiver_reg, | 
|  | 3925 | Register scratch_reg, | 
|  | 3926 | Label* no_memento_found) { | 
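|  |  | // scratch_reg is set to the address just past an AllocationMemento that would | 
|  |  | // immediately follow the JSArray. If that address lies outside new space no | 
|  |  | // memento can exist; otherwise the candidate's map slot is compared against | 
|  |  | // the allocation memento map, leaving the flags for the caller (eq when a | 
|  |  | // memento was found). | 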
|  | 3927 | ExternalReference new_space_start = | 
|  | 3928 | ExternalReference::new_space_start(isolate()); | 
|  | 3929 | ExternalReference new_space_allocation_top = | 
|  | 3930 | ExternalReference::new_space_allocation_top_address(isolate()); | 
|  | 3931 | add(scratch_reg, receiver_reg, | 
|  | 3932 | Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); | 
|  | 3933 | cmp(scratch_reg, Operand(new_space_start)); | 
|  | 3934 | b(lt, no_memento_found); | 
|  | 3935 | mov(ip, Operand(new_space_allocation_top)); | 
|  | 3936 | ldr(ip, MemOperand(ip)); | 
|  | 3937 | cmp(scratch_reg, ip); | 
|  | 3938 | b(gt, no_memento_found); | 
|  | 3939 | ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize)); | 
|  | 3940 | cmp(scratch_reg, | 
|  | 3941 | Operand(isolate()->factory()->allocation_memento_map())); | 
| Ben Murdoch | 3ef787d | 2012-04-12 10:51:47 +0100 | [diff] [blame] | 3942 | } | 
|  | 3943 |  | 
|  | 3944 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 3945 | Register GetRegisterThatIsNotOneOf(Register reg1, | 
|  | 3946 | Register reg2, | 
|  | 3947 | Register reg3, | 
|  | 3948 | Register reg4, | 
|  | 3949 | Register reg5, | 
|  | 3950 | Register reg6) { | 
|  | 3951 | RegList regs = 0; | 
|  | 3952 | if (reg1.is_valid()) regs |= reg1.bit(); | 
|  | 3953 | if (reg2.is_valid()) regs |= reg2.bit(); | 
|  | 3954 | if (reg3.is_valid()) regs |= reg3.bit(); | 
|  | 3955 | if (reg4.is_valid()) regs |= reg4.bit(); | 
|  | 3956 | if (reg5.is_valid()) regs |= reg5.bit(); | 
|  | 3957 | if (reg6.is_valid()) regs |= reg6.bit(); | 
|  | 3958 |  | 
|  | 3959 | for (int i = 0; i < Register::NumAllocatableRegisters(); i++) { | 
|  | 3960 | Register candidate = Register::FromAllocationIndex(i); | 
|  | 3961 | if (regs & candidate.bit()) continue; | 
|  | 3962 | return candidate; | 
|  | 3963 | } | 
|  | 3964 | UNREACHABLE(); | 
|  | 3965 | return no_reg; | 
|  | 3966 | } | 
|  | 3967 |  | 
|  | 3968 |  | 
|  | 3969 | void MacroAssembler::JumpIfDictionaryInPrototypeChain( | 
|  | 3970 | Register object, | 
|  | 3971 | Register scratch0, | 
|  | 3972 | Register scratch1, | 
|  | 3973 | Label* found) { | 
|  | 3974 | DCHECK(!scratch1.is(scratch0)); | 
|  | 3975 | Factory* factory = isolate()->factory(); | 
|  | 3976 | Register current = scratch0; | 
|  | 3977 | Label loop_again; | 
|  | 3978 |  | 
|  | 3979 | // Start the walk at the object itself. | 
|  | 3980 | mov(current, object); | 
|  | 3981 |  | 
|  | 3982 | // Loop based on the map going up the prototype chain. | 
|  | 3983 | bind(&loop_again); | 
|  | 3984 | ldr(current, FieldMemOperand(current, HeapObject::kMapOffset)); | 
|  | 3985 | ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset)); | 
|  | 3986 | DecodeField<Map::ElementsKindBits>(scratch1); | 
|  | 3987 | cmp(scratch1, Operand(DICTIONARY_ELEMENTS)); | 
|  | 3988 | b(eq, found); | 
|  | 3989 | ldr(current, FieldMemOperand(current, Map::kPrototypeOffset)); | 
|  | 3990 | cmp(current, Operand(factory->null_value())); | 
|  | 3991 | b(ne, &loop_again); | 
|  | 3992 | } | 
|  | 3993 |  | 
|  | 3994 |  | 
|  | 3995 | #ifdef DEBUG | 
|  | 3996 | bool AreAliased(Register reg1, | 
|  | 3997 | Register reg2, | 
|  | 3998 | Register reg3, | 
|  | 3999 | Register reg4, | 
|  | 4000 | Register reg5, | 
|  | 4001 | Register reg6, | 
|  | 4002 | Register reg7, | 
|  | 4003 | Register reg8) { | 
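|  |  | // Two or more of the valid registers alias exactly when the number of valid | 
|  |  | // registers exceeds the number of distinct bits in the combined RegList. | 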
|  | 4004 | int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + | 
|  | 4005 | reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() + | 
|  | 4006 | reg7.is_valid() + reg8.is_valid(); | 
|  | 4007 |  | 
|  | 4008 | RegList regs = 0; | 
|  | 4009 | if (reg1.is_valid()) regs |= reg1.bit(); | 
|  | 4010 | if (reg2.is_valid()) regs |= reg2.bit(); | 
|  | 4011 | if (reg3.is_valid()) regs |= reg3.bit(); | 
|  | 4012 | if (reg4.is_valid()) regs |= reg4.bit(); | 
|  | 4013 | if (reg5.is_valid()) regs |= reg5.bit(); | 
|  | 4014 | if (reg6.is_valid()) regs |= reg6.bit(); | 
|  | 4015 | if (reg7.is_valid()) regs |= reg7.bit(); | 
|  | 4016 | if (reg8.is_valid()) regs |= reg8.bit(); | 
|  | 4017 | int n_of_non_aliasing_regs = NumRegs(regs); | 
|  | 4018 |  | 
|  | 4019 | return n_of_valid_regs != n_of_non_aliasing_regs; | 
|  | 4020 | } | 
|  | 4021 | #endif | 
|  | 4022 |  | 
|  | 4023 |  | 
|  | 4024 | CodePatcher::CodePatcher(byte* address, | 
|  | 4025 | int instructions, | 
|  | 4026 | FlushICache flush_cache) | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4027 | : address_(address), | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4028 | size_(instructions * Assembler::kInstrSize), | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 4029 | masm_(NULL, address, size_ + Assembler::kGap), | 
|  | 4030 | flush_cache_(flush_cache) { | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4031 | // Create a new macro assembler pointing to the address of the code to patch. | 
|  | 4032 | // The size is adjusted with kGap in order for the assembler to generate size | 
|  | 4033 | // bytes of instructions without failing with buffer size constraints. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 4034 | DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4035 | } | 
|  | 4036 |  | 
|  | 4037 |  | 
|  | 4038 | CodePatcher::~CodePatcher() { | 
|  | 4039 | // Indicate that code has changed. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 4040 | if (flush_cache_ == FLUSH) { | 
|  | 4041 | CpuFeatures::FlushICache(address_, size_); | 
|  | 4042 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4043 |  | 
|  | 4044 | // Check that the code was patched as expected. | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 4045 | DCHECK(masm_.pc_ == address_ + size_); | 
|  | 4046 | DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4047 | } | 
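|  |  | // Illustrative sketch (not from this file): patching a single instruction in | 
|  |  | // place might look like the following, where target_address is a hypothetical | 
|  |  | // byte* into existing code. The destructor flushes the instruction cache for | 
|  |  | // the patched range when constructed with FLUSH. | 
|  |  | //   CodePatcher patcher(target_address, 1, CodePatcher::FLUSH); | 
|  |  | //   patcher.EmitCondition(eq);  // Rewrite the condition field of the | 
|  |  | //                               // instruction at target_address. | 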
|  | 4048 |  | 
|  | 4049 |  | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 4050 | void CodePatcher::Emit(Instr instr) { | 
|  | 4051 | masm()->emit(instr); | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4052 | } | 
|  | 4053 |  | 
|  | 4054 |  | 
|  | 4055 | void CodePatcher::Emit(Address addr) { | 
|  | 4056 | masm()->emit(reinterpret_cast<Instr>(addr)); | 
|  | 4057 | } | 
| Steve Block | 1e0659c | 2011-05-24 12:43:12 +0100 | [diff] [blame] | 4058 |  | 
|  | 4059 |  | 
|  | 4060 | void CodePatcher::EmitCondition(Condition cond) { | 
|  | 4061 | Instr instr = Assembler::instr_at(masm_.pc_); | 
|  | 4062 | instr = (instr & ~kCondMask) | cond; | 
|  | 4063 | masm_.emit(instr); | 
|  | 4064 | } | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4065 |  | 
|  | 4066 |  | 
| Ben Murdoch | b8a8cc1 | 2014-11-26 15:28:44 +0000 | [diff] [blame^] | 4067 | void MacroAssembler::TruncatingDiv(Register result, | 
|  | 4068 | Register dividend, | 
|  | 4069 | int32_t divisor) { | 
|  | 4070 | DCHECK(!dividend.is(result)); | 
|  | 4071 | DCHECK(!dividend.is(ip)); | 
|  | 4072 | DCHECK(!result.is(ip)); | 
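|  |  | // Classic multiply-by-magic-number signed division (cf. Hacker's Delight): | 
|  |  | // take the high 32 bits of dividend * multiplier, correct for the sign of the | 
|  |  | // multiplier, shift arithmetically, then add the dividend's sign bit so the | 
|  |  | // quotient is truncated toward zero rather than toward minus infinity. | 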
|  | 4073 | base::MagicNumbersForDivision<uint32_t> mag = | 
|  | 4074 | base::SignedDivisionByConstant(static_cast<uint32_t>(divisor)); | 
|  | 4075 | mov(ip, Operand(mag.multiplier)); | 
|  | 4076 | smull(ip, result, dividend, ip); | 
|  | 4077 | bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0; | 
|  | 4078 | if (divisor > 0 && neg) { | 
|  | 4079 | add(result, result, Operand(dividend)); | 
|  | 4080 | } | 
|  | 4081 | if (divisor < 0 && !neg && mag.multiplier > 0) { | 
|  | 4082 | sub(result, result, Operand(dividend)); | 
|  | 4083 | } | 
|  | 4084 | if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift)); | 
|  | 4085 | add(result, result, Operand(dividend, LSR, 31)); | 
|  | 4086 | } | 
|  | 4087 |  | 
|  | 4088 |  | 
| Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4089 | } }  // namespace v8::internal | 
| Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 4090 |  | 
|  | 4091 | #endif  // V8_TARGET_ARCH_ARM |