// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/isolate-inl.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate,
                                                   Address target,
                                                   RelocInfo::Mode rmode,
                                                   Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(NULL, mov_instr) * kInstrSize;
}


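// Note: CallSize() is evaluated before any code is emitted because the number
// of instructions mov() needs for the target address depends on the constant
// pool state at the point of emission; the DCHECK_EQ at the end of the body
// verifies that the size computed up front matches what was generated.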
void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code, to ensure we assume the
  // same constant pool availability (e.g., whether the constant pool is full
  // or not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // The call sequence on V7 or later may be:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                 @ return address
  // Or, for pre-V7 targets or values that may be back-patched
  // to avoid ICache flushes:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                       @ return address

  // Statement positions are expected to be recorded when the target
  // address is loaded. The mov method will automatically record
  // positions when pc is the target; since that is not the case here,
  // we have to do it explicitly.
  positions_recorder()->WriteRecordedPositions();

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}


void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}


void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}


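// Swaps the contents of two registers. Without a scratch register this falls
// back on the classic three-EOR exchange (a ^= b; b ^= a; a ^= b), trading a
// third data-processing instruction for the temporary.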
void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    mov(dst, Operand(value));
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      mov(dst, Operand(value));
    }
  }
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}


void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}


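// Multiply-and-subtract: dst = srcA - (src1 * src2). Uses the single mls
// instruction where supported; otherwise it multiplies into ip and subtracts,
// which is why srcA must not alias ip on the fallback path.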
void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(MLS)) {
    CpuFeatureScope scope(this, MLS);
    mls(dst, src1, src2, srcA, cond);
  } else {
    DCHECK(!srcA.is(ip));
    mul(ip, src1, src2, LeaveCC, cond);
    sub(dst, srcA, ip, LeaveCC, cond);
  }
}


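// And() strength-reduces two immediate special cases: AND with 0 becomes a
// mov of zero, and on ARMv7 an AND with a mask of the form 2^n - 1 that would
// otherwise take several instructions becomes a single ubfx (unsigned
// bit-field extract starting at bit 0).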
void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
             base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}


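// Software fallback for ubfx when ARMv7 is unavailable or a predictable code
// size is required: mask off everything outside bits [lsb, lsb + width), then
// shift the field down to bit 0.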
void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}


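// Software fallback for bfi (bit-field insert): clear the destination field
// with bic, mask the low 'width' bits of the source into the scratch
// register, shift them into position, and orr them in.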
void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


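// Software fallback for usat (unsigned saturate): clamps src to the range
// [0, 2^satpos - 1]. After the tst against ~satval, the mi conditional move
// selects 0 for negative values and the pl move selects satval for positive
// values that overflow the field.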
void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    DCHECK(!dst.is(pc) && !src.rm().is(pc));
    DCHECK((satpos >= 0) && (satpos <= 31));

    // These asserts are required to ensure compatibility with the ARMv7
    // implementation.
    DCHECK((src.shift_op() == ASR) || (src.shift_op() == LSL));
    DCHECK(src.rs().is(no_reg));

    Label done;
    int satval = (1 << satpos) - 1;

    if (cond != al) {
      b(NegateCondition(cond), &done);  // Skip saturate if !condition.
    }
    if (!(src.is_reg() && dst.is(src.rm()))) {
      mov(dst, src);
    }
    tst(dst, Operand(~satval));
    b(eq, &done);
    mov(dst, Operand::Zero(), LeaveCC, mi);  // 0 if negative.
    mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
    bind(&done);
  } else {
    usat(dst, satpos, src, cond);
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


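// This check appears to rely on the new space occupying a contiguous,
// mask-aligned address range: masking the object's address and comparing it
// against the space's start decides membership without any memory loads.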
void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
  and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
  b(cond, branch);
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's page's interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count the number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    ldr(ip, MemOperand(address));
    cmp(ip, value);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count the number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


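// Appends 'address' to the store buffer: load the current top, store the slot
// address with a post-indexed write (which bumps the top by one pointer),
// write the new top back, and call the overflow stub once the top reaches the
// overflow bit.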
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  ldr(scratch, MemOperand(ip));
  // Store pointer to buffer and increment buffer top.
  str(address, MemOperand(scratch, kPointerSize, PostIndex));
  // Write back new top of buffer.
  str(scratch, MemOperand(ip));
  // Call stub on end of buffer.
  // Check for end of buffer.
  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
    b(eq, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(eq);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}


void MacroAssembler::PushFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
                cp.bit() |
                (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
                fp.bit() |
                lr.bit());
}


void MacroAssembler::PopFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
                cp.bit() |
                (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
                fp.bit() |
                lr.bit());
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0:
  DCHECK(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters);
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // Number of d-regs not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


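// Ldrd/Strd transfer a register pair. When the ldrd/strd encodings cannot be
// used (pre-ARMv7, a misaligned register pair, or predictable code size), the
// access is split into two word transfers; note how the loads below are
// ordered so that a base register aliasing dst1 is read before it is
// overwritten.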
void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
      if (dst1.is(src.rn())) {
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}


void MacroAssembler::VFPEnsureFPSCRState(Register scratch) {
  // If needed, restore wanted bits of FPSCR.
  Label fpscr_done;
  vmrs(scratch);
  if (emit_debug_code()) {
    Label rounding_mode_correct;
    tst(scratch, Operand(kVFPRoundingModeMask));
    b(eq, &rounding_mode_correct);
    // Don't call Assert here, since Runtime_Abort could re-enter here.
    stop("Default rounding mode not set");
    bind(&rounding_mode_correct);
  }
  tst(scratch, Operand(kVFPDefaultNaNModeControlBit));
  b(ne, &fpscr_done);
  orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit));
  vmsr(scratch);
  bind(&fpscr_done);
}


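// Canonicalizing a NaN by subtracting zero works because VFPEnsureFPSCRState
// (above) sets the FPSCR default-NaN bit, under which any arithmetic on a NaN
// input yields the canonical quiet NaN.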
void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,
                                        const Condition cond) {
  vsub(dst, src, kDoubleRegZero, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const double src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

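// Materializes a double immediate. +0.0 and -0.0 are special-cased as a copy
// (respectively a negation) of kDoubleRegZero to avoid a constant load; the
// bitwise DoubleRepresentation comparison is what keeps -0.0 distinct from
// 0.0 here.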
void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const double imm,
                          const Register scratch) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero) {
    vmov(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero) {
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}


void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


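// With out-of-line constant pools, pp is initialized from a pc-relative load
// of the constant pool slot in the code header; the offset arithmetic below
// compensates for the pc reading ahead of the load instruction by
// Instruction::kPCReadOffset bytes.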
void MacroAssembler::LoadConstantPoolPointerRegister() {
  if (FLAG_enable_ool_constant_pool) {
    int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize -
        pc_offset() - Instruction::kPCReadOffset;
    DCHECK(ImmediateFitsAddrMode2Instruction(constant_pool_offset));
    ldr(pp, MemOperand(pc, constant_pool_offset));
  }
}


void MacroAssembler::StubPrologue() {
  PushFixedFrame();
  Push(Smi::FromInt(StackFrame::STUB));
  // Adjust FP to point to saved FP.
  add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  if (FLAG_enable_ool_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_ool_constant_pool_available(true);
  }
}


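// The function prologue is emitted in one of two forms under a
// PredictableCodeSizeScope of kNoCodeAgeSequenceLength: the young sequence
// (fixed frame push, marker nop, fp adjustment) or the pre-aged sequence that
// jumps to a code-age stub; the identical size presumably lets one form be
// patched over the other in place.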
void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictable_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
      ldr(pc, MemOperand(pc, -4));
      emit_code_stub_address(stub);
    } else {
      PushFixedFrame(r1);
      nop(ip.code());
      // Adjust FP to point to saved FP.
      add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
    }
  }
  if (FLAG_enable_ool_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_ool_constant_pool_available(true);
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // r0-r3: preserved
  PushFixedFrame();
  if (FLAG_enable_ool_constant_pool && load_constant_pool_pointer_reg) {
    LoadConstantPoolPointerRegister();
  }
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  // Adjust FP to point to saved FP.
  add(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


int MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer, return address and constant pool pointer
  // (if FLAG_enable_ool_constant_pool).
  int frame_ends;
  if (FLAG_enable_ool_constant_pool) {
    add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    frame_ends = pc_offset();
    ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
  } else {
    mov(sp, fp);
    frame_ends = pc_offset();
    ldm(ia_w, sp, fp.bit() | lr.bit());
  }
  return frame_ends;
}


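// Sets up an exit frame: saves lr and fp, points fp at the saved fp, and
// reserves slots below it for the saved sp, the code object and (with
// out-of-line constant pools) pp. Optionally spills all d-registers, then
// reserves stack_space words plus a return-address slot and aligns sp to the
// activation frame alignment.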
void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  Push(lr, fp);
  mov(fp, Operand(sp));  // Set up new frame pointer.
  // Reserve room for saved entry sp and code object.
  sub(sp, sp, Operand(ExitFrameConstants::kFrameSize));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
    str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_ool_constant_pool) {
    str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(ip, Operand(CodeObject()));
  str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(cp, MemOperand(ip));

  // Optionally save all double registers.
  if (save_doubles) {
    SaveFPRegs(sp, ip);
    // Note that d0 will be accessible at
    //   fp - ExitFrameConstants::kFrameSize -
    //   DwVfpRegister::kMaxNumRegisters * kDoubleSize,
    // since the sp slot, code slot and constant pool slot (if
    // FLAG_enable_ool_constant_pool) were pushed after the fp.
  }

  // Reserve a place for the return address and the stack space, and align the
  // frame in preparation for calling the runtime function.
  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  add(ip, sp, Operand(kPointerSize));
  str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  SmiTag(scratch1, length);
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_ARM
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_ARM
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_ARM
}


void MacroAssembler::LeaveExitFrame(bool save_doubles,
                                    Register argument_count,
                                    bool restore_context) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);

  // Optionally restore all double registers.
  if (save_doubles) {
    // Calculate the stack location of the saved doubles and restore them.
    const int offset = ExitFrameConstants::kFrameSize;
    sub(r3, fp,
        Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
    RestoreFPRegs(r3, ip);
  }

  // Clear top frame.
  mov(r3, Operand::Zero());
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    ldr(cp, MemOperand(ip));
  }
#ifdef DEBUG
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(r3, MemOperand(ip));
#endif

  // Tear down the exit frame, pop the arguments, and return.
  if (FLAG_enable_ool_constant_pool) {
    ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(sp, Operand(fp));
  ldm(ia_w, sp, fp.bit() | lr.bit());
  if (argument_count.is_valid()) {
    add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
  }
}


void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
  if (use_eabi_hardfloat()) {
    Move(dst, d0);
  } else {
    vmov(dst, r0, r1);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
  MovFromFloatResult(dst);
}


1177void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1178 const ParameterCount& actual,
1179 Handle<Code> code_constant,
1180 Register code_reg,
1181 Label* done,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001182 bool* definitely_mismatches,
Ben Murdochb8e0da22011-05-16 14:20:40 +01001183 InvokeFlag flag,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001184 const CallWrapper& call_wrapper) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001185 bool definitely_matches = false;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001186 *definitely_mismatches = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00001187 Label regular_invoke;
1188
1189 // Check whether the expected and actual arguments count match. If not,
1190 // setup registers according to contract with ArgumentsAdaptorTrampoline:
1191 // r0: actual arguments count
1192 // r1: function (passed through to callee)
1193 // r2: expected arguments count
Steve Blocka7e24c12009-10-30 11:49:00 +00001194
1195 // The code below is made a lot easier because the calling code already sets
1196 // up actual and expected registers according to the contract if values are
1197 // passed in registers.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001198 DCHECK(actual.is_immediate() || actual.reg().is(r0));
1199 DCHECK(expected.is_immediate() || expected.reg().is(r2));
1200 DCHECK((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));
Steve Blocka7e24c12009-10-30 11:49:00 +00001201
1202 if (expected.is_immediate()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001203 DCHECK(actual.is_immediate());
Steve Blocka7e24c12009-10-30 11:49:00 +00001204 if (expected.immediate() == actual.immediate()) {
1205 definitely_matches = true;
1206 } else {
1207 mov(r0, Operand(actual.immediate()));
1208 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
1209 if (expected.immediate() == sentinel) {
1210 // Don't worry about adapting arguments for builtins that
1211 // don't want that done. Skip adaption code by making it look
1212 // like we have a match between expected and actual number of
1213 // arguments.
1214 definitely_matches = true;
1215 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001216 *definitely_mismatches = true;
Steve Blocka7e24c12009-10-30 11:49:00 +00001217 mov(r2, Operand(expected.immediate()));
1218 }
1219 }
1220 } else {
1221 if (actual.is_immediate()) {
1222 cmp(expected.reg(), Operand(actual.immediate()));
1223 b(eq, &regular_invoke);
1224 mov(r0, Operand(actual.immediate()));
1225 } else {
1226 cmp(expected.reg(), Operand(actual.reg()));
1227 b(eq, &regular_invoke);
1228 }
1229 }
1230
1231 if (!definitely_matches) {
1232 if (!code_constant.is_null()) {
1233 mov(r3, Operand(code_constant));
1234 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
1235 }
1236
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        b(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }

    // Continue here if InvokePrologue does handle the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(fun.is(r1));

  Register expected_reg = r2;
  Register code_reg = r3;

  ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(code_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(expected_reg);
  ldr(code_reg,
      FieldMemOperand(r1, JSFunction::kCodeEntryOffset));

  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(function.is(r1));

  // Get the function and set up the context.
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  InvokeCode(r3, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(r1, function);
  InvokeFunction(r1, expected, actual, flag, call_wrapper);
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  ldr(map, FieldMemOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(scratch, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  b(lt, fail);
  cmp(scratch, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  b(gt, fail);
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  tst(scratch, Operand(kIsNotStringMask));
  b(ne, fail);
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, Operand(LAST_NAME_TYPE));
  b(hi, fail);
}


void MacroAssembler::DebugBreak() {
  mov(r0, Operand::Zero());
  mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // For the JSEntry handler, we must preserve r0-r4; r5-r6 are available.
  // We will build up the handler from the bottom by pushing on the stack.
  // Set up the code object (r5) and the state (r6) for pushing.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  mov(r5, Operand(CodeObject()));
  mov(r6, Operand(state));

  // Push the frame pointer, context, state, and code object.
  if (kind == StackHandler::JS_ENTRY) {
    mov(cp, Operand(Smi::FromInt(0)));  // Indicates no context.
    mov(ip, Operand::Zero());  // NULL frame pointer.
    stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | ip.bit());
  } else {
    stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
  }

  // Link the current handler as the next handler.
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(r5, MemOperand(r6));
  push(r5);
  // Set this new handler as the current one.
  str(sp, MemOperand(r6));
}
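
// Layout of the handler frame built above, as word offsets from the final
// sp (this is exactly what the STATIC_ASSERTs at the top of PushTryHandler
// pin down):
//
//   sp + 0: next     - link to the previous handler (pushed last)
//   sp + 1: code     - the code object (r5)
//   sp + 2: state    - handler index | kind (r6)
//   sp + 3: context  - cp, or Smi 0 for a JS_ENTRY handler
//   sp + 4: fp       - saved frame pointer, or 0 for a JS_ENTRY handler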


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(r1);
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // r0 = exception, r1 = code object, r2 = state.

  ConstantPoolUnavailableScope constant_pool_unavailable(this);
  if (FLAG_enable_ool_constant_pool) {
    ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset));  // Constant pool.
  }
  ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset));  // Handler table.
  add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  mov(r2, Operand(r2, LSR, StackHandler::kKindWidth));  // Handler index.
  ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2));  // Smi-tagged offset.
  add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start.
  add(pc, r1, Operand::SmiUntag(r2));  // Jump.
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in r0.
  if (!value.is(r0)) {
    mov(r0, value);
  }
  // Drop the stack pointer to the top of the top handler.
  mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(sp, MemOperand(r3));
  // Restore the next handler.
  pop(r2);
  str(r2, MemOperand(r3));

  // Get the code object (r1) and state (r2).  Restore the context and frame
  // pointer.
  ldm(ia_w, sp, r1.bit() | r2.bit() | cp.bit() | fp.bit());

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (fp == 0) == (cp == 0), so we could test either fp
  // or cp.
  tst(cp, cp);
  str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in r0.
  if (!value.is(r0)) {
    mov(r0, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(sp, MemOperand(r3));

  // Unwind the handlers until the ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind);
  bind(&fetch_next);
  ldr(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  ldr(r2, MemOperand(sp, StackHandlerConstants::kStateOffset));
  tst(r2, Operand(StackHandler::KindField::kMask));
  b(ne, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(r2);
  str(r2, MemOperand(r3));
  // Get the code object (r1) and state (r2).  Clear the context and frame
  // pointer (0 was saved in the handler).
  ldm(ia_w, sp, r1.bit() | r2.bit() | cp.bit() | fp.bit());

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(ip));
  DCHECK(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand::Zero());
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
#endif

  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, offset));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull);

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    // Restoring ip is not needed. ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync
// with ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc.
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  eor(t0, t0, Operand(scratch));

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  // hash = hash ^ (hash >> 16);
  eor(t0, t0, Operand(t0, LSR, 16));
}
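
// For reference, the same sequence as a plain C++ sketch (uint32_t
// wrap-around matches the 32-bit ARM registers above, and the multiply by
// 2057 == 1 + 8 + 2048 is what the shift-by-3 and shift-by-11 adds
// implement):
//
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);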


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register t0,
                                              Register t1,
                                              Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'result'.
  //            Unchanged on bailout so 'key' or 'result' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //
  // t1 - used to hold the capacity mask of the dictionary.
  //
  // t2 - used for the index into the dictionary.
  Label done;

  GetNumberHash(t0, t1);

  // Compute the capacity mask.
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  SmiUntag(t1);
  sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    mov(t2, t0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
    cmp(key, Operand(ip));
    if (i != kNumberDictionaryProbes - 1) {
      b(eq, &done);
    } else {
      b(ne, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  DCHECK_EQ(FIELD, 0);
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  ldr(result, FieldMemOperand(t2, kValueOffset));
}
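
// A plain C++ sketch of the entry addressing used above. Each entry is
// kEntrySize == 3 words (key, value, details) starting at
// kElementsStartOffset; probe_offset(i) stands for
// SeededNumberDictionary::GetProbeOffset(i):
//
//   index   = (hash + probe_offset(i)) & (capacity - 1);
//   entry   = elements + index * 3 * kPointerSize;
//   key     = entry[kElementsStartOffset];
//   value   = entry[kElementsStartOffset + 1 * kPointerSize];  // kValueOffset
//   details = entry[kElementsStartOffset + 2 * kPointerSize];  // kDetailsOffset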


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!scratch1.is(scratch2));
  DCHECK(!scratch1.is(ip));
  DCHECK(!scratch2.is(ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top =
      reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit =
      reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address register.
  Register topaddr = scratch1;
  mov(topaddr, Operand(allocation_top));

  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into ip.
    ldm(ia, topaddr, result.bit() | ip.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry. ip is used
      // immediately below so this use of ip does not cause difference with
      // respect to register content between debug and release mode.
      ldr(ip, MemOperand(topaddr));
      cmp(result, ip);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit into ip. Result already contains allocation top.
    ldr(ip, MemOperand(topaddr, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand(ip));
      b(hs, gc_required);
    }
    mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. We must preserve the ip register at this
  // point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(scratch2, source, bits_operand, SetCC, cond);
      source = scratch2;
      cond = cc;
    }
  }
  b(cs, gc_required);
  cmp(scratch2, Operand(ip));
  b(hi, gc_required);
  str(scratch2, MemOperand(topaddr));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}
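
// A sketch of what the decomposition loop above emits: object_size is split
// into chunks of at most eight significant bits starting at an even bit
// position, so every chunk is encodable as a single ARM immediate (an 8-bit
// value rotated by an even amount). For example, 0x3504 becomes
// add 0x104 followed by add 0x3400. Every add sets the flags, each add after
// the first executes only if the previous one did not carry, and the
// b(cs, gc_required) after the loop catches any overflow.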


void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  // Assert that the register arguments are different and that none of
  // them are ip. ip is used explicitly in the code generated below.
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!scratch1.is(scratch2));
  DCHECK(!object_size.is(ip));
  DCHECK(!result.is(ip));
  DCHECK(!scratch1.is(ip));
  DCHECK(!scratch2.is(ip));

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top =
      reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit =
      reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address.
  Register topaddr = scratch1;
  mov(topaddr, Operand(allocation_top));

  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into ip.
    ldm(ia, topaddr, result.bit() | ip.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry. ip is used
      // immediately below so this use of ip does not cause difference with
      // respect to register content between debug and release mode.
      ldr(ip, MemOperand(topaddr));
      cmp(result, ip);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit into ip. Result already contains allocation top.
    ldr(ip, MemOperand(topaddr, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand(ip));
      b(hs, gc_required);
    }
    mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(scratch2, result, Operand(object_size), SetCC);
  }
  b(cs, gc_required);
  cmp(scratch2, Operand(ip));
  b(hi, gc_required);

  // Update allocation top. scratch2 temporarily holds the new top.
  if (emit_debug_code()) {
    tst(scratch2, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  str(scratch2, MemOperand(topaddr));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                              Register scratch) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, object, Operand(~kHeapObjectTagMask));
#ifdef DEBUG
  // Check that the object un-allocated is below the current top.
  mov(scratch, Operand(new_space_allocation_top));
  ldr(scratch, MemOperand(scratch));
  cmp(object, scratch);
  Check(lt, kUndoAllocationOfNonAllocatedMemory);
#endif
  // Write the address of the object to un-allocate as the current top.
  mov(scratch, Operand(new_space_allocation_top));
  str(object, MemOperand(scratch));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  const Register temp = type_reg.is(no_reg) ? ip : type_reg;

  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, temp, type);
}


void MacroAssembler::CheckObjectTypeRange(Register object,
                                          Register map,
                                          InstanceType min_type,
                                          InstanceType max_type,
                                          Label* false_label) {
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE < 256);
  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(ip, FieldMemOperand(map, Map::kInstanceTypeOffset));
  sub(ip, ip, Operand(min_type));
  cmp(ip, Operand(max_type - min_type));
  b(hi, false_label);
}
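
// The sub/cmp/b(hi) above is the usual unsigned range-check idiom: after
// subtracting min_type, any instance type below min_type wraps around to a
// large unsigned value, so a single unsigned comparison rejects both
// type < min_type and type > max_type. In C++ terms:
//
//   if (uint32_t(type - min_type) > uint32_t(max_type - min_type)) goto fail;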


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  // Registers map and type_reg can be ip. These two lines assert
  // that ip can be used with the two instructions (the constants
  // will never need ip).
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE < 256);
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  DCHECK(!obj.is(ip));
  LoadRoot(ip, index);
  cmp(obj, ip);
}


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(ls, fail);
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}
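
// Taken together with the STATIC_ASSERTs on the element-kind ordering:
// CheckFastElements accepts all four fast kinds, CheckFastObjectElements
// rejects anything at or below the holey-smi maximum (the two smi kinds)
// and anything above the holey-element maximum, so it accepts only
// FAST_ELEMENTS and FAST_HOLEY_ELEMENTS, and CheckFastSmiElements below
// accepts only the two smi kinds.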


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(hi, fail);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register value_reg,
    Register key_reg,
    Register elements_reg,
    Register scratch1,
    LowDwVfpRegister double_scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, store;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number.
  CheckMap(value_reg,
           scratch1,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
  // Force a canonical NaN.
  if (emit_debug_code()) {
    vmrs(ip);
    tst(ip, Operand(kVFPDefaultNaNModeControlBit));
    Assert(ne, kDefaultNaNModeNotSet);
  }
  VFPCanonicalizeNaN(double_scratch);
  b(&store);

  bind(&smi_value);
  SmiToDouble(double_scratch, value_reg);

  bind(&store);
  add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
  vstr(double_scratch,
       FieldMemOperand(scratch1,
                       FixedDoubleArray::kHeaderSize - elements_offset));
}


void MacroAssembler::CompareMap(Register obj,
                                Register scratch,
                                Handle<Map> map,
                                Label* early_success) {
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMap(scratch, map, early_success);
}


void MacroAssembler::CompareMap(Register obj_map,
                                Handle<Map> map,
                                Label* early_success) {
  cmp(obj_map, Operand(map));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, scratch, map, &success);
  b(ne, fail);
  bind(&success);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(ip, index);
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  ldr(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  Jump(success, RelocInfo::CODE_TARGET, eq);
  bind(&fail);
}


void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, Operand(cell));
  ldr(scratch, FieldMemOperand(scratch, WeakCell::kValueOffset));
  cmp(value, scratch);
}


void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  mov(value, Operand(cell));
  ldr(value, FieldMemOperand(value, WeakCell::kValueOffset));
  JumpIfSmi(value, miss);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  Label non_instance;
  if (miss_on_bound_function) {
    // Check that the receiver isn't a smi.
    JumpIfSmi(function, miss);

    // Check that the function really is a function. Load map into result reg.
    CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
    b(ne, miss);

    ldr(scratch,
        FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    ldr(scratch,
        FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
    tst(scratch,
        Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
    b(ne, miss);

    // Make sure that the function has an instance prototype.
    ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
    tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
    b(ne, &non_instance);
  }

  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));

  if (miss_on_bound_function) {
    jmp(&done);

    // Non-instance prototype: Fetch prototype from constructor field
    // in initial map.
    bind(&non_instance);
    ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
  }

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  return ref0.address() - ref1.address();
}


void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
    int stack_space,
    MemOperand return_value_operand,
    MemOperand* context_restore_operand) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(isolate()),
      next_address);
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(isolate()),
      next_address);

  DCHECK(function_address.is(r1) || function_address.is(r2));

  Label profiler_disabled;
  Label end_profiler_check;
  mov(r9, Operand(ExternalReference::is_profiling_address(isolate())));
  ldrb(r9, MemOperand(r9, 0));
  cmp(r9, Operand(0));
  b(eq, &profiler_disabled);

  // Additional parameter is the address of the actual callback.
  mov(r3, Operand(thunk_ref));
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  Move(r3, function_address);
  bind(&end_profiler_check);

  // Allocate HandleScope in callee-save registers.
  mov(r9, Operand(next_address));
  ldr(r4, MemOperand(r9, kNextOffset));
  ldr(r5, MemOperand(r9, kLimitOffset));
  ldr(r6, MemOperand(r9, kLevelOffset));
  add(r6, r6, Operand(1));
  str(r6, MemOperand(r9, kLevelOffset));

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, r0);
    mov(r0, Operand(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  // Native call returns to the DirectCEntry stub which redirects to the
  // return address pushed on stack (could have moved after GC).
  // DirectCEntry stub itself is generated early and never moves.
  DirectCEntryStub stub(isolate());
  stub.GenerateCall(this, r3);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, r0);
    mov(r0, Operand(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label return_value_loaded;

  // Load value from ReturnValue.
  ldr(r0, return_value_operand);
  bind(&return_value_loaded);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  str(r4, MemOperand(r9, kNextOffset));
  if (emit_debug_code()) {
    ldr(r1, MemOperand(r9, kLevelOffset));
    cmp(r1, r6);
    Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
  }
  sub(r6, r6, Operand(1));
  str(r6, MemOperand(r9, kLevelOffset));
  ldr(ip, MemOperand(r9, kLimitOffset));
  cmp(r5, ip);
  b(ne, &delete_allocated_handles);

  // Check if the function scheduled an exception.
  bind(&leave_exit_frame);
  LoadRoot(r4, Heap::kTheHoleValueRootIndex);
  mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate())));
  ldr(r5, MemOperand(ip));
  cmp(r4, r5);
  b(ne, &promote_scheduled_exception);
  bind(&exception_handled);

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    ldr(cp, *context_restore_operand);
  }
  // LeaveExitFrame expects unwind space to be in a register.
  mov(r4, Operand(stack_space));
  LeaveExitFrame(false, r4, !restore_context);
  mov(pc, lr);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallExternalReference(
        ExternalReference(Runtime::kPromoteScheduledException, isolate()),
        0);
  }
  jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  str(r5, MemOperand(r9, kLimitOffset));
  mov(r4, r0);
  PrepareCallCFunction(1, r5);
  mov(r0, Operand(ExternalReference::isolate_address(isolate())));
  CallCFunction(
      ExternalReference::delete_handle_scope_extensions(isolate()), 1);
  mov(r0, r4);
  jmp(&leave_exit_frame);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}

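// Convert the smi in |smi| to a double in |value|. On VFP3 this is done with
// a single fixed-point vcvt: a smi is the integer value shifted left by one,
// so converting with one fraction bit undoes the tag shift and converts in
// one instruction. Without VFP3 the smi is untagged into ip first and then
// converted from there.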
void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
  if (CpuFeatures::IsSupported(VFP3)) {
    vmov(value.low(), smi);
    vcvt_f64_s32(value, 1);
  } else {
    SmiUntag(ip, smi);
    vmov(value.low(), ip);
    vcvt_f64_s32(value, value.low());
  }
}


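// Set the condition flags according to whether double_input is exactly
// representable as a 32-bit signed integer: the value is converted to int32
// and back, and eq is set iff the round trip compares equal to the original.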
void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
                                       LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}


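// Convert double_input to a 32-bit signed integer in result using
// round-to-zero. The eq flag is set iff the conversion was exact, i.e. the
// double round-trips through int32 unchanged.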
void MacroAssembler::TryDoubleToInt32Exact(Register result,
                                           DwVfpRegister double_input,
                                           LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}


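// Compute floor(double_input) as an int32 in |result|. Branches to |exact|
// when the input was already an integer (result holds it), to |done| when a
// floor adjustment produced a representable int32, and falls through when the
// input is NaN, an infinity, or out of int32 range. A hedged usage sketch
// (the label layout and slow_path are this comment's assumptions, not code
// from this file):
//
//   Label done, exact;
//   TryInt32Floor(r0, d0, r1, d1, &done, &exact);
//   b(&slow_path);  // Fall-through: NaN, infinity or int32 overflow.
//   bind(&exact);   // d0 was already integral; r0 holds it exactly.
//   bind(&done);    // r0 holds floor(d0).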
void MacroAssembler::TryInt32Floor(Register result,
                                   DwVfpRegister double_input,
                                   Register input_high,
                                   LowDwVfpRegister double_scratch,
                                   Label* done,
                                   Label* exact) {
  DCHECK(!result.is(input_high));
  DCHECK(!double_input.is(double_scratch));
  Label negative, exception;

  VmovHigh(input_high, double_input);

  // Test for NaN and infinities.
  Sbfx(result, input_high,
       HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  cmp(result, Operand(-1));
  b(eq, &exception);
  // Test for values that can be exactly represented as a
  // signed 32-bit integer.
  TryDoubleToInt32Exact(result, double_input, double_scratch);
  // If exact, return (result already fetched).
  b(eq, exact);
  cmp(input_high, Operand::Zero());
  b(mi, &negative);

  // Input is in (+0, +inf).
  // If result equals 0x7fffffff, the input was either out of range or in
  // (0x7fffffff, 0x80000000). We ignore this last case: its floor
  // (0x7fffffff) would still fit into an int32, but we conservatively treat
  // such inputs as out of range and always go to the exception path.
  // If result < 0x7fffffff, go to done; result has already been fetched.
  cmn(result, Operand(1));
  b(mi, &exception);
  b(done);

  // Input is in (-inf, -0).
  // For a non-integer negative number x, floor(x) == round_to_zero(x) - 1.
  bind(&negative);
  sub(result, result, Operand(1), SetCC);
  // If result is still negative, go to done; result has been fetched.
  // Otherwise we overflowed and fall through to the exception path.
  b(mi, done);
  bind(&exception);
}

void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DwVfpRegister double_input,
                                                Label* done) {
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());

  // If result is not saturated (0x7fffffff or 0x80000000), we are done.
  sub(ip, result, Operand(1));
  cmp(ip, Operand(0x7ffffffe));
  b(lt, done);
}


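// Truncate double_input toward zero into result. The inline conversion is
// tried first; only if it saturates (the double is outside int32 range) is
// the out-of-line DoubleToIStub called on a stack copy of the input.
// A usage sketch, assuming the value to truncate is already in d0:
//
//   TruncateDoubleToI(r0, d0);  // r0 = d0 truncated toward zero.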
void MacroAssembler::TruncateDoubleToI(Register result,
                                       DwVfpRegister double_input) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through then the inline version didn't succeed - call the
  // stub instead.
  push(lr);
  sub(sp, sp, Operand(kDoubleSize));  // Put input on stack.
  vstr(double_input, MemOperand(sp, 0));

  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
  CallStub(&stub);

  add(sp, sp, Operand(kDoubleSize));
  pop(lr);

  bind(&done);
}


void MacroAssembler::TruncateHeapNumberToI(Register result,
                                           Register object) {
  Label done;
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  DCHECK(!result.is(object));

  vldr(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through then the inline version didn't succeed - call the
  // stub instead.
  push(lr);
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(lr);

  bind(&done);
}


void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch1,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}


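// Extract the |num_least_bits| least significant bits of the smi payload in
// |src| into |dst|. With ARMv7 a single ubfx starting above the tag bit does
// the job; otherwise the smi is untagged and masked. For example, with
// num_least_bits == 2 a smi holding 13 (binary 1101) yields 1 (binary 01).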
void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
    ubfx(dst, src, kSmiTagSize, num_least_bits);
  } else {
    SmiUntag(dst, src);
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  and_(dst, src, Operand((1 << num_least_bits) - 1));
}


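// Call into the C++ runtime through CEntryStub. The argument count is
// materialized in r0 and the function's entry in r1, as CEntryStub expects.
// Typical use elsewhere in this file (see Abort below):
//
//   CallRuntime(Runtime::kAbort, 1);  // One argument already pushed.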
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // All parameters are on the stack. r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f, isolate())));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  GetBuiltinEntry(r2, id);
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(r2));
    Call(r2);
    call_wrapper.AfterCall();
  } else {
    DCHECK(flag == JUMP_FUNCTION);
    Jump(r2);
  }
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into target register.
  ldr(target,
      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset));
  // Load the JavaScript builtin function from the builtins object.
  ldr(target, FieldMemOperand(target,
                              JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  DCHECK(!target.is(r1));
  GetBuiltinFunction(r1, id);
  // Load the code entry point from the builtins object.
  ldr(target, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
}


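// Stats-counter helpers. Each counter lives at a fixed external address; the
// helpers load that address into scratch2 and update the cell in place when
// native code counters are enabled. Usage from LookupNumberStringCache below:
//
//   IncrementCounter(isolate()->counters()->number_to_string_native(),
//                    1, scratch1, scratch2);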
void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
  if (emit_debug_code())
    Check(cond, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(ip));
    Label ok;
    push(elements);
    ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cond, BailoutReason reason) {
  Label L;
  b(cond, &L);
  Abort(reason);
  // Will not return here.
  bind(&L);
}


void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  mov(r0, Operand(Smi::FromInt(reason)));
  push(r0);

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // Will not return here.
  if (is_const_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    static const int kExpectedAbortInstructions = 7;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


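// Walk |context_chain_length| links up the context chain from cp into |dst|.
// For example, LoadContext(r0, 2) follows two PREVIOUS links; a length of
// zero copies cp itself so that a later store through dst cannot clobber the
// incoming context register.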
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in cp).
    mov(dst, cp);
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  ldr(scratch,
      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  ldr(scratch,
      MemOperand(scratch,
                 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  ldr(ip, FieldMemOperand(scratch, offset));
  cmp(map_in_out, ip);
  b(ne, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  ldr(map_in_out, FieldMemOperand(scratch, offset));
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  ldr(function,
      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  ldr(function, FieldMemOperand(function,
                                GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  ldr(function, MemOperand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    b(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, not_power_of_two_or_zero);
  tst(scratch, reg);
  b(ne, not_power_of_two_or_zero);
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
    Register reg,
    Register scratch,
    Label* zero_and_neg,
    Label* not_power_of_two) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, zero_and_neg);
  tst(scratch, reg);
  b(ne, not_power_of_two);
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


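// The two helpers below untag with SmiUntag(dst, src, SetCC), an arithmetic
// shift right by one that moves the smi tag bit into the shifter carry:
// carry clear means the value was a smi, carry set means it was a heap
// object pointer (whose low bit is 1).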
void MacroAssembler::UntagAndJumpIfSmi(
    Register dst, Register src, Label* smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cc, smi_case);  // Shifter carry is not set for a smi.
}


void MacroAssembler::UntagAndJumpIfNotSmi(
    Register dst, Register src, Label* non_smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cs, non_smi_case);  // Shifter carry is set for a non-smi.
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmi);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(eq, kOperandIsNotSmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAString);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(lo, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAName);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, LAST_NAME_TYPE);
    pop(object);
    Check(le, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    CompareRoot(object, Heap::kUndefinedValueRootIndex);
    b(eq, &done_checking);
    ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    CompareRoot(reg, index);
    Check(eq, kHeapNumberMapRegisterClobbered);
  }
}


void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  cmp(scratch, heap_number_map);
  b(ne, on_not_heap_number);
}


void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Register scratch3,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch3;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  ldr(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
  // Divide length by two (length is a smi).
  mov(mask, Operand(mask, ASR, kSmiTagSize + 1));
  sub(mask, mask, Operand(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  JumpIfSmi(object, &is_smi);
  CheckMap(object,
           scratch1,
           Heap::kHeapNumberMapRootIndex,
           not_found,
           DONT_DO_SMI_CHECK);

  STATIC_ASSERT(8 == kDoubleSize);
  add(scratch1,
      object,
      Operand(HeapNumber::kValueOffset - kHeapObjectTag));
  ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
  eor(scratch1, scratch1, Operand(scratch2));
  and_(scratch1, scratch1, Operand(mask));

  // Calculate address of entry in string cache: each entry consists
  // of two pointer sized fields.
  add(scratch1,
      number_string_cache,
      Operand(scratch1, LSL, kPointerSizeLog2 + 1));

  Register probe = mask;
  ldr(probe, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  sub(scratch2, object, Operand(kHeapObjectTag));
  vldr(d0, scratch2, HeapNumber::kValueOffset);
  sub(probe, probe, Operand(kHeapObjectTag));
  vldr(d1, probe, HeapNumber::kValueOffset);
  VFPCompareAndSetFlags(d0, d1);
  b(ne, not_found);  // The cache did not contain this value.
  b(&load_result_from_cache);

  bind(&is_smi);
  Register scratch = scratch1;
  and_(scratch, mask, Operand(object, ASR, 1));
  // Calculate address of entry in string cache: each entry consists
  // of two pointer sized fields.
  add(scratch,
      number_string_cache,
      Operand(scratch, LSL, kPointerSizeLog2 + 1));

  // Check if the entry is the smi we are looking for.
  ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  cmp(object, probe);
  b(ne, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(),
                   1,
                   scratch1,
                   scratch2);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  and_(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  b(eq, &succeed);
  cmp(reg, Operand(SYMBOL_TYPE));
  b(ne, not_unique_name);

  bind(&succeed);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* gc_required,
                                        TaggingMode tagging_mode,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store heap number map in the allocated object.
  if (tagging_mode == TAG_RESULT) {
    str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
  } else {
    str(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
  }
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  sub(scratch1, result, Operand(kHeapObjectTag));
  vstr(value, scratch1, HeapNumber::kValueOffset);
}


// Copies a fixed number of fields of heap objects from src to dst.
void MacroAssembler::CopyFields(Register dst,
                                Register src,
                                LowDwVfpRegister double_scratch,
                                int field_count) {
  int double_count = field_count / (DwVfpRegister::kSizeInBytes / kPointerSize);
  for (int i = 0; i < double_count; i++) {
    vldr(double_scratch, FieldMemOperand(src, i * DwVfpRegister::kSizeInBytes));
    vstr(double_scratch, FieldMemOperand(dst, i * DwVfpRegister::kSizeInBytes));
  }

  STATIC_ASSERT(SwVfpRegister::kSizeInBytes == kPointerSize);
  STATIC_ASSERT(2 * SwVfpRegister::kSizeInBytes == DwVfpRegister::kSizeInBytes);

  int remain = field_count % (DwVfpRegister::kSizeInBytes / kPointerSize);
  if (remain != 0) {
    vldr(double_scratch.low(),
         FieldMemOperand(src, (field_count - 1) * kPointerSize));
    vstr(double_scratch.low(),
         FieldMemOperand(dst, (field_count - 1) * kPointerSize));
  }
}


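// Copy |length| bytes from src to dst, post-incrementing both pointers.
// Short copies go byte by byte; longer ones first advance src to a word
// boundary, then move word-sized chunks (stored bytewise on targets without
// unaligned-access support, since dst may still be unaligned), and finish
// with a byte loop for the tail. Clobbers length and scratch.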
void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  cmp(length, Operand(kPointerSize));
  b(le, &byte_loop);

  bind(&align_loop_1);
  tst(src, Operand(kPointerSize - 1));
  b(eq, &word_loop);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(&align_loop_1);
  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    tst(src, Operand(kPointerSize - 1));
    Assert(eq, kExpectingAlignmentForCopyBytes);
  }
  cmp(length, Operand(kPointerSize));
  b(lt, &byte_loop);
  ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
  if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) {
    str(scratch, MemOperand(dst, kPointerSize, PostIndex));
  } else {
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
  }
  sub(length, length, Operand(kPointerSize));
  b(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  cmp(length, Operand::Zero());
  b(eq, &done);
  bind(&byte_loop_1);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);
  bind(&done);
}


void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  b(&entry);
  bind(&loop);
  str(filler, MemOperand(start_offset, kPointerSize, PostIndex));
  bind(&entry);
  cmp(start_offset, end_offset);
  b(lt, &loop);
}


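// Set the condition flags according to whether the full 32 D-registers are
// available: ne if d16-d31 exist, eq if only d0-d15 do. SaveFPRegs and
// RestoreFPRegs below use this so the high bank is only stored and loaded
// when present, with the stack space reserved either way.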
void MacroAssembler::CheckFor32DRegs(Register scratch) {
  mov(scratch, Operand(ExternalReference::cpu_features()));
  ldr(scratch, MemOperand(scratch));
  tst(scratch, Operand(1u << VFP32DREGS));
}


void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vstm(db_w, location, d16, d31, ne);
  sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
  vstm(db_w, location, d0, d15);
}


void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vldm(ia_w, location, d0, d15);
  vldm(ia_w, location, d16, d31, ne);
  add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch1, first, Operand(kFlatOneByteStringMask));
  and_(scratch2, second, Operand(kFlatOneByteStringMask));
  cmp(scratch1, Operand(kFlatOneByteStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatOneByteStringTag), eq);
  b(ne, failure);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch, type, Operand(kFlatOneByteStringMask));
  cmp(scratch, Operand(kFlatOneByteStringTag));
  b(ne, failure);
}

static const int kRegisterPassedArguments = 4;


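// Compute how many words of stack are needed for arguments that do not fit
// in registers under the active calling convention. Worked example for the
// soft-float ABI: 3 register arguments plus 2 doubles count as 3 + 2*2 = 7
// word arguments, of which r0-r3 take 4, leaving 3 words on the stack. Under
// hard-float, doubles travel in FP registers and only spill once
// DoubleRegister::NumRegisters() is exceeded.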
int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (use_eabi_hardfloat()) {
    // In the hard floating point calling convention, we can use
    // all double registers to pass doubles.
    if (num_double_arguments > DoubleRegister::NumRegisters()) {
      stack_passed_words +=
          2 * (num_double_arguments - DoubleRegister::NumRegisters());
    }
  } else {
    // In the soft floating point calling convention, every double
    // argument is passed using two registers.
    num_reg_arguments += 2 * num_double_arguments;
  }
  // Up to four simple arguments are passed in registers r0..r3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  return stack_passed_words;
}


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  SmiTst(string);
  Check(ne, kNonObject);

  ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
  ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));

  and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
  cmp(ip, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register; it is restored at the end of
  // this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, index, &index_tag_bad);
  b(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  ldr(ip, FieldMemOperand(string, String::kLengthOffset));
  cmp(index, ip);
  Check(lt, kIndexIsTooLarge);

  cmp(index, Operand(Smi::FromInt(0)));
  Check(ge, kIndexIsNegative);

  SmiUntag(index, index);
}


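// Reserve and align stack space for an outgoing C call. Must be paired with
// CallCFunction, which both makes the call and drops the reservation. The
// pattern used throughout this file is:
//
//   PrepareCallCFunction(1, r5);  // One register argument; r5 is scratch.
//   mov(r0, Operand(ExternalReference::isolate_address(isolate())));
//   CallCFunction(
//       ExternalReference::delete_handle_scope_extensions(isolate()), 1);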
void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}


void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
  DCHECK(src.is(d0));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
  MovToFloatParameter(src);
}


void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
                                          DwVfpRegister src2) {
  DCHECK(src1.is(d0));
  DCHECK(src2.is(d1));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src1);
    vmov(r2, r3, src2);
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  mov(ip, Operand(function));
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if V8_HOST_ARCH_ARM
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


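// Compute, in |result|, the address of the constant pool slot that the load
// at |ldr_location| reads, handling both the small encoding (a single pc- or
// pp-relative ldr) and, with out-of-line constant pools enabled, the extended
// movw/movt/ldr sequence. Code patching uses this to find the value such a
// load refers to.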
void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
                                               Register result,
                                               Register scratch) {
  Label small_constant_pool_load, load_result;
  ldr(result, MemOperand(ldr_location));

  if (FLAG_enable_ool_constant_pool) {
    // Check if this is an extended constant pool load.
    and_(scratch, result, Operand(GetConsantPoolLoadMask()));
    teq(scratch, Operand(GetConsantPoolLoadPattern()));
    b(eq, &small_constant_pool_load);
    if (emit_debug_code()) {
      // Check that the instruction sequence is:
      //   movw reg, #offset_low
      //   movt reg, #offset_high
      //   ldr reg, [pp, reg]
      Instr patterns[] = {GetMovWPattern(), GetMovTPattern(),
                          GetLdrPpRegOffsetPattern()};
      for (int i = 0; i < 3; i++) {
        ldr(result, MemOperand(ldr_location, i * kInstrSize));
        and_(result, result, Operand(patterns[i]));
        cmp(result, Operand(patterns[i]));
        Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
      }
      // Result was clobbered. Restore it.
      ldr(result, MemOperand(ldr_location));
    }

    // Get the offset into the constant pool. First extract movw immediate into
    // result.
    and_(scratch, result, Operand(0xfff));
    mov(ip, Operand(result, LSR, 4));
    and_(ip, ip, Operand(0xf000));
    orr(result, scratch, Operand(ip));
    // Then extract movt immediate and or into result.
    ldr(scratch, MemOperand(ldr_location, kInstrSize));
    and_(ip, scratch, Operand(0xf0000));
    orr(result, result, Operand(ip, LSL, 12));
    and_(scratch, scratch, Operand(0xfff));
    orr(result, result, Operand(scratch, LSL, 16));

    b(&load_result);
  }

  bind(&small_constant_pool_load);
  if (emit_debug_code()) {
    // Check that the instruction is a ldr reg, [<pc or pp> + offset].
    and_(result, result, Operand(GetConsantPoolLoadPattern()));
    cmp(result, Operand(GetConsantPoolLoadPattern()));
    Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
    // Result was clobbered. Restore it.
    ldr(result, MemOperand(ldr_location));
  }

  // Get the offset into the constant pool.
  const uint32_t kLdrOffsetMask = (1 << 12) - 1;
  and_(result, result, Operand(kLdrOffsetMask));

  bind(&load_result);
  // Get the address of the constant.
  if (FLAG_enable_ool_constant_pool) {
    add(result, pp, Operand(result));
  } else {
    add(result, ldr_location, Operand(result));
    add(result, result, Operand(Instruction::kPCReadOffset));
  }
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  Bfc(scratch, object, 0, kPageSizeBits);
  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  tst(scratch, Operand(mask));
  b(cc, condition_met);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 0);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


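// Incremental marking colors are encoded in two mark bits per object: white
// is 00, black is 10 and grey is 11 (01 is impossible); see the pattern
// DCHECKs in EnsureNotWhite below. HasColor tests the bit pair named by
// (first_bit, second_bit), taking care of pairs that straddle a cell
// boundary in the mark bitmap.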
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(ip, Operand(mask_scratch));
  b(first_bit == 1 ? eq : ne, &other_color);
  // Shift left 1 by adding.
  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
  b(eq, &word_boundary);
  tst(ip, Operand(mask_scratch));
  b(second_bit == 1 ? ne : eq, has_color);
  jmp(&other_color);

  bind(&word_boundary);
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  tst(ip, Operand(1));
  b(second_bit == 1 ? ne : eq, has_color);
  bind(&other_color);
}


// Detect some, but not all, common pointer-free objects. This is used by the
// incremental write barrier which doesn't care about oddballs (they are always
// marked black immediately so this code is not hit).
void MacroAssembler::JumpIfDataObject(Register value,
                                      Register scratch,
                                      Label* not_data_object) {
  Label is_data_object;
  ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  b(eq, &is_data_object);
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  tst(scratch, Operand(kIsIndirectStringMask | kIsNotStringMask));
  b(ne, not_data_object);
  bind(&is_data_object);
}

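// Translate an object address into the cell and bit of its page's mark
// bitmap: bitmap_reg gets the address of the bitmap cell and mask_reg a
// single-bit mask for the object's first mark bit. The page base comes from
// masking off the low kPageSizeBits; the next Bitmap::kBitsPerCellLog2
// address bits (above the pointer-alignment bits) select the bit within the
// cell, and the remaining page-offset bits select the cell itself.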
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
  mov(ip, Operand(1));
  mov(mask_reg, Operand(ip, LSL, mask_reg));
}


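// A worked example of the bitmap addressing above, assuming 32-bit ARM
// (kPointerSizeLog2 == 2) and 32-bit bitmap cells (kBitsPerCellLog2 == 5):
//   addr = page_start + 0x1234
//   bits 2..6 of addr  -> bit index within a cell    = (0x1234 >> 2) & 31 = 13
//   bits 7..19 of addr -> cell index within the page = 0x1234 >> 7       = 36
// bitmap_reg then points at cell 36 of the page's mark bitmap and mask_reg
// holds 1 << 13, the mark bit for the word at that address.

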
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Register load_scratch,
    Label* value_is_white_and_not_data) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(mask_scratch, load_scratch);
  b(ne, &done);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    // LSL may overflow, making the check conservative.
    tst(load_scratch, Operand(mask_scratch, LSL, 1));
    b(eq, &ok);
    stop("Impossible marking bit pattern");
    bind(&ok);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for heap numbers and non-indirect strings.
  Register map = load_scratch;  // Holds map while checking type.
  Register length = load_scratch;  // Holds length after testing type.
  Label is_data_object;

  // Check for heap numbers.
  ldr(map, FieldMemOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  mov(length, Operand(HeapNumber::kSize), LeaveCC, eq);
  b(eq, &is_data_object);

  // Check for strings.
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not an indirect (cons or sliced) string, then
  // it's an object containing no GC pointers.
  Register instance_type = load_scratch;
  ldrb(instance_type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  tst(instance_type, Operand(kIsIndirectStringMask | kIsNotStringMask));
  b(ne, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
  DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
  tst(instance_type, Operand(kExternalStringTag));
  mov(length, Operand(ExternalString::kSize), LeaveCC, ne);
  b(ne, &is_data_object);

  // Sequential string, either Latin1 or UC16.
  // For Latin1 (char-size of 1) we shift the smi tag away to get the length.
  // For UC16 (char-size of 2) we just leave the smi tag in place, thereby
  // getting the length multiplied by 2.
  DCHECK(kOneByteStringTag == 4 && kStringEncodingMask == 4);
  DCHECK(kSmiTag == 0 && kSmiTagSize == 1);
  ldr(ip, FieldMemOperand(value, String::kLengthOffset));
  tst(instance_type, Operand(kStringEncodingMask));
  mov(ip, Operand(ip, LSR, 1), LeaveCC, ne);
  add(length, ip, Operand(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, length, Operand(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  orr(ip, ip, Operand(mask_scratch));
  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));

  and_(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask));
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
  add(ip, ip, Operand(length));
  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));

  bind(&done);
}


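// Sketch of the sequential-string size computation above, assuming a 32-bit
// heap where SeqString::kHeaderSize == 12 and objects are 8-byte aligned
// (kObjectAlignmentMask == 7); the length field holds a smi, i.e.
// string->length() << 1:
//   Latin1, length 5: ip = 5  (smi tag shifted away),  5 + 12 + 7 = 24, &~7 -> 24
//   UC16,   length 5: ip = 10 (smi tag kept),         10 + 12 + 7 = 29, &~7 -> 24
// Both yield the object size in bytes, rounded up to the allocation grain.

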
void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  Usat(output_reg, 8, Operand(input_reg));
}


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;

  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);

  // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest
  // rounding mode will provide the correct result.
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());

  bind(&done);
}


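// Behaviour of ClampDoubleToUint8 on a few representative inputs (a sketch
// of the two paths above, not exhaustive):
//   300.0 -> 255  (the >= 255 path)
//   254.6 -> 255  (vcvt_u32_f64, round to nearest)
//   -3.7  -> 0    (negative inputs saturate to 0 in the unsigned convert)
//   NaN   -> 0    (the compare is unordered, so ge is false, and the
//                  unsigned convert of NaN yields 0)

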
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);

  jmp(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);

  // Check that there are no elements. Register r2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}


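// Shape of the loop in CheckEnumCache: the receiver (in r0 on entry) only
// needs a valid enum cache, so its length check happens once, before the
// jmp to start; every other object on the prototype chain must additionally
// have an *empty* cache (EnumLength == 0), which is what the bind(&next)
// block enforces. The walk terminates when Map::kPrototypeOffset loads
// null_value.

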
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  add(scratch_reg, receiver_reg,
      Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Operand(new_space_start));
  b(lt, no_memento_found);
  mov(ip, Operand(new_space_allocation_top));
  ldr(ip, MemOperand(ip));
  cmp(scratch_reg, ip);
  b(gt, no_memento_found);
  ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
  cmp(scratch_reg,
      Operand(isolate()->factory()->allocation_memento_map()));
}


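// Note for callers of TestJSArrayForAllocationMemento: the function falls
// through with the flags from the final map cmp still set, so a memento is
// present exactly when a following b(eq, ...) is taken. A minimal usage
// sketch (label names are illustrative only):
//
//   Label memento_found, no_memento_found;
//   TestJSArrayForAllocationMemento(receiver, scratch, &no_memento_found);
//   b(eq, &memento_found);  // Map matched: memento present.
//   bind(&no_memento_found);

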
Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  for (int i = 0; i < Register::NumAllocatableRegisters(); i++) {
    Register candidate = Register::FromAllocationIndex(i);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}


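// GetRegisterThatIsNotOneOf scans the allocatable registers in allocation
// order and returns the first one not mentioned; parameters not passed are
// expected to default to no_reg in the declaration, and no_reg.is_valid()
// is false, so they simply don't constrain the search. A usage sketch
// (assuming r0 is allocatable and not excluded here):
//   Register scratch = GetRegisterThatIsNotOneOf(r1, r2);  // e.g. r0

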
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  // Start the walk at the object itself.
  mov(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  cmp(current, Operand(factory->null_value()));
  b(ne, &loop_again);
}


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif


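// The aliasing test works by comparing two counts: booleans summed per
// argument versus distinct bits in the accumulated RegList. For example,
// AreAliased(r0, r1, r0) sees 3 valid registers but only 2 distinct bits
// (NumRegs == 2), so it returns true; AreAliased(r0, r1, r2) gives 3 == 3
// and returns false.

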
CodePatcher::CodePatcher(byte* address,
                         int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(NULL, address, size_ + Assembler::kGap),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    CpuFeatures::FlushICache(address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


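// Typical CodePatcher usage (a sketch, assuming the FlushICache enum is
// scoped inside CodePatcher; the address must point at real generated
// code):
//   CodePatcher patcher(target_address, 1, CodePatcher::FLUSH);
//   patcher.masm()->mov(r0, Operand(0));  // Overwrite one instruction.
// The destructor then flushes the icache and DCHECKs that exactly the
// promised number of instructions was emitted.

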
void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  bool neg = (mag.multiplier & (1U << 31)) != 0;
  if (divisor > 0 && neg) {
    smmla(result, dividend, ip, dividend);
  } else {
    smmul(result, dividend, ip);
    if (divisor < 0 && !neg && mag.multiplier > 0) {
      sub(result, result, Operand(dividend));
    }
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}
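
// Worked example of the magic-number division above, assuming divisor == 3,
// for which SignedDivisionByConstant yields multiplier 0x55555556 and
// shift 0 (the classic Granlund-Montgomery constants):
//   dividend = 100:  smmul -> high32(100 * 0x55555556) = 33;
//                    33 + (100 >> 31) = 33 + 0 = 33
//   dividend = -100: smmul -> high32(-100 * 0x55555556) = -34;
//                    -34 + (0xFFFFFF9C LSR 31) = -34 + 1 = -33
// matching C-style truncating division in both cases.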

} // namespace internal
} // namespace v8

#endif // V8_TARGET_ARCH_ARM