// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code to ensure we assume the
  // same constant pool availability (e.g., whether the constant pool is full
  // or not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // The call sequence on V7 or later may be:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                 @ return address
  // Or, for pre-V7 targets or for values that may be back-patched, a literal
  // load is used to avoid ICache flushes:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                 @ return address

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}

void MacroAssembler::CallDeoptimizer(Address target) {
  BlockConstPoolScope block_const_pool(this);

  uintptr_t target_raw = reinterpret_cast<uintptr_t>(target);

  // We use blx, like a call, but it does not return here. The link register is
  // used by the deoptimizer to work out what called it.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatureScope scope(this, ARMv7);
    movw(ip, target_raw & 0xffff);
    movt(ip, (target_raw >> 16) & 0xffff);
    blx(ip);
  } else {
    // We need to load a literal, but we can't use the usual constant pool
    // because we call this from a patcher, and cannot afford the guard
    // instruction and other administrative overhead.
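    // The literal lives two instructions past this ldr (after the ldr itself
    // and the blx), and pc reads as the current instruction's address plus
    // kPcLoadDelta, hence the offset below.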
    ldr(ip, MemOperand(pc, (2 * kInstrSize) - kPcLoadDelta));
    blx(ip);
    dd(target_raw);
  }
}

int MacroAssembler::CallDeoptimizerSize() {
  // ARMv7+:
  //  movw ip, ...
  //  movt ip, ...
  //  blx  ip  @ This never returns.
  //
  // ARMv6:
  //  ldr ip, =address
  //  blx ip   @ This never returns.
  //  .word address
  return 3 * kInstrSize;
}

void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}

void MacroAssembler::Drop(Register count, Condition cond) {
  add(sp, sp, Operand(count, LSL, kPointerSizeLog2), LeaveCC, cond);
}

void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
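    // No scratch register available: swap in place with the classic
    // three-EOR (XOR swap) sequence.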
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  mov(dst, Operand(value));
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}

void MacroAssembler::Move(SwVfpRegister dst, SwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}

void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}

void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatureScope scope(this, ARMv7);
    mls(dst, src1, src2, srcA, cond);
  } else {
    DCHECK(!srcA.is(ip));
    mul(ip, src1, src2, LeaveCC, cond);
    sub(dst, srcA, ip, LeaveCC, cond);
  }
}


void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
             base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
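    // The immediate is a low-bit mask of the form 2^n - 1 that would take
    // more than one instruction to materialize; extract the low n bits with
    // a single ubfx instead.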
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}


void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
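    // No ubfx available (or a fixed code size is required): mask out the
    // field, then shift it down to bit 0.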
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
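    // No sbfx available: mask out the field, shift it up so its top bit lands
    // in bit 31, then shift back down arithmetically to sign-extend it.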
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
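    // No bfi available: clear the destination field with bic, then build the
    // shifted source field in the scratch register and OR it in.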
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root = isolate()->heap()->root_handle(index);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cond, branch);
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    ldr(ip, MemOperand(address));
    cmp(ip, value);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}

void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // remembered set. If incremental marking is off, there is nothing for us to
  // do.
  if (!FLAG_incremental_marking) return;

  DCHECK(js_function.is(r1));
  DCHECK(code_entry.is(r4));
  DCHECK(scratch.is(r5));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    add(scratch, js_function, Operand(offset - kHeapObjectTag));
    ldr(ip, MemOperand(scratch));
    cmp(ip, code_entry);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  const Register dst = scratch;
  add(dst, js_function, Operand(offset - kHeapObjectTag));

  push(code_entry);

  // Save caller-saved registers, which includes js_function.
  DCHECK((kCallerSaved & js_function.bit()) != 0);
  DCHECK_EQ(kCallerSaved & code_entry.bit(), 0);
  stm(db_w, sp, (kCallerSaved | lr.bit()));

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);

  mov(r0, js_function);
  mov(r1, dst);
  mov(r2, Operand(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers (including js_function and code_entry).
  ldm(ia_w, sp, (kCallerSaved | lr.bit()));

  pop(code_entry);

  bind(&done);
}

void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  ldr(scratch, MemOperand(ip));
  // Store pointer to buffer and increment buffer top.
  str(address, MemOperand(scratch, kPointerSize, PostIndex));
  // Write back new top of buffer.
  str(scratch, MemOperand(ip));
  // Call stub on end of buffer.
  // Check for end of buffer.
  tst(scratch, Operand(StoreBuffer::kStoreBufferMask));
  if (and_then == kFallThroughAtEnd) {
    b(ne, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(ne);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}

void MacroAssembler::PushCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
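    // stm stores its register list in ascending register-code order, so the
    // marker can only be folded into the stm when its code fits that order;
    // otherwise it is pushed separately after the frame registers.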
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(2 * kPointerSize));
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        stm(db_w, sp, fp.bit() | lr.bit());
        mov(fp, Operand(sp));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
      }
    }
  } else {
    stm(db_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
    add(fp, sp, Operand(FLAG_enable_embedded_constant_pool ? kPointerSize : 0));
  }
}

void MacroAssembler::PopCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
      }
    }
  } else {
    ldm(ia_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
  }
}

void MacroAssembler::PushStandardFrame(Register function_reg) {
  DCHECK(!function_reg.is_valid() || function_reg.code() < cp.code());
  stm(db_w, sp, (function_reg.is_valid() ? function_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
  int offset = -StandardFrameConstants::kContextOffset;
  offset += function_reg.is_valid() ? kPointerSize : 0;
  add(fp, sp, Operand(offset));
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0,
  // except when FLAG_enable_embedded_constant_pool is set, in which case pp
  // is omitted.
  DCHECK(kSafepointSavedRegisters ==
         (FLAG_enable_embedded_constant_pool
              ? ((1 << (kNumSafepointSavedRegisters + 1)) - 1) & ~pp.bit()
              : (1 << kNumSafepointSavedRegisters) - 1));
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  if (FLAG_enable_embedded_constant_pool && reg_code > pp.code()) {
    // RegList omits pp.
    reg_code -= 1;
  }
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // Number of d-regs not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
  int doubles_size = config->num_allocatable_double_registers() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not applicable.
  if ((dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
      if (dst1.is(src.rn())) {
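        // dst1 aliases the base register, so load it last to keep the base
        // address intact for the first load.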
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not applicable.
  if ((src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}

void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,
                                        const Condition cond) {
  // Subtracting 0.0 preserves all inputs except for signalling NaNs, which
  // become quiet NaNs. We use vsub rather than vadd because vsub preserves
  // -0.0 inputs: -0.0 + 0.0 = 0.0, but -0.0 - 0.0 = -0.0.
  vsub(dst, src, kDoubleRegZero, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const SwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const float src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const SwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const float src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const double src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const double imm,
                          const Register scratch) {
  int64_t imm_bits = bit_cast<int64_t>(imm);
  // Handle special values first.
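  // Note the bitwise comparisons: 0.0 and -0.0 compare equal as doubles, so
  // only a bit-pattern check can distinguish the sign of a zero.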
  if (imm_bits == bit_cast<int64_t>(0.0)) {
    vmov(dst, kDoubleRegZero);
  } else if (imm_bits == bit_cast<int64_t>(-0.0)) {
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}


void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
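    // d0-d15 alias pairs of s-registers, so the high word can be moved via
    // the corresponding s-register; d16-d31 require an indexed vmov.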
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}

void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  DCHECK(!AreAliased(dst_high, shift));

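  // For a 64-bit left shift by s < 32:
  //   dst_high = (src_high << s) | (src_low >> (32 - s))
  //   dst_low  = src_low << s
  // For s >= 32, the low word shifts entirely into the high word and
  // dst_low becomes zero.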
  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  lsl(dst_high, src_low, Operand(scratch));
  mov(dst_low, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsl(dst_high, src_high, Operand(shift));
  orr(dst_high, dst_high, Operand(src_low, LSR, scratch));
  lsl(dst_low, src_low, Operand(shift));
  bind(&done);
}

void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  Label less_than_32;
  Label done;
  if (shift == 0) {
    Move(dst_high, src_high);
    Move(dst_low, src_low);
  } else if (shift == 32) {
    Move(dst_high, src_low);
    Move(dst_low, Operand(0));
  } else if (shift >= 32) {
    shift &= 0x1f;
    lsl(dst_high, src_low, Operand(shift));
    mov(dst_low, Operand(0));
  } else {
    lsl(dst_high, src_high, Operand(shift));
    orr(dst_high, dst_high, Operand(src_low, LSR, 32 - shift));
    lsl(dst_low, src_low, Operand(shift));
  }
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

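  // Mirror of LslPair: bits shifted out of the high word flow into the low
  // word, and the result's high word fills with zeroes.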
  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  lsr(dst_low, src_high, Operand(scratch));
  mov(dst_high, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  lsr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  Label less_than_32;
  Label done;
  if (shift == 32) {
    mov(dst_low, src_high);
    mov(dst_high, Operand(0));
  } else if (shift > 32) {
    shift &= 0x1f;
    lsr(dst_low, src_high, Operand(shift));
    mov(dst_high, Operand(0));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    lsr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

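  // As LsrPair, but the result's high word fills with copies of the sign bit
  // (asr #31) rather than zeroes.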
  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  asr(dst_low, src_high, Operand(scratch));
  asr(dst_high, src_high, Operand(31));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  asr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  Label less_than_32;
  Label done;
  if (shift == 32) {
    mov(dst_low, src_high);
    asr(dst_high, src_high, Operand(31));
  } else if (shift > 32) {
    shift &= 0x1f;
    asr(dst_low, src_high, Operand(shift));
    asr(dst_high, src_high, Operand(31));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    asr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
    Register code_target_address) {
  DCHECK(FLAG_enable_embedded_constant_pool);
  ldr(pp, MemOperand(code_target_address,
                     Code::kConstantPoolOffset - Code::kHeaderSize));
  add(pp, pp, code_target_address);
}


void MacroAssembler::LoadConstantPoolPointerRegister() {
  DCHECK(FLAG_enable_embedded_constant_pool);
  int entry_offset = pc_offset() + Instruction::kPCReadOffset;
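  // pc reads as the address of the current instruction plus kPCReadOffset, so
  // subtracting entry_offset yields the address of this code object's first
  // instruction, from which the constant pool pointer can be loaded.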
  sub(ip, pc, Operand(entry_offset));
  LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}

void MacroAssembler::StubPrologue(StackFrame::Type type) {
  mov(ip, Operand(Smi::FromInt(type)));
  PushCommonFrame(ip);
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}

void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictible_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
      ldr(pc, MemOperand(pc, -4));
      emit_code_stub_address(stub);
    } else {
      PushStandardFrame(r1);
      nop(ip.code());
    }
  }
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  ldr(vector, FieldMemOperand(vector, JSFunction::kLiteralsOffset));
  ldr(vector, FieldMemOperand(vector, LiteralsArray::kFeedbackVectorOffset));
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // r0-r3: preserved
  mov(ip, Operand(Smi::FromInt(type)));
  PushCommonFrame(ip);
  if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
    LoadConstantPoolPointerRegister();
  }
  if (type == StackFrame::INTERNAL) {
    mov(ip, Operand(CodeObject()));
    push(ip);
  }
}


int MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer, return address and constant pool pointer
  // (if FLAG_enable_embedded_constant_pool).
  int frame_ends;
  if (FLAG_enable_embedded_constant_pool) {
    add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    frame_ends = pc_offset();
    ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
  } else {
    mov(sp, fp);
    frame_ends = pc_offset();
    ldm(ia_w, sp, fp.bit() | lr.bit());
  }
  return frame_ends;
}

void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Push(lr, fp, context, target);
  add(fp, sp, Operand(2 * kPointerSize));
  Push(argc);
}

void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Pop(argc);
  Pop(lr, fp, context, target);
}

void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
                                    StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  mov(ip, Operand(Smi::FromInt(frame_type)));
  PushCommonFrame(ip);
  // Reserve room for saved entry sp and code object.
  sub(sp, fp, Operand(ExitFrameConstants::kFixedFrameSizeFromFp));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
    str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_embedded_constant_pool) {
    str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(ip, Operand(CodeObject()));
  str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(cp, MemOperand(ip));

  // Optionally save all double registers.
  if (save_doubles) {
    SaveFPRegs(sp, ip);
    // Note that d0 will be accessible at
    //   fp - ExitFrameConstants::kFrameSize -
    //   DwVfpRegister::kMaxNumRegisters * kDoubleSize,
    // since the sp slot, code slot and constant pool slot (if
    // FLAG_enable_embedded_constant_pool) were pushed after the fp.
  }

  // Reserve space for the return address and the requested stack space, and
  // align the frame in preparation for calling the runtime function.
1341 const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
1342 sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
1343 if (frame_alignment > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001344 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Steve Block1e0659c2011-05-24 12:43:12 +01001345 and_(sp, sp, Operand(-frame_alignment));
1346 }
1347
1348 // Set the exit frame sp value to point just before the return address
1349 // location.
1350 add(ip, sp, Operand(kPointerSize));
1351 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001352}
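
// The layout produced above, sketched from the offsets asserted on entry
// (the stack grows downwards):
//
//   fp + 2 * kPointerSize : caller's stack pointer (kCallerSPDisplacement)
//   fp + 1 * kPointerSize : return address (kCallerPCOffset)
//   fp + 0                : caller's frame pointer (kCallerFPOffset)
//   below fp              : frame type marker, the code object and saved sp
//                           slots, the optionally saved doubles, and finally
//                           the aligned (stack_space + 1)-word argument area.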


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  SmiTag(scratch1, length);
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_ARM
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else   // V8_HOST_ARCH_ARM
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_ARM
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context,
                                    bool argument_count_is_length) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);

  // Optionally restore all double registers.
  if (save_doubles) {
    // Calculate the stack location of the saved doubles and restore them.
    const int offset = ExitFrameConstants::kFixedFrameSizeFromFp;
    sub(r3, fp,
        Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
    RestoreFPRegs(r3, ip);
  }

  // Clear top frame.
  mov(r3, Operand::Zero());
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    ldr(cp, MemOperand(ip));
  }
#ifdef DEBUG
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(r3, MemOperand(ip));
#endif

  // Tear down the exit frame, pop the arguments, and return.
  if (FLAG_enable_embedded_constant_pool) {
    ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(sp, Operand(fp));
  ldm(ia_w, sp, fp.bit() | lr.bit());
  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      add(sp, sp, argument_count);
    } else {
      add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
    }
  }
}


void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
  if (use_eabi_hardfloat()) {
    Move(dst, d0);
  } else {
    vmov(dst, r0, r1);
  }
}
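
// With the hardfloat variant of the ARM EABI, a double result comes back in
// d0 and only needs a register-to-register move; with softfloat it arrives
// split across the r0/r1 pair, which the vmov above reassembles into dst.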


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
  MovFromFloatResult(dst);
}

void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
#ifdef DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the end of the destination area where we will put the arguments
  // after we drop the current frame. We add kPointerSize to count the receiver
  // argument, which is not included in the formal parameter count.
  Register dst_reg = scratch0;
  add(dst_reg, fp, Operand(caller_args_count_reg, LSL, kPointerSizeLog2));
  add(dst_reg, dst_reg,
      Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = caller_args_count_reg;
  // Calculate the end of the source area. +kPointerSize is for the receiver.
  if (callee_args_count.is_reg()) {
    add(src_reg, sp, Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
    add(src_reg, src_reg, Operand(kPointerSize));
  } else {
    add(src_reg, sp,
        Operand((callee_args_count.immediate() + 1) * kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(src_reg, dst_reg);
    Check(lo, kStackAccessBelowStackPointer);
  }

  // Restore the caller's frame pointer and return address now, as they will
  // be overwritten by the copying loop.
  ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Now copy the callee arguments to the caller frame, going backwards to
  // avoid corrupting them (the source and destination areas could overlap).

  // Both src_reg and dst_reg point to the word after the one to copy,
  // so they must be pre-decremented in the loop.
  Register tmp_reg = scratch1;
  Label loop, entry;
  b(&entry);
  bind(&loop);
  ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
  str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
  bind(&entry);
  cmp(sp, src_reg);
  b(ne, &loop);

  // Leave the current frame.
  mov(sp, dst_reg);
}
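
// Net effect of the code above: the current frame is dropped and the callee's
// arguments plus receiver are slid up over the caller's incoming argument
// area; sp is left at the base of the copied arguments, and lr/fp were
// already reloaded with the caller's return address and frame pointer, so a
// following jump completes the tail call.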

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual argument counts match. If not,
  // set up registers according to the contract with
  // ArgumentsAdaptorTrampoline:
  //   r0: actual arguments count
  //   r1: function (passed through to callee)
  //   r2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(r0));
  DCHECK(expected.is_immediate() || expected.reg().is(r2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(r0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      mov(r0, Operand(actual.immediate()));
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        b(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
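
// Three outcomes are possible above: an immediate-vs-immediate match falls
// straight through; counts only known at run time branch to regular_invoke
// when they are equal and otherwise reach the ArgumentsAdaptorTrampoline
// (called or jumped to, per 'flag'); and a definite mismatch always goes
// through the trampoline, which performs the adapted invocation itself.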


void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  mov(r4, Operand(last_step_action));
  ldrsb(r4, MemOperand(r4));
  cmp(r4, Operand(StepIn));
  b(lt, &skip_flooding);
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
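    // The counts are Smi-tagged before being pushed so that the GC treats
    // these stack slots as tagged values if the runtime call below triggers
    // a collection.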
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
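    // fun is pushed twice: the top copy is consumed as the runtime call's
    // argument, the other survives the call and is popped back afterwards.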
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}


void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(r1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(r3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Register code = r4;
    ldr(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }

    // Continue here if InvokePrologue handled the invocation through the
    // arguments adaptor because of mismatched parameter counts.
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(fun.is(r1));

  Register expected_reg = r2;
  Register temp_reg = r4;

  ldr(temp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(temp_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(expected_reg);

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(fun, new_target, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(function.is(r1));

  // Get the function and set up the context.
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  InvokeFunctionCode(r1, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(r1, function);
  InvokeFunction(r1, expected, actual, flag, call_wrapper);
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  tst(scratch, Operand(kIsNotStringMask));
  b(ne, fail);
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, Operand(LAST_NAME_TYPE));
  b(hi, fail);
}


void MacroAssembler::DebugBreak() {
  mov(r0, Operand::Zero());
  mov(r1,
      Operand(ExternalReference(Runtime::kHandleDebuggerStatement, isolate())));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}


void MacroAssembler::PushStackHandler() {
  // Adjust this code if the stack handler layout changes.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

  // Link the current handler as the next handler.
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(r5, MemOperand(r6));
  push(r5);

  // Set this new handler as the current one.
  str(sp, MemOperand(r6));
}
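
// After the push above, sp itself is the new handler: its single word links
// to the previous handler, and Isolate::kHandlerAddress now points at it, so
// PopStackHandler below can unlink it again.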


void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(r1);
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(ip));
  DCHECK(!scratch.is(ip));

  // Load the current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  Label load_context;
  Label has_context;
  DCHECK(!ip.is(scratch));
  mov(ip, fp);
  bind(&load_context);
  ldr(scratch, MemOperand(ip, CommonFrameConstants::kContextOrFrameTypeOffset));
  JumpIfNotSmi(scratch, &has_context);
  ldr(ip, MemOperand(ip, CommonFrameConstants::kCallerFPOffset));
  b(&load_context);
  bind(&has_context);

  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand::Zero());
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
#endif

  // Load the native context of the current context.
  ldr(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check that the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check that the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull);

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    // Restoring ip is not needed; it is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to the holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stubs-hydrogen.cc
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  eor(t0, t0, Operand(scratch));

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  // hash = hash ^ (hash >> 16);
  eor(t0, t0, Operand(t0, LSR, 16));
  bic(t0, t0, Operand(0xc0000000u));
}
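
// For reference, a plain C++ sketch of the computation emitted above (an
// assumed mirror of ComputeIntegerHash from utils.h; the multiply by 2057 is
// expressed above as hash + (hash << 3) + (hash << 11)):
//
//   uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed) {
//     uint32_t hash = key ^ seed;
//     hash = ~hash + (hash << 15);
//     hash = hash ^ (hash >> 12);
//     hash = hash + (hash << 2);
//     hash = hash ^ (hash >> 4);
//     hash = hash * 2057;
//     hash = hash ^ (hash >> 16);
//     return hash & 0x3fffffff;  // the bic of 0xc0000000 above
//   }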


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register t0,
                                              Register t1,
                                              Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'elements'.
  //            Unchanged on bailout so 'key' or 'elements' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //
  // t1 - used to hold the capacity mask of the dictionary.
  //
  // t2 - used for the index into the dictionary.
  Label done;

  GetNumberHash(t0, t1);

  // Compute the capacity mask.
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  SmiUntag(t1);
  sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    mov(t2, t0);
    // Compute the masked index: (hash + GetProbeOffset(i)) & mask.
    if (i > 0) {
      add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
    cmp(key, Operand(ip));
    if (i != kNumberDictionaryProbes - 1) {
      b(eq, &done);
    } else {
      b(ne, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  ldr(result, FieldMemOperand(t2, kValueOffset));
}
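
// Each dictionary entry occupies kEntrySize == 3 pointers starting at
// kElementsStartOffset: the key, the value (kValueOffset above) and the
// details word (kDetailsOffset above); each probe adds GetProbeOffset(i) to
// the hash before masking, walking the table in a quadratic probe sequence.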


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address register.
  Register top_address = scratch1;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. We must preserve the ip register at this
  // point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, LeaveCC, cond);
      source = result_end;
      cond = cc;
    }
  }
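  // For example, an object_size of 0x1008 bytes is split into the chunks 0x8
  // and 0x1000 (each encodable as a single ARM immediate), emitting two
  // chained adds instead of materializing the constant through ip, which has
  // to stay live as alloc_limit for the comparison below.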

  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    str(result_end, MemOperand(top_address));
  }

  // Tag object.
  add(result, result, Operand(kHeapObjectTag));
}


void MacroAssembler::Allocate(Register object_size, Register result,
                              Register result_end, Register scratch,
                              Label* gc_required, AllocationFlags flags) {
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch, Operand(0x7191));
      mov(result_end, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }

  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    str(result_end, MemOperand(top_address));
  }

  // Tag object.
  add(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, Register scratch,
                                  AllocationFlags flags) {
  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  Register top_address = scratch;
  mov(top_address, Operand(allocation_top));
  ldr(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top using result. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  // FastAllocate is never used for allocation folding dominators, so the top
  // pointer is updated unconditionally here.
  str(result_end, MemOperand(top_address));

  add(result, result, Operand(kHeapObjectTag));
}
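
// Unlike Allocate above, FastAllocate performs no limit check and has no
// gc_required bailout: callers are expected to have already reserved the
// space, for instance through a preceding Allocate acting as an allocation
// folding dominator.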

void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register scratch1, Register scratch2,
                                  AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Set up allocation top address register.
  Register top_address = scratch1;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));
  ldr(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top using result. Object size may be in words so a shift is
  // required to get the number of bytes. We must preserve the ip register at
  // this point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, LeaveCC, cond);
      source = result_end;
      cond = cc;
    }
  }

  // FastAllocate is never used for allocation folding dominators, so the top
  // pointer is updated unconditionally here.
  str(result_end, MemOperand(top_address));

  add(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}
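
// Size computation sketch, assuming kObjectAlignment == 8 on ARM: a
// 5-character two-byte string needs 10 payload bytes, and the add/and_ pair
// above rounds 10 + SeqTwoByteString::kHeaderSize up to the next multiple of
// 8 (add the alignment mask, then clear the mask bits).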


void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(scratch1, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  const Register temp = type_reg.is(no_reg) ? ip : type_reg;

  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, temp, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  // Registers map and type_reg can be ip. These two lines assert
  // that ip can be used with the two instructions (the constants
  // will never need ip).
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE < 256);
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}
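
// Typical usage (an illustrative sketch, not a call site in this file):
//
//   Label not_function;
//   CompareObjectType(receiver, map_reg, type_reg, JS_FUNCTION_TYPE);
//   b(ne, &not_function);  // condition flags were set by the cmp above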


void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  DCHECK(!obj.is(ip));
  LoadRoot(ip, index);
  cmp(obj, ip);
}


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(ls, fail);
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(hi, fail);
}
2462
2463
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002464void MacroAssembler::StoreNumberToDoubleElements(
2465 Register value_reg,
2466 Register key_reg,
2467 Register elements_reg,
2468 Register scratch1,
2469 LowDwVfpRegister double_scratch,
2470 Label* fail,
2471 int elements_offset) {
Ben Murdoch014dc512016-03-22 12:00:34 +00002472 DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002473 Label smi_value, store;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002474
2475 // Handle smi values specially.
2476 JumpIfSmi(value_reg, &smi_value);
2477
2478 // Ensure that the object is a heap number.
2479 CheckMap(value_reg,
2480 scratch1,
2481 isolate()->factory()->heap_number_map(),
2482 fail,
2483 DONT_DO_SMI_CHECK);
2484
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002485 vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002486 VFPCanonicalizeNaN(double_scratch);
2487 b(&store);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002488
2489 bind(&smi_value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002490 SmiToDouble(double_scratch, value_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002491
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002492 bind(&store);
2493 add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
2494 vstr(double_scratch,
2495 FieldMemOperand(scratch1,
2496 FixedDoubleArray::kHeaderSize - elements_offset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002497}
2498
2499
2500void MacroAssembler::CompareMap(Register obj,
2501 Register scratch,
2502 Handle<Map> map,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002503 Label* early_success) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002504 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002505 CompareMap(scratch, map, early_success);
2506}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002507
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002508
2509void MacroAssembler::CompareMap(Register obj_map,
2510 Handle<Map> map,
2511 Label* early_success) {
2512 cmp(obj_map, Operand(map));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002513}
2514
2515
Andrei Popescu31002712010-02-23 13:46:05 +00002516void MacroAssembler::CheckMap(Register obj,
2517 Register scratch,
2518 Handle<Map> map,
2519 Label* fail,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002520 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002521 if (smi_check_type == DO_SMI_CHECK) {
Steve Block1e0659c2011-05-24 12:43:12 +01002522 JumpIfSmi(obj, fail);
Andrei Popescu31002712010-02-23 13:46:05 +00002523 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002524
2525 Label success;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002526 CompareMap(obj, scratch, map, &success);
Andrei Popescu31002712010-02-23 13:46:05 +00002527 b(ne, fail);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002528 bind(&success);
Andrei Popescu31002712010-02-23 13:46:05 +00002529}
2530
2531
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002532void MacroAssembler::CheckMap(Register obj,
2533 Register scratch,
2534 Heap::RootListIndex index,
2535 Label* fail,
Ben Murdoch257744e2011-11-30 15:57:28 +00002536 SmiCheckType smi_check_type) {
2537 if (smi_check_type == DO_SMI_CHECK) {
Steve Block1e0659c2011-05-24 12:43:12 +01002538 JumpIfSmi(obj, fail);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002539 }
2540 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
2541 LoadRoot(ip, index);
2542 cmp(scratch, ip);
2543 b(ne, fail);
2544}
2545
2546
Emily Bernier958fae72015-03-24 16:35:39 -04002547void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
2548 Register scratch2, Handle<WeakCell> cell,
2549 Handle<Code> success,
2550 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002551 Label fail;
2552 if (smi_check_type == DO_SMI_CHECK) {
2553 JumpIfSmi(obj, &fail);
2554 }
Emily Bernier958fae72015-03-24 16:35:39 -04002555 ldr(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
2556 CmpWeakValue(scratch1, cell, scratch2);
Ben Murdoch257744e2011-11-30 15:57:28 +00002557 Jump(success, RelocInfo::CODE_TARGET, eq);
2558 bind(&fail);
2559}
2560
2561
Emily Bernier958fae72015-03-24 16:35:39 -04002562void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
2563 Register scratch) {
2564 mov(scratch, Operand(cell));
2565 ldr(scratch, FieldMemOperand(scratch, WeakCell::kValueOffset));
2566 cmp(value, scratch);
2567}
2568
2569
Ben Murdoch014dc512016-03-22 12:00:34 +00002570void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
Emily Bernier958fae72015-03-24 16:35:39 -04002571 mov(value, Operand(cell));
2572 ldr(value, FieldMemOperand(value, WeakCell::kValueOffset));
Ben Murdoch014dc512016-03-22 12:00:34 +00002573}
2574
2575
2576void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
2577 Label* miss) {
2578 GetWeakValue(value, cell);
Emily Bernier958fae72015-03-24 16:35:39 -04002579 JumpIfSmi(value, miss);
2580}
2581
2582
Ben Murdoch014dc512016-03-22 12:00:34 +00002583void MacroAssembler::GetMapConstructor(Register result, Register map,
2584 Register temp, Register temp2) {
2585 Label done, loop;
2586 ldr(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
2587 bind(&loop);
2588 JumpIfSmi(result, &done);
2589 CompareObjectType(result, temp, temp2, MAP_TYPE);
2590 b(ne, &done);
2591 ldr(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
2592 b(&loop);
2593 bind(&done);
2594}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002595
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002596
Ben Murdoch014dc512016-03-22 12:00:34 +00002597void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
2598 Register scratch, Label* miss) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002599 // Get the prototype or initial map from the function.
2600 ldr(result,
2601 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2602
2603 // If the prototype or initial map is the hole, don't return it and
2604 // simply miss the cache instead. This will allow us to allocate a
2605 // prototype object on-demand in the runtime system.
2606 LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2607 cmp(result, ip);
2608 b(eq, miss);
2609
2610 // If the function does not have an initial map, we're done.
2611 Label done;
2612 CompareObjectType(result, scratch, scratch, MAP_TYPE);
2613 b(ne, &done);
2614
2615 // Get the prototype from the initial map.
2616 ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002617
Steve Blocka7e24c12009-10-30 11:49:00 +00002618 // All done.
2619 bind(&done);
2620}
2621
2622
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002623void MacroAssembler::CallStub(CodeStub* stub,
2624 TypeFeedbackId ast_id,
2625 Condition cond) {
2626 DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
2627 Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
Steve Blocka7e24c12009-10-30 11:49:00 +00002628}
2629
2630
Andrei Popescu31002712010-02-23 13:46:05 +00002631void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
Andrei Popescu31002712010-02-23 13:46:05 +00002632 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
2633}
2634
2635
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002636bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002637 return has_frame_ || !stub->SometimesSetsUpAFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +00002638}
2639
2640
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002641void MacroAssembler::IndexFromHash(Register hash, Register index) {
2642 // If the hash field contains an array index pick it out. The assert checks
2643 // that the constants for the maximum number of digits for an array index
2644 // cached in the hash field and the number of bits reserved for it does not
2645 // conflict.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002646 DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002647 (1 << String::kArrayIndexValueBits));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002648 DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002649}
2650
2651
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002652void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002653 if (CpuFeatures::IsSupported(VFP3)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002654 vmov(value.low(), smi);
2655 vcvt_f64_s32(value, 1);
Iain Merrick9ac36c92010-09-13 15:29:50 +01002656 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002657 SmiUntag(ip, smi);
2658 vmov(value.low(), ip);
2659 vcvt_f64_s32(value, value.low());
Iain Merrick9ac36c92010-09-13 15:29:50 +01002660 }
2661}
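// The VFP3 fast path above converts the still-tagged smi directly: vcvt with
// one fractional bit reads the register as (value * 2), and the fixed-point
// divide by two undoes the smi tag shift. A host-side sketch of the
// equivalence (illustrative only; assumes 32-bit smis with a one-bit tag):
//
//   double SmiToDoubleSketch(int32_t tagged) {
//     double fixed_point = static_cast<double>(tagged) / 2.0;  // VFP3 path
//     double untag_first = static_cast<double>(tagged >> 1);   // slow path
//     // Both agree for every valid smi, since the tag bit is always clear.
//     return fixed_point;
//   }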
2662
2663
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002664void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
2665 LowDwVfpRegister double_scratch) {
2666 DCHECK(!double_input.is(double_scratch));
2667 vcvt_s32_f64(double_scratch.low(), double_input);
2668 vcvt_f64_s32(double_scratch, double_scratch.low());
2669 VFPCompareAndSetFlags(double_input, double_scratch);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002670}
2671
2672
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002673void MacroAssembler::TryDoubleToInt32Exact(Register result,
2674 DwVfpRegister double_input,
2675 LowDwVfpRegister double_scratch) {
2676 DCHECK(!double_input.is(double_scratch));
2677 vcvt_s32_f64(double_scratch.low(), double_input);
2678 vmov(result, double_scratch.low());
2679 vcvt_f64_s32(double_scratch, double_scratch.low());
2680 VFPCompareAndSetFlags(double_input, double_scratch);
2681}
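// Both TestDoubleIsInt32 and TryDoubleToInt32Exact use the same round-trip
// idea: truncate to int32, convert back, and compare against the original;
// the flags signal equality only when nothing was lost. A scalar sketch
// (illustrative only; assumes the conversion saturates as VFP does rather
// than invoking C++ undefined behaviour for out-of-range inputs):
//
//   bool TryDoubleToInt32ExactSketch(double input, int32_t* result) {
//     *result = static_cast<int32_t>(input);  // rounds toward zero
//     // False for NaN, infinities and out-of-range values, since the
//     // back-converted (saturated) integer can no longer equal the input.
//     return static_cast<double>(*result) == input;
//   }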
Steve Block44f0eee2011-05-26 01:26:41 +01002682
Steve Block44f0eee2011-05-26 01:26:41 +01002683
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002684void MacroAssembler::TryInt32Floor(Register result,
2685 DwVfpRegister double_input,
2686 Register input_high,
2687 LowDwVfpRegister double_scratch,
2688 Label* done,
2689 Label* exact) {
2690 DCHECK(!result.is(input_high));
2691 DCHECK(!double_input.is(double_scratch));
2692 Label negative, exception;
Steve Block44f0eee2011-05-26 01:26:41 +01002693
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002694 VmovHigh(input_high, double_input);
Steve Block44f0eee2011-05-26 01:26:41 +01002695
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002696 // Test for NaN and infinities.
2697 Sbfx(result, input_high,
2698 HeapNumber::kExponentShift, HeapNumber::kExponentBits);
2699 cmp(result, Operand(-1));
2700 b(eq, &exception);
2701 // Test for values that can be exactly represented as a
2702 // signed 32-bit integer.
2703 TryDoubleToInt32Exact(result, double_input, double_scratch);
2704 // If exact, return (result already fetched).
2705 b(eq, exact);
2706 cmp(input_high, Operand::Zero());
2707 b(mi, &negative);
Steve Block44f0eee2011-05-26 01:26:41 +01002708
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002709 // Input is in ]+0, +inf[.
2710 // If result equals 0x7fffffff, the input was out of range or in
2711 // ]0x7fffffff, 0x80000000[. We ignore this last case, which could still
2712 // fit into an int32; that means we always treat the input as out of
2713 // range and always go to exception.
2714 // If result < 0x7fffffff, go to done, result fetched.
2715 cmn(result, Operand(1));
2716 b(mi, &exception);
2717 b(done);
Steve Block44f0eee2011-05-26 01:26:41 +01002718
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002719 // Input is in ]-inf, -0[.
2720 // If x is a non-integer negative number,
2721 // floor(x) <=> round_to_zero(x) - 1.
2722 bind(&negative);
2723 sub(result, result, Operand(1), SetCC);
2724 // If result is still negative, go to done, result fetched.
2725 // Else, we had an overflow and we fall through to exception.
2726 b(mi, done);
2727 bind(&exception);
2728}
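// A host-side sketch of the decision sequence above (illustrative only, not
// part of the build; requires <cmath>). Note the generated code is slightly
// more conservative: positive inputs in ]0x7fffffff, 0x80000000[ are rejected
// even though their floor would fit, as the comment above explains.
//
//   bool Int32FloorSketch(double input, int32_t* out) {
//     if (std::isnan(input) || std::isinf(input)) return false;  // exception
//     double f = std::floor(input);
//     if (f < -2147483648.0 || f > 2147483647.0) return false;   // overflow
//     *out = static_cast<int32_t>(f);
//     return true;
//   }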
Steve Block44f0eee2011-05-26 01:26:41 +01002729
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002730void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
2731 DwVfpRegister double_input,
2732 Label* done) {
2733 LowDwVfpRegister double_scratch = kScratchDoubleReg;
2734 vcvt_s32_f64(double_scratch.low(), double_input);
2735 vmov(result, double_scratch.low());
Steve Block44f0eee2011-05-26 01:26:41 +01002736
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002737 // If result is not saturated (0x7fffffff or 0x80000000), we are done.
2738 sub(ip, result, Operand(1));
2739 cmp(ip, Operand(0x7ffffffe));
2740 b(lt, done);
2741}
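// vcvt_s32_f64 saturates to 0x7fffffff or 0x80000000 on out-of-range input,
// and the sub/cmp pair above detects exactly those two values: subtracting 1
// maps INT32_MAX to 0x7ffffffe and wraps INT32_MIN to 0x7fffffff, while every
// other result lands strictly below 0x7ffffffe. A sketch in plain C++
// (illustrative only; unsigned arithmetic avoids signed-overflow UB):
//
//   bool MaybeSaturatedSketch(int32_t result) {
//     int32_t probe = static_cast<int32_t>(static_cast<uint32_t>(result) - 1u);
//     return probe >= 0x7ffffffe;  // true only for 0x7fffffff / 0x80000000
//   }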
Steve Block44f0eee2011-05-26 01:26:41 +01002742
Steve Block44f0eee2011-05-26 01:26:41 +01002743
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002744void MacroAssembler::TruncateDoubleToI(Register result,
2745 DwVfpRegister double_input) {
2746 Label done;
Steve Block44f0eee2011-05-26 01:26:41 +01002747
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002748 TryInlineTruncateDoubleToI(result, double_input, &done);
2749
2750 // If we fell through, the inline version didn't succeed; call the stub instead.
2751 push(lr);
2752 sub(sp, sp, Operand(kDoubleSize)); // Put input on stack.
2753 vstr(double_input, MemOperand(sp, 0));
2754
2755 DoubleToIStub stub(isolate(), sp, result, 0, true, true);
2756 CallStub(&stub);
2757
2758 add(sp, sp, Operand(kDoubleSize));
2759 pop(lr);
2760
Steve Block44f0eee2011-05-26 01:26:41 +01002761 bind(&done);
2762}
2763
2764
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002765void MacroAssembler::TruncateHeapNumberToI(Register result,
2766 Register object) {
Steve Block44f0eee2011-05-26 01:26:41 +01002767 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002768 LowDwVfpRegister double_scratch = kScratchDoubleReg;
2769 DCHECK(!result.is(object));
Steve Block44f0eee2011-05-26 01:26:41 +01002770
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002771 vldr(double_scratch,
2772 MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
2773 TryInlineTruncateDoubleToI(result, double_scratch, &done);
Steve Block44f0eee2011-05-26 01:26:41 +01002774
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002775 // If we fell through, the inline version didn't succeed; call the stub instead.
2776 push(lr);
2777 DoubleToIStub stub(isolate(),
2778 object,
2779 result,
2780 HeapNumber::kValueOffset - kHeapObjectTag,
2781 true,
2782 true);
2783 CallStub(&stub);
2784 pop(lr);
2785
2786 bind(&done);
2787}
2788
2789
2790void MacroAssembler::TruncateNumberToI(Register object,
2791 Register result,
2792 Register heap_number_map,
2793 Register scratch1,
2794 Label* not_number) {
2795 Label done;
2796 DCHECK(!result.is(object));
2797
2798 UntagAndJumpIfSmi(result, object, &done);
2799 JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
2800 TruncateHeapNumberToI(result, object);
2801
Steve Block44f0eee2011-05-26 01:26:41 +01002802 bind(&done);
2803}
2804
2805
Andrei Popescu31002712010-02-23 13:46:05 +00002806void MacroAssembler::GetLeastBitsFromSmi(Register dst,
2807 Register src,
2808 int num_least_bits) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002809 if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002810 ubfx(dst, src, kSmiTagSize, num_least_bits);
Andrei Popescu31002712010-02-23 13:46:05 +00002811 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002812 SmiUntag(dst, src);
Andrei Popescu31002712010-02-23 13:46:05 +00002813 and_(dst, dst, Operand((1 << num_least_bits) - 1));
2814 }
2815}
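// On ARMv7 a single ubfx both untags and masks: extracting num_least_bits
// starting at the tag bit equals untag-then-mask. A host-side sketch of the
// equivalence (illustrative only; assumes 0 < n < 32):
//
//   uint32_t LeastBitsFromSmiSketch(uint32_t tagged_smi, int n) {
//     // ubfx dst, src, #1, #n  ==  (src >> 1) & ((1 << n) - 1)
//     return (tagged_smi >> kSmiTagSize) & ((1u << n) - 1u);
//   }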
2816
2817
Steve Block1e0659c2011-05-24 12:43:12 +01002818void MacroAssembler::GetLeastBitsFromInt32(Register dst,
2819 Register src,
2820 int num_least_bits) {
2821 and_(dst, src, Operand((1 << num_least_bits) - 1));
2822}
2823
2824
Steve Block44f0eee2011-05-26 01:26:41 +01002825void MacroAssembler::CallRuntime(const Runtime::Function* f,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002826 int num_arguments,
2827 SaveFPRegsMode save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002828 // All parameters are on the stack. r0 has the return value after the call.
2829
2830 // If the expected number of arguments of the runtime function is
2831 // constant, we check that the actual number of arguments matches the
2832 // expectation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002833 CHECK(f->nargs < 0 || f->nargs == num_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +00002834
Leon Clarke4515c472010-02-03 11:58:03 +00002835 // TODO(1236192): Most runtime routines don't need the number of
2836 // arguments passed in because it is constant. At some point we
2837 // should remove this need and make the runtime routine entry code
2838 // smarter.
2839 mov(r0, Operand(num_arguments));
Steve Block44f0eee2011-05-26 01:26:41 +01002840 mov(r1, Operand(ExternalReference(f, isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002841 CEntryStub stub(isolate(), 1, save_doubles);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002842 CallStub(&stub);
2843}
2844
2845
Andrei Popescu402d9372010-02-26 13:31:12 +00002846void MacroAssembler::CallExternalReference(const ExternalReference& ext,
2847 int num_arguments) {
2848 mov(r0, Operand(num_arguments));
2849 mov(r1, Operand(ext));
2850
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002851 CEntryStub stub(isolate(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00002852 CallStub(&stub);
2853}
2854
2855
Ben Murdoch014dc512016-03-22 12:00:34 +00002856void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
2857 const Runtime::Function* function = Runtime::FunctionForId(fid);
2858 DCHECK_EQ(1, function->result_size);
2859 if (function->nargs >= 0) {
2860 // TODO(1236192): Most runtime routines don't need the number of
2861 // arguments passed in because it is constant. At some point we
2862 // should remove this need and make the runtime routine entry code
2863 // smarter.
2864 mov(r0, Operand(function->nargs));
2865 }
2866 JumpToExternalReference(ExternalReference(fid, isolate()));
Steve Block6ded16b2010-05-10 14:33:55 +01002867}
2868
Ben Murdochf91f0612016-11-29 16:50:11 +00002869void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
2870 bool builtin_exit_frame) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002871#if defined(__thumb__)
2872 // Thumb mode builtin.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002873 DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002874#endif
2875 mov(r1, Operand(builtin));
Ben Murdochf91f0612016-11-29 16:50:11 +00002876 CEntryStub stub(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
2877 builtin_exit_frame);
Steve Blocka7e24c12009-10-30 11:49:00 +00002878 Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
2879}
2880
Steve Blocka7e24c12009-10-30 11:49:00 +00002881void MacroAssembler::SetCounter(StatsCounter* counter, int value,
2882 Register scratch1, Register scratch2) {
2883 if (FLAG_native_code_counters && counter->Enabled()) {
2884 mov(scratch1, Operand(value));
2885 mov(scratch2, Operand(ExternalReference(counter)));
2886 str(scratch1, MemOperand(scratch2));
2887 }
2888}
2889
2890
2891void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
2892 Register scratch1, Register scratch2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002893 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002894 if (FLAG_native_code_counters && counter->Enabled()) {
2895 mov(scratch2, Operand(ExternalReference(counter)));
2896 ldr(scratch1, MemOperand(scratch2));
2897 add(scratch1, scratch1, Operand(value));
2898 str(scratch1, MemOperand(scratch2));
2899 }
2900}
2901
2902
2903void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
2904 Register scratch1, Register scratch2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002905 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002906 if (FLAG_native_code_counters && counter->Enabled()) {
2907 mov(scratch2, Operand(ExternalReference(counter)));
2908 ldr(scratch1, MemOperand(scratch2));
2909 sub(scratch1, scratch1, Operand(value));
2910 str(scratch1, MemOperand(scratch2));
2911 }
2912}
2913
2914
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002915void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
Steve Block44f0eee2011-05-26 01:26:41 +01002916 if (emit_debug_code())
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002917 Check(cond, reason);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002918}
2919
2920
Iain Merrick75681382010-08-19 15:07:18 +01002921void MacroAssembler::AssertFastElements(Register elements) {
Steve Block44f0eee2011-05-26 01:26:41 +01002922 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002923 DCHECK(!elements.is(ip));
Iain Merrick75681382010-08-19 15:07:18 +01002924 Label ok;
2925 push(elements);
2926 ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
2927 LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
2928 cmp(elements, ip);
2929 b(eq, &ok);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002930 LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
2931 cmp(elements, ip);
2932 b(eq, &ok);
Iain Merrick75681382010-08-19 15:07:18 +01002933 LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
2934 cmp(elements, ip);
2935 b(eq, &ok);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002936 Abort(kJSObjectWithFastElementsMapHasSlowElements);
Iain Merrick75681382010-08-19 15:07:18 +01002937 bind(&ok);
2938 pop(elements);
2939 }
2940}
2941
2942
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002943void MacroAssembler::Check(Condition cond, BailoutReason reason) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002944 Label L;
Steve Block1e0659c2011-05-24 12:43:12 +01002945 b(cond, &L);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002946 Abort(reason);
Steve Blocka7e24c12009-10-30 11:49:00 +00002947 // will not return here
2948 bind(&L);
2949}
2950
2951
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002952void MacroAssembler::Abort(BailoutReason reason) {
Steve Block8defd9f2010-07-08 12:39:36 +01002953 Label abort_start;
2954 bind(&abort_start);
Steve Blocka7e24c12009-10-30 11:49:00 +00002955#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002956 const char* msg = GetBailoutReason(reason);
Steve Blocka7e24c12009-10-30 11:49:00 +00002957 if (msg != NULL) {
2958 RecordComment("Abort message: ");
2959 RecordComment(msg);
2960 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002961
2962 if (FLAG_trap_on_abort) {
2963 stop(msg);
2964 return;
2965 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002966#endif
Steve Blockd0582a62009-12-15 09:54:21 +00002967
Ben Murdochf91f0612016-11-29 16:50:11 +00002968 // Check if Abort() has already been initialized.
2969 DCHECK(isolate()->builtins()->Abort()->IsHeapObject());
2970
2971 Move(r1, Smi::FromInt(static_cast<int>(reason)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002972
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002973 // Disable stub call restrictions to always allow calls to abort.
2974 if (!has_frame_) {
2975 // We don't actually want to generate a pile of code for this, so just
2976 // claim there is a stack frame, without generating one.
2977 FrameScope scope(this, StackFrame::NONE);
Ben Murdochf91f0612016-11-29 16:50:11 +00002978 Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002979 } else {
Ben Murdochf91f0612016-11-29 16:50:11 +00002980 Call(isolate()->builtins()->Abort(), RelocInfo::CODE_TARGET);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002981 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002982 // will not return here
Steve Block8defd9f2010-07-08 12:39:36 +01002983 if (is_const_pool_blocked()) {
2984 // If the calling code cares about the exact number of
2985 // instructions generated, we insert padding here to keep the size
2986 // of the Abort macro constant.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002987 static const int kExpectedAbortInstructions = 7;
Steve Block8defd9f2010-07-08 12:39:36 +01002988 int abort_instructions = InstructionsGeneratedSince(&abort_start);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002989 DCHECK(abort_instructions <= kExpectedAbortInstructions);
Steve Block8defd9f2010-07-08 12:39:36 +01002990 while (abort_instructions++ < kExpectedAbortInstructions) {
2991 nop();
2992 }
2993 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002994}
2995
2996
Steve Blockd0582a62009-12-15 09:54:21 +00002997void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2998 if (context_chain_length > 0) {
2999 // Move up the chain of contexts to the context containing the slot.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003000 ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00003001 for (int i = 1; i < context_chain_length; i++) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003002 ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00003003 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003004 } else {
3005 // Slot is in the current function context. Move it into the
3006 // destination register in case we store into it (the write barrier
3007 // cannot be allowed to destroy the context in cp).
3008 mov(dst, cp);
3009 }
Steve Blockd0582a62009-12-15 09:54:21 +00003010}
3011
3012
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003013void MacroAssembler::LoadTransitionedArrayMapConditional(
3014 ElementsKind expected_kind,
3015 ElementsKind transitioned_kind,
3016 Register map_in_out,
3017 Register scratch,
3018 Label* no_map_match) {
Ben Murdoch014dc512016-03-22 12:00:34 +00003019 DCHECK(IsFastElementsKind(expected_kind));
3020 DCHECK(IsFastElementsKind(transitioned_kind));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003021
3022 // Check that the function's map is the same as the expected cached map.
Ben Murdoch014dc512016-03-22 12:00:34 +00003023 ldr(scratch, NativeContextMemOperand());
3024 ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003025 cmp(map_in_out, ip);
3026 b(ne, no_map_match);
3027
3028 // Use the transitioned cached map.
Ben Murdoch014dc512016-03-22 12:00:34 +00003029 ldr(map_in_out,
3030 ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003031}
3032
3033
Ben Murdoch014dc512016-03-22 12:00:34 +00003034void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
3035 ldr(dst, NativeContextMemOperand());
3036 ldr(dst, ContextMemOperand(dst, index));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003037}
3038
3039
3040void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
3041 Register map,
3042 Register scratch) {
3043 // Load the initial map. The global functions all have initial maps.
3044 ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01003045 if (emit_debug_code()) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003046 Label ok, fail;
Ben Murdoch257744e2011-11-30 15:57:28 +00003047 CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003048 b(&ok);
3049 bind(&fail);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003050 Abort(kGlobalFunctionsMustHaveInitialMap);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003051 bind(&ok);
3052 }
3053}
3054
3055
Steve Block1e0659c2011-05-24 12:43:12 +01003056void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
3057 Register reg,
3058 Register scratch,
3059 Label* not_power_of_two_or_zero) {
3060 sub(scratch, reg, Operand(1), SetCC);
3061 b(mi, not_power_of_two_or_zero);
3062 tst(scratch, reg);
3063 b(ne, not_power_of_two_or_zero);
3064}
3065
3066
Steve Block44f0eee2011-05-26 01:26:41 +01003067void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
3068 Register reg,
3069 Register scratch,
3070 Label* zero_and_neg,
3071 Label* not_power_of_two) {
3072 sub(scratch, reg, Operand(1), SetCC);
3073 b(mi, zero_and_neg);
3074 tst(scratch, reg);
3075 b(ne, not_power_of_two);
3076}
3077
3078
Andrei Popescu31002712010-02-23 13:46:05 +00003079void MacroAssembler::JumpIfNotBothSmi(Register reg1,
3080 Register reg2,
3081 Label* on_not_both_smi) {
Steve Block1e0659c2011-05-24 12:43:12 +01003082 STATIC_ASSERT(kSmiTag == 0);
Andrei Popescu31002712010-02-23 13:46:05 +00003083 tst(reg1, Operand(kSmiTagMask));
3084 tst(reg2, Operand(kSmiTagMask), eq);
3085 b(ne, on_not_both_smi);
3086}
3087
3088
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003089void MacroAssembler::UntagAndJumpIfSmi(
3090 Register dst, Register src, Label* smi_case) {
3091 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003092 SmiUntag(dst, src, SetCC);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003093 b(cc, smi_case); // Shifter carry is not set for a smi.
3094}
3095
3096
3097void MacroAssembler::UntagAndJumpIfNotSmi(
3098 Register dst, Register src, Label* non_smi_case) {
3099 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003100 SmiUntag(dst, src, SetCC);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003101 b(cs, non_smi_case); // Shifter carry is set for a non-smi.
3102}
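// Both helpers above lean on a shifter trick: SmiUntag with SetCC is an
// arithmetic shift right by one that moves the shifted-out smi tag bit into
// the carry flag, so untagging and the smi test share one instruction. In
// pseudo-C (illustrative only; the helper name is made up):
//
//   // dst = src >> 1; carry = src & 1 (set for a non-smi, clear for a smi).
//   bool UntagAndTestSketch(int32_t src, int32_t* dst) {
//     *dst = src >> 1;                  // arithmetic shift, like ASR #1
//     return (src & kSmiTagMask) != 0;  // the bit that lands in the carry
//   }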
3103
3104
Andrei Popescu31002712010-02-23 13:46:05 +00003105void MacroAssembler::JumpIfEitherSmi(Register reg1,
3106 Register reg2,
3107 Label* on_either_smi) {
Steve Block1e0659c2011-05-24 12:43:12 +01003108 STATIC_ASSERT(kSmiTag == 0);
Andrei Popescu31002712010-02-23 13:46:05 +00003109 tst(reg1, Operand(kSmiTagMask));
3110 tst(reg2, Operand(kSmiTagMask), ne);
3111 b(eq, on_either_smi);
3112}
3113
Ben Murdoch3b9bc312016-06-02 14:46:10 +01003114void MacroAssembler::AssertNotNumber(Register object) {
3115 if (emit_debug_code()) {
3116 STATIC_ASSERT(kSmiTag == 0);
3117 tst(object, Operand(kSmiTagMask));
3118 Check(ne, kOperandIsANumber);
3119 push(object);
3120 CompareObjectType(object, object, object, HEAP_NUMBER_TYPE);
3121 pop(object);
3122 Check(ne, kOperandIsANumber);
3123 }
3124}
Andrei Popescu31002712010-02-23 13:46:05 +00003125
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003126void MacroAssembler::AssertNotSmi(Register object) {
3127 if (emit_debug_code()) {
3128 STATIC_ASSERT(kSmiTag == 0);
3129 tst(object, Operand(kSmiTagMask));
3130 Check(ne, kOperandIsASmi);
3131 }
Iain Merrick75681382010-08-19 15:07:18 +01003132}
3133
3134
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003135void MacroAssembler::AssertSmi(Register object) {
3136 if (emit_debug_code()) {
3137 STATIC_ASSERT(kSmiTag == 0);
3138 tst(object, Operand(kSmiTagMask));
3139 Check(eq, kOperandIsNotSmi);
3140 }
Steve Block1e0659c2011-05-24 12:43:12 +01003141}
3142
3143
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003144void MacroAssembler::AssertString(Register object) {
3145 if (emit_debug_code()) {
3146 STATIC_ASSERT(kSmiTag == 0);
3147 tst(object, Operand(kSmiTagMask));
3148 Check(ne, kOperandIsASmiAndNotAString);
3149 push(object);
3150 ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
3151 CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
3152 pop(object);
3153 Check(lo, kOperandIsNotAString);
3154 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003155}
3156
3157
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003158void MacroAssembler::AssertName(Register object) {
3159 if (emit_debug_code()) {
3160 STATIC_ASSERT(kSmiTag == 0);
3161 tst(object, Operand(kSmiTagMask));
3162 Check(ne, kOperandIsASmiAndNotAName);
3163 push(object);
3164 ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
3165 CompareInstanceType(object, object, LAST_NAME_TYPE);
3166 pop(object);
3167 Check(le, kOperandIsNotAName);
3168 }
3169}
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003170
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003171
Ben Murdoch014dc512016-03-22 12:00:34 +00003172void MacroAssembler::AssertFunction(Register object) {
3173 if (emit_debug_code()) {
3174 STATIC_ASSERT(kSmiTag == 0);
3175 tst(object, Operand(kSmiTagMask));
3176 Check(ne, kOperandIsASmiAndNotAFunction);
3177 push(object);
3178 CompareObjectType(object, object, object, JS_FUNCTION_TYPE);
3179 pop(object);
3180 Check(eq, kOperandIsNotAFunction);
3181 }
3182}
3183
3184
3185void MacroAssembler::AssertBoundFunction(Register object) {
3186 if (emit_debug_code()) {
3187 STATIC_ASSERT(kSmiTag == 0);
3188 tst(object, Operand(kSmiTagMask));
3189 Check(ne, kOperandIsASmiAndNotABoundFunction);
3190 push(object);
3191 CompareObjectType(object, object, object, JS_BOUND_FUNCTION_TYPE);
3192 pop(object);
3193 Check(eq, kOperandIsNotABoundFunction);
3194 }
3195}
3196
Ben Murdochbcf72ee2016-08-08 18:44:38 +01003197void MacroAssembler::AssertGeneratorObject(Register object) {
3198 if (emit_debug_code()) {
3199 STATIC_ASSERT(kSmiTag == 0);
3200 tst(object, Operand(kSmiTagMask));
3201 Check(ne, kOperandIsASmiAndNotAGeneratorObject);
3202 push(object);
3203 CompareObjectType(object, object, object, JS_GENERATOR_OBJECT_TYPE);
3204 pop(object);
3205 Check(eq, kOperandIsNotAGeneratorObject);
3206 }
3207}
Ben Murdoch014dc512016-03-22 12:00:34 +00003208
Ben Murdoch109988c2016-05-18 11:27:45 +01003209void MacroAssembler::AssertReceiver(Register object) {
3210 if (emit_debug_code()) {
3211 STATIC_ASSERT(kSmiTag == 0);
3212 tst(object, Operand(kSmiTagMask));
3213 Check(ne, kOperandIsASmiAndNotAReceiver);
3214 push(object);
3215 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
3216 CompareObjectType(object, object, object, FIRST_JS_RECEIVER_TYPE);
3217 pop(object);
3218 Check(hs, kOperandIsNotAReceiver);
3219 }
3220}
3221
3222
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003223void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
3224 Register scratch) {
3225 if (emit_debug_code()) {
3226 Label done_checking;
3227 AssertNotSmi(object);
3228 CompareRoot(object, Heap::kUndefinedValueRootIndex);
3229 b(eq, &done_checking);
3230 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
3231 CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
3232 Assert(eq, kExpectedUndefinedOrCell);
3233 bind(&done_checking);
3234 }
3235}
3236
3237
3238void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
3239 if (emit_debug_code()) {
3240 CompareRoot(reg, index);
3241 Check(eq, kHeapNumberMapRegisterClobbered);
3242 }
Steve Block1e0659c2011-05-24 12:43:12 +01003243}
3244
3245
3246void MacroAssembler::JumpIfNotHeapNumber(Register object,
3247 Register heap_number_map,
3248 Register scratch,
3249 Label* on_not_heap_number) {
3250 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003251 AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
Steve Block1e0659c2011-05-24 12:43:12 +01003252 cmp(scratch, heap_number_map);
3253 b(ne, on_not_heap_number);
3254}
3255
3256
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003257void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
3258 Register first, Register second, Register scratch1, Register scratch2,
Leon Clarked91b9f72010-01-27 17:25:45 +00003259 Label* failure) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003260 // Test that both first and second are sequential one-byte strings.
Leon Clarked91b9f72010-01-27 17:25:45 +00003261 // Assume that they are non-smis.
3262 ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
3263 ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
3264 ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3265 ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003266
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003267 JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
3268 scratch2, failure);
Leon Clarked91b9f72010-01-27 17:25:45 +00003269}
3270
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003271void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
3272 Register second,
3273 Register scratch1,
3274 Register scratch2,
3275 Label* failure) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003276 // Check that neither is a smi.
Leon Clarked91b9f72010-01-27 17:25:45 +00003277 and_(scratch1, first, Operand(second));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003278 JumpIfSmi(scratch1, failure);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003279 JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
3280 scratch2, failure);
3281}
3282
3283
3284void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
3285 Label* not_unique_name) {
3286 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3287 Label succeed;
3288 tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
3289 b(eq, &succeed);
3290 cmp(reg, Operand(SYMBOL_TYPE));
3291 b(ne, not_unique_name);
3292
3293 bind(&succeed);
Leon Clarked91b9f72010-01-27 17:25:45 +00003294}
3295
Steve Blockd0582a62009-12-15 09:54:21 +00003296
Steve Block6ded16b2010-05-10 14:33:55 +01003297// Allocates a heap number or jumps to the gc_required label if the young
3298// space is full and a scavenge is needed.
3299void MacroAssembler::AllocateHeapNumber(Register result,
3300 Register scratch1,
3301 Register scratch2,
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003302 Register heap_number_map,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003303 Label* gc_required,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003304 MutableMode mode) {
Steve Block6ded16b2010-05-10 14:33:55 +01003305 // Allocate an object in the heap for the heap number and tag it as a heap
3306 // object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003307 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
Ben Murdochbcf72ee2016-08-08 18:44:38 +01003308 NO_ALLOCATION_FLAGS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003309
3310 Heap::RootListIndex map_index = mode == MUTABLE
3311 ? Heap::kMutableHeapNumberMapRootIndex
3312 : Heap::kHeapNumberMapRootIndex;
3313 AssertIsRoot(heap_number_map, map_index);
Steve Block6ded16b2010-05-10 14:33:55 +01003314
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003315 // Store heap number map in the allocated object.
Ben Murdochbcf72ee2016-08-08 18:44:38 +01003316 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003317}
3318
3319
Steve Block8defd9f2010-07-08 12:39:36 +01003320void MacroAssembler::AllocateHeapNumberWithValue(Register result,
3321 DwVfpRegister value,
3322 Register scratch1,
3323 Register scratch2,
3324 Register heap_number_map,
3325 Label* gc_required) {
3326 AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
3327 sub(scratch1, result, Operand(kHeapObjectTag));
3328 vstr(value, scratch1, HeapNumber::kValueOffset);
3329}
3330
3331
Ben Murdoch014dc512016-03-22 12:00:34 +00003332void MacroAssembler::AllocateJSValue(Register result, Register constructor,
3333 Register value, Register scratch1,
3334 Register scratch2, Label* gc_required) {
3335 DCHECK(!result.is(constructor));
3336 DCHECK(!result.is(scratch1));
3337 DCHECK(!result.is(scratch2));
3338 DCHECK(!result.is(value));
Ben Murdochbb769b22010-08-11 14:56:33 +01003339
Ben Murdoch014dc512016-03-22 12:00:34 +00003340 // Allocate JSValue in new space.
Ben Murdochbcf72ee2016-08-08 18:44:38 +01003341 Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required,
3342 NO_ALLOCATION_FLAGS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003343
Ben Murdoch014dc512016-03-22 12:00:34 +00003344 // Initialize the JSValue.
3345 LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
3346 str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
3347 LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
3348 str(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
3349 str(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
3350 str(value, FieldMemOperand(result, JSValue::kValueOffset));
3351 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
Ben Murdochbb769b22010-08-11 14:56:33 +01003352}
3353
3354
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003355void MacroAssembler::CopyBytes(Register src,
3356 Register dst,
3357 Register length,
3358 Register scratch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003359 Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003360
3361 // Align src before copying in word-size chunks.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003362 cmp(length, Operand(kPointerSize));
3363 b(le, &byte_loop);
3364
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003365 bind(&align_loop_1);
3366 tst(src, Operand(kPointerSize - 1));
3367 b(eq, &word_loop);
3368 ldrb(scratch, MemOperand(src, 1, PostIndex));
3369 strb(scratch, MemOperand(dst, 1, PostIndex));
3370 sub(length, length, Operand(1), SetCC);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003371 b(&align_loop_1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003372 // Copy bytes in word-size chunks.
3373 bind(&word_loop);
Steve Block44f0eee2011-05-26 01:26:41 +01003374 if (emit_debug_code()) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003375 tst(src, Operand(kPointerSize - 1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003376 Assert(eq, kExpectingAlignmentForCopyBytes);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003377 }
3378 cmp(length, Operand(kPointerSize));
3379 b(lt, &byte_loop);
3380 ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
Ben Murdochf91f0612016-11-29 16:50:11 +00003381 str(scratch, MemOperand(dst, kPointerSize, PostIndex));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003382 sub(length, length, Operand(kPointerSize));
3383 b(&word_loop);
3384
3385 // Copy the last bytes if any left.
3386 bind(&byte_loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003387 cmp(length, Operand::Zero());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003388 b(eq, &done);
3389 bind(&byte_loop_1);
3390 ldrb(scratch, MemOperand(src, 1, PostIndex));
3391 strb(scratch, MemOperand(dst, 1, PostIndex));
3392 sub(length, length, Operand(1), SetCC);
3393 b(ne, &byte_loop_1);
3394 bind(&done);
3395}
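// CopyBytes aligns src byte-by-byte first, then copies word-sized chunks,
// then finishes the tail a byte at a time; only src alignment is enforced,
// so the word stores may be unaligned (which ARMv7 ldr/str tolerate). A
// host-side sketch (illustrative only; requires <cstring> and <cstdint>):
//
//   void CopyBytesSketch(const uint8_t* src, uint8_t* dst, size_t n) {
//     while (n > sizeof(uint32_t) &&
//            (reinterpret_cast<uintptr_t>(src) & (sizeof(uint32_t) - 1))) {
//       *dst++ = *src++;
//       --n;
//     }
//     for (; n >= sizeof(uint32_t); n -= sizeof(uint32_t)) {
//       uint32_t word;
//       memcpy(&word, src, sizeof(word));  // one aligned word load
//       memcpy(dst, &word, sizeof(word));  // possibly unaligned store
//       src += sizeof(word);
//       dst += sizeof(word);
//     }
//     while (n-- > 0) *dst++ = *src++;
//   }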
3396
3397
Ben Murdoch014dc512016-03-22 12:00:34 +00003398void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
3399 Register end_address,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003400 Register filler) {
3401 Label loop, entry;
3402 b(&entry);
3403 bind(&loop);
Ben Murdoch014dc512016-03-22 12:00:34 +00003404 str(filler, MemOperand(current_address, kPointerSize, PostIndex));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003405 bind(&entry);
Ben Murdoch014dc512016-03-22 12:00:34 +00003406 cmp(current_address, end_address);
3407 b(lo, &loop);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003408}
3409
3410
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003411void MacroAssembler::CheckFor32DRegs(Register scratch) {
3412 mov(scratch, Operand(ExternalReference::cpu_features()));
3413 ldr(scratch, MemOperand(scratch));
3414 tst(scratch, Operand(1u << VFP32DREGS));
Steve Block6ded16b2010-05-10 14:33:55 +01003415}
3416
3417
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003418void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
3419 CheckFor32DRegs(scratch);
3420 vstm(db_w, location, d16, d31, ne);
3421 sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
3422 vstm(db_w, location, d0, d15);
3423}
3424
3425
3426void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
3427 CheckFor32DRegs(scratch);
3428 vldm(ia_w, location, d0, d15);
3429 vldm(ia_w, location, d16, d31, ne);
3430 add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
3431}
3432
3433
3434void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
3435 Register first, Register second, Register scratch1, Register scratch2,
Steve Block6ded16b2010-05-10 14:33:55 +01003436 Label* failure) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003437 const int kFlatOneByteStringMask =
Steve Block6ded16b2010-05-10 14:33:55 +01003438 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003439 const int kFlatOneByteStringTag =
3440 kStringTag | kOneByteStringTag | kSeqStringTag;
3441 and_(scratch1, first, Operand(kFlatOneByteStringMask));
3442 and_(scratch2, second, Operand(kFlatOneByteStringMask));
3443 cmp(scratch1, Operand(kFlatOneByteStringTag));
Steve Block6ded16b2010-05-10 14:33:55 +01003444 // Ignore second test if first test failed.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003445 cmp(scratch2, Operand(kFlatOneByteStringTag), eq);
Steve Block6ded16b2010-05-10 14:33:55 +01003446 b(ne, failure);
3447}
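// The check above folds both instance types down to the representation,
// encoding and string bits, then compares against the sequential one-byte
// tag; the second cmp is predicated on eq, so a failed first test stays
// failed. A host-side sketch (illustrative only):
//
//   bool BothSeqOneByteSketch(uint32_t type1, uint32_t type2) {
//     const uint32_t kMask =
//         kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
//     const uint32_t kTag = kStringTag | kOneByteStringTag | kSeqStringTag;
//     return (type1 & kMask) == kTag && (type2 & kMask) == kTag;
//   }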
3448
3449
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003450void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
3451 Register scratch,
3452 Label* failure) {
3453 const int kFlatOneByteStringMask =
Steve Block6ded16b2010-05-10 14:33:55 +01003454 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003455 const int kFlatOneByteStringTag =
3456 kStringTag | kOneByteStringTag | kSeqStringTag;
3457 and_(scratch, type, Operand(kFlatOneByteStringMask));
3458 cmp(scratch, Operand(kFlatOneByteStringTag));
Steve Block6ded16b2010-05-10 14:33:55 +01003459 b(ne, failure);
3460}
3461
Steve Block44f0eee2011-05-26 01:26:41 +01003462static const int kRegisterPassedArguments = 4;
Steve Block6ded16b2010-05-10 14:33:55 +01003463
Steve Block44f0eee2011-05-26 01:26:41 +01003464
Ben Murdoch257744e2011-11-30 15:57:28 +00003465int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
3466 int num_double_arguments) {
3467 int stack_passed_words = 0;
3468 if (use_eabi_hardfloat()) {
3469 // In the hard floating point calling convention, we can use
3470 // all double registers to pass doubles.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003471 if (num_double_arguments > DoubleRegister::NumRegisters()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003472 stack_passed_words +=
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003473 2 * (num_double_arguments - DoubleRegister::NumRegisters());
Ben Murdoch257744e2011-11-30 15:57:28 +00003474 }
3475 } else {
3476 // In the soft floating point calling convention, every double
3477 // argument is passed using two registers.
3478 num_reg_arguments += 2 * num_double_arguments;
3479 }
Steve Block6ded16b2010-05-10 14:33:55 +01003480 // Up to four simple arguments are passed in registers r0..r3.
Ben Murdoch257744e2011-11-30 15:57:28 +00003481 if (num_reg_arguments > kRegisterPassedArguments) {
3482 stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
3483 }
3484 return stack_passed_words;
3485}
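// A worked example of the count above (illustrative only): for a call with
// six integer arguments and three double arguments,
//   hardfloat: the doubles all travel in d-registers, so only the integer
//              overflow spills: 6 - 4 = 2 stack-passed words;
//   softfloat: each double occupies two core-register slots, giving
//              6 + 2*3 = 12 slots and 12 - 4 = 8 stack-passed words.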
3486
3487
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003488void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
3489 Register index,
3490 Register value,
3491 uint32_t encoding_mask) {
3492 Label is_object;
3493 SmiTst(string);
3494 Check(ne, kNonObject);
3495
3496 ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
3497 ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
3498
3499 and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
3500 cmp(ip, Operand(encoding_mask));
3501 Check(eq, kUnexpectedStringType);
3502
3503 // The index is assumed to be untagged coming in; tag it to compare with the
3504 // string length without using a temp register. It is restored at the end of
3505 // this function.
3506 Label index_tag_ok, index_tag_bad;
3507 TrySmiTag(index, index, &index_tag_bad);
3508 b(&index_tag_ok);
3509 bind(&index_tag_bad);
3510 Abort(kIndexIsTooLarge);
3511 bind(&index_tag_ok);
3512
3513 ldr(ip, FieldMemOperand(string, String::kLengthOffset));
3514 cmp(index, ip);
3515 Check(lt, kIndexIsTooLarge);
3516
3517 cmp(index, Operand(Smi::FromInt(0)));
3518 Check(ge, kIndexIsNegative);
3519
3520 SmiUntag(index, index);
3521}
3522
3523
Ben Murdoch257744e2011-11-30 15:57:28 +00003524void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
3525 int num_double_arguments,
3526 Register scratch) {
3527 int frame_alignment = ActivationFrameAlignment();
3528 int stack_passed_arguments = CalculateStackPassedWords(
3529 num_reg_arguments, num_double_arguments);
Steve Block6ded16b2010-05-10 14:33:55 +01003530 if (frame_alignment > kPointerSize) {
3531 // Make the stack end at the alignment boundary and make room for
3532 // num_arguments - 4 words and the original value of sp.
3533 mov(scratch, sp);
3534 sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003535 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Steve Block6ded16b2010-05-10 14:33:55 +01003536 and_(sp, sp, Operand(-frame_alignment));
3537 str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
3538 } else {
3539 sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
3540 }
3541}
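// The alignment step above is the usual power-of-two round-down: and'ing sp
// with -frame_alignment clears the low bits, after an extra slot has been
// reserved to remember the original sp. A sketch of the arithmetic
// (illustrative only; alignment must be a power of two):
//
//   uintptr_t AlignDownSketch(uintptr_t sp, uintptr_t alignment) {
//     return sp & ~(alignment - 1);  // == sp & -alignment in two's complement
//   }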
3542
3543
Ben Murdoch257744e2011-11-30 15:57:28 +00003544void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
3545 Register scratch) {
3546 PrepareCallCFunction(num_reg_arguments, 0, scratch);
3547}
3548
3549
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003550void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
3551 DCHECK(src.is(d0));
3552 if (!use_eabi_hardfloat()) {
3553 vmov(r0, r1, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00003554 }
3555}
3556
3557
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003558// On ARM this is just a synonym to make the purpose clear.
3559void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
3560 MovToFloatParameter(src);
Ben Murdoch257744e2011-11-30 15:57:28 +00003561}
3562
3563
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003564void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
3565 DwVfpRegister src2) {
3566 DCHECK(src1.is(d0));
3567 DCHECK(src2.is(d1));
3568 if (!use_eabi_hardfloat()) {
3569 vmov(r0, r1, src1);
3570 vmov(r2, r3, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003571 }
3572}
3573
3574
3575void MacroAssembler::CallCFunction(ExternalReference function,
3576 int num_reg_arguments,
3577 int num_double_arguments) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003578 mov(ip, Operand(function));
3579 CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
Ben Murdoch257744e2011-11-30 15:57:28 +00003580}
3581
3582
3583void MacroAssembler::CallCFunction(Register function,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003584 int num_reg_arguments,
3585 int num_double_arguments) {
3586 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
Ben Murdoch257744e2011-11-30 15:57:28 +00003587}
3588
3589
Steve Block6ded16b2010-05-10 14:33:55 +01003590void MacroAssembler::CallCFunction(ExternalReference function,
3591 int num_arguments) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003592 CallCFunction(function, num_arguments, 0);
Steve Block44f0eee2011-05-26 01:26:41 +01003593}
3594
Ben Murdoch257744e2011-11-30 15:57:28 +00003595
Steve Block44f0eee2011-05-26 01:26:41 +01003596void MacroAssembler::CallCFunction(Register function,
Steve Block44f0eee2011-05-26 01:26:41 +01003597 int num_arguments) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003598 CallCFunction(function, num_arguments, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01003599}
3600
3601
Steve Block44f0eee2011-05-26 01:26:41 +01003602void MacroAssembler::CallCFunctionHelper(Register function,
Ben Murdoch257744e2011-11-30 15:57:28 +00003603 int num_reg_arguments,
3604 int num_double_arguments) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003605 DCHECK(has_frame());
Steve Block6ded16b2010-05-10 14:33:55 +01003606 // Make sure that the stack is aligned before calling a C function unless
3607 // running in the simulator. The simulator has its own alignment check which
3608 // provides more information.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003609#if V8_HOST_ARCH_ARM
Steve Block44f0eee2011-05-26 01:26:41 +01003610 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003611 int frame_alignment = base::OS::ActivationFrameAlignment();
Steve Block6ded16b2010-05-10 14:33:55 +01003612 int frame_alignment_mask = frame_alignment - 1;
3613 if (frame_alignment > kPointerSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003614 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Steve Block6ded16b2010-05-10 14:33:55 +01003615 Label alignment_as_expected;
3616 tst(sp, Operand(frame_alignment_mask));
3617 b(eq, &alignment_as_expected);
3618 // Don't use Check here, as it will call Runtime_Abort possibly
3619 // re-entering here.
3620 stop("Unexpected alignment");
3621 bind(&alignment_as_expected);
3622 }
3623 }
3624#endif
3625
3626 // Just call directly. The function called cannot cause a GC, or
3627 // allow preemption, so the return address in the link register
3628 // stays correct.
3629 Call(function);
Ben Murdoch257744e2011-11-30 15:57:28 +00003630 int stack_passed_arguments = CalculateStackPassedWords(
3631 num_reg_arguments, num_double_arguments);
3632 if (ActivationFrameAlignment() > kPointerSize) {
Steve Block6ded16b2010-05-10 14:33:55 +01003633 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
3634 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +00003635 add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003636 }
Steve Block1e0659c2011-05-24 12:43:12 +01003637}
3638
3639
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003640void MacroAssembler::CheckPageFlag(
3641 Register object,
3642 Register scratch,
3643 int mask,
3644 Condition cc,
3645 Label* condition_met) {
Ben Murdoch109988c2016-05-18 11:27:45 +01003646 DCHECK(cc == eq || cc == ne);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003647 Bfc(scratch, object, 0, kPageSizeBits);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003648 ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
3649 tst(scratch, Operand(mask));
3650 b(cc, condition_met);
3651}
3652
3653
3654void MacroAssembler::JumpIfBlack(Register object,
3655 Register scratch0,
3656 Register scratch1,
3657 Label* on_black) {
Ben Murdoch014dc512016-03-22 12:00:34 +00003658 HasColor(object, scratch0, scratch1, on_black, 1, 1); // kBlackBitPattern.
3659 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003660}
3661
3662
3663void MacroAssembler::HasColor(Register object,
3664 Register bitmap_scratch,
3665 Register mask_scratch,
3666 Label* has_color,
3667 int first_bit,
3668 int second_bit) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003669 DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003670
3671 GetMarkBits(object, bitmap_scratch, mask_scratch);
3672
3673 Label other_color, word_boundary;
3674 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
3675 tst(ip, Operand(mask_scratch));
3676 b(first_bit == 1 ? eq : ne, &other_color);
3677 // Shift left 1 by adding.
3678 add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
3679 b(eq, &word_boundary);
3680 tst(ip, Operand(mask_scratch));
3681 b(second_bit == 1 ? ne : eq, has_color);
3682 jmp(&other_color);
3683
3684 bind(&word_boundary);
3685 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
3686 tst(ip, Operand(1));
3687 b(second_bit == 1 ? ne : eq, has_color);
3688 bind(&other_color);
3689}
3690
3691
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003692void MacroAssembler::GetMarkBits(Register addr_reg,
3693 Register bitmap_reg,
3694 Register mask_reg) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003695 DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003696 and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
3697 Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
3698 const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
3699 Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
3700 add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
3701 mov(ip, Operand(1));
3702 mov(mask_reg, Operand(ip, LSL, mask_reg));
3703}
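// GetMarkBits splits an address into the page base, the index of the bitmap
// cell inside the page, and the bit index inside that cell; callers add
// MemoryChunk::kHeaderSize when loading the cell. A host-side sketch of the
// arithmetic (illustrative only; assumes 32-bit pointers and
// Bitmap::kBitsPerCell == 32):
//
//   void GetMarkBitsSketch(uint32_t addr, uint32_t* cell_base, uint32_t* mask) {
//     uint32_t page = addr & ~Page::kPageAlignmentMask;
//     uint32_t bit = (addr >> kPointerSizeLog2) & (Bitmap::kBitsPerCell - 1);
//     uint32_t low_bits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
//     uint32_t cell = (addr & Page::kPageAlignmentMask) >> low_bits;
//     *cell_base = page + cell * kPointerSize;
//     *mask = 1u << bit;
//   }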
3704
3705
Ben Murdoch014dc512016-03-22 12:00:34 +00003706void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
3707 Register mask_scratch, Register load_scratch,
3708 Label* value_is_white) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003709 DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003710 GetMarkBits(value, bitmap_scratch, mask_scratch);
3711
3712 // If the value is black or grey we don't need to do anything.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003713 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
Ben Murdoch014dc512016-03-22 12:00:34 +00003714 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
3715 DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003716 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003717
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003718 // Since both black and grey have a 1 in the first position and white does
3719 // not have a 1 there, we only need to check one bit.
3720 ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
3721 tst(mask_scratch, load_scratch);
Ben Murdoch014dc512016-03-22 12:00:34 +00003722 b(eq, value_is_white);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003723}
3724
3725
Ben Murdoch257744e2011-11-30 15:57:28 +00003726void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
Ben Murdochbcf72ee2016-08-08 18:44:38 +01003727 usat(output_reg, 8, Operand(input_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00003728}


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;

  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);

  // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest
  // rounding mode will provide the correct result.
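  // (The conversion also saturates, so negative inputs, and NaN, come out as
  // 0, which is the desired clamp behaviour for those cases.)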
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());

  bind(&done);
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  ldr(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  ldr(dst,
      FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  ldr(dst, FieldMemOperand(dst, offset));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
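  // The receiver is expected in r0. Jump to |call_runtime| unless every map
  // in its prototype chain has a valid (possibly empty) enum cache and every
  // object on the chain has no elements.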
  Register null_value = r5;
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);

  // Check that there are no elements. Register r2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}

void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
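  // An AllocationMemento, if present, sits immediately after the JSArray in
  // memory. The checks below only read that memory when it is safe to do so:
  // the object must be in new space and the candidate memento must lie below
  // the current allocation top.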
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top_adr =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  mov(ip, Operand(new_space_allocation_top_adr));
  ldr(ip, MemOperand(ip));
  eor(scratch_reg, scratch_reg, Operand(ip));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(eq, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  eor(scratch_reg, scratch_reg, Operand(receiver_reg));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(ne, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  mov(ip, Operand(new_space_allocation_top_adr));
  ldr(ip, MemOperand(ip));
  cmp(scratch_reg, ip);
  b(gt, no_memento_found);
  // Memento map check.
  bind(&map_check);
  ldr(scratch_reg, MemOperand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Operand(isolate()->factory()->allocation_memento_map()));
}

Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
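  // Pick an allocatable general register that aliases none of the (up to
  // six) given registers; useful for finding a free scratch register.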
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
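  // Jump to |found| if any object in the prototype chain of |object| has
  // dictionary (slow-mode) elements.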
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // Start the walk at the object's prototype.
  mov(current, object);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(eq, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));

  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  ldrb(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  cmp(scratch1, Operand(JS_OBJECT_TYPE));
  b(lo, found);

  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(ne, &loop_again);

  bind(&end);
}


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size_ bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that we don't have any pending constant pools.
  DCHECK(masm_.pending_32_bit_constants_.empty());
  DCHECK(masm_.pending_64_bit_constants_.empty());

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::EmitCondition(Condition cond) {
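  // Rewrite only the condition field of the instruction at the current
  // patching position, leaving the remaining bits untouched.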
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
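  // Division by a constant via multiply-high (the classic Granlund-Montgomery
  // scheme): result = (dividend * multiplier) >> (32 + shift), with fix-ups
  // when the sign of the 32-bit multiplier disagrees with that of the
  // divisor. smmul/smmla yield the high 32 bits of the 64-bit product, and
  // the final add of the sign bit makes the quotient truncate toward zero.
  // For example, TruncatingDiv(r0, r1, 3) leaves r1 / 3 in r0, clobbering ip
  // as a scratch register.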
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  bool neg = (mag.multiplier & (1U << 31)) != 0;
  if (divisor > 0 && neg) {
    smmla(result, dividend, ip, dividend);
  } else {
    smmul(result, dividend, ip);
    if (divisor < 0 && !neg && mag.multiplier > 0) {
      sub(result, result, Operand(dividend));
    }
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM