// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate,
                                                   Address target,
                                                   RelocInfo::Mode rmode,
                                                   Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(NULL, mov_instr) * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code to ensure we assume the
  // same constant pool availability (e.g., whether the constant pool is full
  // or not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // Call sequence on V7 or later may be:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // Or for pre-V7 or values that may be back-patched
  // to avoid ICache flushes:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                      @ return address

  // Statement positions are expected to be recorded when the target
  // address is loaded. The mov method automatically records positions
  // when pc is the target; since that is not the case here, we have to
  // do it explicitly.
  positions_recorder()->WriteRecordedPositions();

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}


void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}

void MacroAssembler::Drop(Register count, Condition cond) {
  add(sp, sp, Operand(count, LSL, kPointerSizeLog2), LeaveCC, cond);
}

void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
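    // No scratch register available: swap in place with the classic
    // three-EOR trick (reg1 ^= reg2; reg2 ^= reg1; reg1 ^= reg2), which
    // needs no temporary register.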
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    mov(dst, Operand(value));
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      mov(dst, Operand(value));
    }
  }
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}


void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}


void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(MLS)) {
    CpuFeatureScope scope(this, MLS);
    mls(dst, src1, src2, srcA, cond);
  } else {
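    // MLS (multiply-and-subtract) is unavailable: form the product in ip,
    // then subtract it from srcA.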
262 DCHECK(!srcA.is(ip));
263 mul(ip, src1, src2, LeaveCC, cond);
264 sub(dst, srcA, ip, LeaveCC, cond);
265 }
266}
267
268
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100269void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
270 Condition cond) {
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -0800271 if (!src2.is_reg() &&
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000272 !src2.must_output_reloc_info(this) &&
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -0800273 src2.immediate() == 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000274 mov(dst, Operand::Zero(), LeaveCC, cond);
275 } else if (!(src2.instructions_required(this) == 1) &&
276 !src2.must_output_reloc_info(this) &&
Ben Murdoch8b112d22011-06-08 16:22:53 +0100277 CpuFeatures::IsSupported(ARMv7) &&
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000278 base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
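    // src2 + 1 is a power of two, so src2 is a contiguous mask of low bits;
    // a single ubfx extracts exactly those bits.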
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}


void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
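    // Emulate ubfx: mask out bits [lsb, lsb + width) and shift them down to
    // bit 0.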
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
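    // Emulate sbfx: isolate the field, shift it up to the top of the
    // register, then arithmetic-shift it back down to sign-extend.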
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    DCHECK(!dst.is(pc) && !src.rm().is(pc));
    DCHECK((satpos >= 0) && (satpos <= 31));

    // These asserts are required to ensure compatibility with the ARMv7
    // implementation.
    DCHECK((src.shift_op() == ASR) || (src.shift_op() == LSL));
    DCHECK(src.rs().is(no_reg));

    Label done;
    int satval = (1 << satpos) - 1;

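    // Saturate dst to the unsigned range [0, satval]: negative inputs clamp
    // to 0, values above satval clamp to satval.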
    if (cond != al) {
      b(NegateCondition(cond), &done);  // Skip saturate if !condition.
    }
    if (!(src.is_reg() && dst.is(src.rm()))) {
      mov(dst, src);
    }
    tst(dst, Operand(~satval));
    b(eq, &done);
    mov(dst, Operand::Zero(), LeaveCC, mi);  // 0 if negative.
    mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
    bind(&done);
  } else {
    usat(dst, satpos, src, cond);
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root = isolate()->heap()->root_handle(index);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cond, branch);
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's page's interesting flag suffices, since it
  // is only set during incremental collection, and then it is also guaranteed
  // that the from object's page's interesting flag is set. This optimization
  // relies on the fact that maps can never be in new space.
542 CheckPageFlag(map,
543 map, // Used as scratch.
544 MemoryChunk::kPointersToHereAreInterestingMask,
545 eq,
546 &done);
547
548 add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
549 if (emit_debug_code()) {
550 Label ok;
551 tst(dst, Operand((1 << kPointerSizeLog2) - 1));
552 b(eq, &ok);
553 stop("Unaligned cell in write barrier");
554 bind(&ok);
555 }
556
557 // Record the actual write.
558 if (lr_status == kLRHasNotBeenSaved) {
559 push(lr);
560 }
561 RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
562 fp_mode);
563 CallStub(&stub);
564 if (lr_status == kLRHasNotBeenSaved) {
565 pop(lr);
566 }
567
568 bind(&done);
569
570 // Count number of write barriers in generated code.
571 isolate()->counters()->write_barriers_static()->Increment();
572 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);
573
574 // Clobber clobbered registers when running with the debug-code flag
575 // turned on to provoke errors.
576 if (emit_debug_code()) {
577 mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
578 mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
Leon Clarke4515c472010-02-03 11:58:03 +0000579 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000580}
581
582
Steve Block8defd9f2010-07-08 12:39:36 +0100583// Will clobber 4 registers: object, address, scratch, ip. The
584// register 'object' contains a heap object pointer. The heap object
585// tag is shifted away.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000586void MacroAssembler::RecordWrite(
587 Register object,
588 Register address,
589 Register value,
590 LinkRegisterStatus lr_status,
591 SaveFPRegsMode fp_mode,
592 RememberedSetAction remembered_set_action,
593 SmiCheck smi_check,
594 PointersToHereCheck pointers_to_here_check_for_value) {
595 DCHECK(!object.is(value));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100596 if (emit_debug_code()) {
597 ldr(ip, MemOperand(address));
598 cmp(ip, value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000599 Check(eq, kWrongAddressOrValuePassedToRecordWrite);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100600 }
Steve Block8defd9f2010-07-08 12:39:36 +0100601
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000602 if (remembered_set_action == OMIT_REMEMBERED_SET &&
603 !FLAG_incremental_marking) {
604 return;
605 }
606
607 // First, check if a write barrier is even needed. The tests below
608 // catch stores of smis and stores into the young generation.
Steve Block8defd9f2010-07-08 12:39:36 +0100609 Label done;
610
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100611 if (smi_check == INLINE_SMI_CHECK) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000612 JumpIfSmi(value, &done);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100613 }
614
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000615 if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
616 CheckPageFlag(value,
617 value, // Used as scratch.
618 MemoryChunk::kPointersToHereAreInterestingMask,
619 eq,
620 &done);
621 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100622 CheckPageFlag(object,
623 value, // Used as scratch.
624 MemoryChunk::kPointersFromHereAreInterestingMask,
625 eq,
626 &done);
Steve Block8defd9f2010-07-08 12:39:36 +0100627
628 // Record the actual write.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100629 if (lr_status == kLRHasNotBeenSaved) {
630 push(lr);
631 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000632 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
633 fp_mode);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100634 CallStub(&stub);
635 if (lr_status == kLRHasNotBeenSaved) {
636 pop(lr);
637 }
Steve Block8defd9f2010-07-08 12:39:36 +0100638
639 bind(&done);
640
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000641 // Count number of write barriers in generated code.
642 isolate()->counters()->write_barriers_static()->Increment();
643 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
644 value);
645
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100646 // Clobber clobbered registers when running with the debug-code flag
Steve Block8defd9f2010-07-08 12:39:36 +0100647 // turned on to provoke errors.
Steve Block44f0eee2011-05-26 01:26:41 +0100648 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000649 mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
650 mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100651 }
652}
653
Ben Murdoch097c5b22016-05-18 11:27:45 +0100654void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
655 Register code_entry,
656 Register scratch) {
657 const int offset = JSFunction::kCodeEntryOffset;
658
659 // Since a code entry (value) is always in old space, we don't need to update
660 // remembered set. If incremental marking is off, there is nothing for us to
661 // do.
662 if (!FLAG_incremental_marking) return;
663
664 DCHECK(js_function.is(r1));
665 DCHECK(code_entry.is(r4));
666 DCHECK(scratch.is(r5));
667 AssertNotSmi(js_function);
668
669 if (emit_debug_code()) {
670 add(scratch, js_function, Operand(offset - kHeapObjectTag));
671 ldr(ip, MemOperand(scratch));
672 cmp(ip, code_entry);
673 Check(eq, kWrongAddressOrValuePassedToRecordWrite);
674 }
675
676 // First, check if a write barrier is even needed. The tests below
677 // catch stores of Smis and stores into young gen.
678 Label done;
679
680 CheckPageFlag(code_entry, scratch,
681 MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
682 CheckPageFlag(js_function, scratch,
683 MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);
684
685 const Register dst = scratch;
686 add(dst, js_function, Operand(offset - kHeapObjectTag));
687
688 push(code_entry);
689
690 // Save caller-saved registers, which includes js_function.
691 DCHECK((kCallerSaved & js_function.bit()) != 0);
692 DCHECK_EQ(kCallerSaved & code_entry.bit(), 0);
693 stm(db_w, sp, (kCallerSaved | lr.bit()));
694
695 int argument_count = 3;
696 PrepareCallCFunction(argument_count, code_entry);
697
698 mov(r0, js_function);
699 mov(r1, dst);
700 mov(r2, Operand(ExternalReference::isolate_address(isolate())));
701
702 {
703 AllowExternalCallThatCantCauseGC scope(this);
704 CallCFunction(
705 ExternalReference::incremental_marking_record_write_code_entry_function(
706 isolate()),
707 argument_count);
708 }
709
710 // Restore caller-saved registers (including js_function and code_entry).
711 ldm(ia_w, sp, (kCallerSaved | lr.bit()));
712
713 pop(code_entry);
714
715 bind(&done);
716}
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100717
718void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
719 Register address,
720 Register scratch,
721 SaveFPRegsMode fp_mode,
722 RememberedSetFinalAction and_then) {
723 Label done;
724 if (emit_debug_code()) {
725 Label ok;
726 JumpIfNotInNewSpace(object, scratch, &ok);
727 stop("Remembered set pointer is in new space");
728 bind(&ok);
729 }
730 // Load store buffer top.
731 ExternalReference store_buffer =
732 ExternalReference::store_buffer_top(isolate());
733 mov(ip, Operand(store_buffer));
734 ldr(scratch, MemOperand(ip));
735 // Store pointer to buffer and increment buffer top.
736 str(address, MemOperand(scratch, kPointerSize, PostIndex));
737 // Write back new top of buffer.
738 str(scratch, MemOperand(ip));
739 // Call stub on end of buffer.
740 // Check for end of buffer.
Ben Murdochda12d292016-06-02 14:46:10 +0100741 tst(scratch, Operand(StoreBuffer::kStoreBufferMask));
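  // ne here means the new top has not reached the end of the buffer, so
  // there is still room and the overflow stub can be skipped.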
  if (and_then == kFallThroughAtEnd) {
    b(ne, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(ne);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}

void MacroAssembler::PushCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(2 * kPointerSize));
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        stm(db_w, sp, fp.bit() | lr.bit());
        mov(fp, Operand(sp));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
      }
    }
  } else {
    stm(db_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
    add(fp, sp, Operand(FLAG_enable_embedded_constant_pool ? kPointerSize : 0));
  }
}

void MacroAssembler::PopCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
      }
    }
  } else {
    ldm(ia_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
  }
}

void MacroAssembler::PushStandardFrame(Register function_reg) {
  DCHECK(!function_reg.is_valid() || function_reg.code() < cp.code());
  stm(db_w, sp, (function_reg.is_valid() ? function_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
  int offset = -StandardFrameConstants::kContextOffset;
  offset += function_reg.is_valid() ? kPointerSize : 0;
  add(fp, sp, Operand(offset));
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0,
  // except when FLAG_enable_embedded_constant_pool is set, in which case pp is
  // omitted.
  DCHECK(kSafepointSavedRegisters ==
         (FLAG_enable_embedded_constant_pool
              ? ((1 << (kNumSafepointSavedRegisters + 1)) - 1) & ~pp.bit()
              : (1 << kNumSafepointSavedRegisters) - 1));
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  if (FLAG_enable_embedded_constant_pool && reg_code > pp.code()) {
    // RegList omits pp.
    reg_code -= 1;
  }
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // Number of d-regs not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  int doubles_size = config->num_allocatable_double_registers() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not available.
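  // ldrd also requires dst1 to be an even-numbered register with dst2 the
  // next consecutive one (e.g. r0/r1), per the ARM instruction encoding.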
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
      if (dst1.is(src.rn())) {
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}


void MacroAssembler::VFPEnsureFPSCRState(Register scratch) {
  // If needed, restore wanted bits of FPSCR.
  Label fpscr_done;
  vmrs(scratch);
  if (emit_debug_code()) {
    Label rounding_mode_correct;
    tst(scratch, Operand(kVFPRoundingModeMask));
    b(eq, &rounding_mode_correct);
    // Don't call Assert here, since Runtime_Abort could re-enter here.
    stop("Default rounding mode not set");
    bind(&rounding_mode_correct);
  }
  tst(scratch, Operand(kVFPDefaultNaNModeControlBit));
  b(ne, &fpscr_done);
  orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit));
  vmsr(scratch);
  bind(&fpscr_done);
}


void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,
                                        const Condition cond) {
  vsub(dst, src, kDoubleRegZero, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const SwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const float src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const SwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const float src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const double src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const double imm,
                          const Register scratch) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero) {
    vmov(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero) {
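    // Materialize -0.0 by negating +0.0, which avoids loading a constant.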
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}


void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}

void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  DCHECK(!AreAliased(dst_high, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
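  // scratch = 32 - shift, setting the flags: gt below means shift < 32. In
  // that path scratch is the complementary shift amount for the bits that
  // cross the register boundary.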
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  lsl(dst_high, src_low, Operand(scratch));
  mov(dst_low, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsl(dst_high, src_high, Operand(shift));
  orr(dst_high, dst_high, Operand(src_low, LSR, scratch));
  lsl(dst_low, src_low, Operand(shift));
  bind(&done);
}

void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  Label less_than_32;
  Label done;
  if (shift == 0) {
    Move(dst_high, src_high);
    Move(dst_low, src_low);
  } else if (shift == 32) {
    Move(dst_high, src_low);
    Move(dst_low, Operand(0));
  } else if (shift >= 32) {
    shift &= 0x1f;
    lsl(dst_high, src_low, Operand(shift));
    mov(dst_low, Operand(0));
  } else {
    lsl(dst_high, src_high, Operand(shift));
    orr(dst_high, dst_high, Operand(src_low, LSR, 32 - shift));
    lsl(dst_low, src_low, Operand(shift));
  }
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  lsr(dst_low, src_high, Operand(scratch));
  mov(dst_high, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  lsr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  Label less_than_32;
  Label done;
  if (shift == 32) {
    mov(dst_low, src_high);
    mov(dst_high, Operand(0));
  } else if (shift > 32) {
    shift &= 0x1f;
    lsr(dst_low, src_high, Operand(shift));
    mov(dst_high, Operand(0));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    lsr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  asr(dst_low, src_high, Operand(scratch));
  asr(dst_high, src_high, Operand(31));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  asr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  Label less_than_32;
  Label done;
  if (shift == 32) {
    mov(dst_low, src_high);
    asr(dst_high, src_high, Operand(31));
  } else if (shift > 32) {
    shift &= 0x1f;
    asr(dst_low, src_high, Operand(shift));
    asr(dst_high, src_high, Operand(31));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    asr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
    Register code_target_address) {
  DCHECK(FLAG_enable_embedded_constant_pool);
  ldr(pp, MemOperand(code_target_address,
                     Code::kConstantPoolOffset - Code::kHeaderSize));
  add(pp, pp, code_target_address);
}


void MacroAssembler::LoadConstantPoolPointerRegister() {
  DCHECK(FLAG_enable_embedded_constant_pool);
  int entry_offset = pc_offset() + Instruction::kPCReadOffset;
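  // Compute the address of this Code object's first instruction: reading pc
  // yields the current instruction address plus Instruction::kPCReadOffset,
  // so subtracting entry_offset lands on instruction-stream offset 0.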
  sub(ip, pc, Operand(entry_offset));
  LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}

void MacroAssembler::StubPrologue(StackFrame::Type type) {
  mov(ip, Operand(Smi::FromInt(type)));
  PushCommonFrame(ip);
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}

void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictable_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
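      // r0 now points at the start of this three-instruction sequence
      // (reading pc yields the current instruction's address plus 8).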
1277 ldr(pc, MemOperand(pc, -4));
1278 emit_code_stub_address(stub);
1279 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01001280 PushStandardFrame(r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001281 nop(ip.code());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001282 }
1283 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001284 if (FLAG_enable_embedded_constant_pool) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001285 LoadConstantPoolPointerRegister();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001286 set_constant_pool_available(true);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001287 }
1288}
1289
1290
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001291void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
1292 ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1293 ldr(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
1294 ldr(vector,
1295 FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
1296}
1297
1298
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001299void MacroAssembler::EnterFrame(StackFrame::Type type,
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001300 bool load_constant_pool_pointer_reg) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001301 // r0-r3: preserved
Ben Murdochda12d292016-06-02 14:46:10 +01001302 mov(ip, Operand(Smi::FromInt(type)));
1303 PushCommonFrame(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001304 if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001305 LoadConstantPoolPointerRegister();
1306 }
Ben Murdochda12d292016-06-02 14:46:10 +01001307 if (type == StackFrame::INTERNAL) {
1308 mov(ip, Operand(CodeObject()));
1309 push(ip);
1310 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001311}
1312
1313
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001314int MacroAssembler::LeaveFrame(StackFrame::Type type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001315 // r0: preserved
1316 // r1: preserved
1317 // r2: preserved
1318
1319 // Drop the execution stack down to the frame pointer and restore
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001320 // the caller frame pointer, return address and constant pool pointer
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001321 // (if FLAG_enable_embedded_constant_pool).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001322 int frame_ends;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001323 if (FLAG_enable_embedded_constant_pool) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001324 add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
1325 frame_ends = pc_offset();
1326 ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
1327 } else {
1328 mov(sp, fp);
1329 frame_ends = pc_offset();
1330 ldm(ia_w, sp, fp.bit() | lr.bit());
1331 }
1332 return frame_ends;
Steve Blocka7e24c12009-10-30 11:49:00 +00001333}
1334
1335
Steve Block1e0659c2011-05-24 12:43:12 +01001336void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001337 // Set up the frame structure on the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001338 DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
1339 DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
1340 DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
Ben Murdochda12d292016-06-02 14:46:10 +01001341 mov(ip, Operand(Smi::FromInt(StackFrame::EXIT)));
1342 PushCommonFrame(ip);
Steve Block1e0659c2011-05-24 12:43:12 +01001343 // Reserve room for saved entry sp and code object.
Ben Murdochda12d292016-06-02 14:46:10 +01001344 sub(sp, fp, Operand(ExitFrameConstants::kFixedFrameSizeFromFp));
Steve Block44f0eee2011-05-26 01:26:41 +01001345 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001346 mov(ip, Operand::Zero());
Steve Block1e0659c2011-05-24 12:43:12 +01001347 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
1348 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001349 if (FLAG_enable_embedded_constant_pool) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001350 str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
1351 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001352 mov(ip, Operand(CodeObject()));
Steve Block1e0659c2011-05-24 12:43:12 +01001353 str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001354
1355 // Save the frame pointer and the context in top.
Ben Murdoch589d6972011-11-30 16:04:58 +00001356 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Steve Blocka7e24c12009-10-30 11:49:00 +00001357 str(fp, MemOperand(ip));
Ben Murdoch589d6972011-11-30 16:04:58 +00001358 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
Steve Blocka7e24c12009-10-30 11:49:00 +00001359 str(cp, MemOperand(ip));
1360
Ben Murdochb0fe1622011-05-05 13:52:32 +01001361 // Optionally save all double registers.
1362 if (save_doubles) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001363 SaveFPRegs(sp, ip);
Steve Block1e0659c2011-05-24 12:43:12 +01001364 // Note that d0 will be accessible at
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001365 // fp - ExitFrameConstants::kFixedFrameSizeFromFp -
1366 // DwVfpRegister::kMaxNumRegisters * kDoubleSize,
1367 // since the sp slot, code slot and constant pool slot (if
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001368 // FLAG_enable_embedded_constant_pool) were pushed after the fp.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001369 }
Steve Block1e0659c2011-05-24 12:43:12 +01001370
1371 // Reserve room for the return address and the requested stack space, and
1372 // align the frame in preparation for calling the runtime function.
1373 const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
1374 sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
1375 if (frame_alignment > 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001376 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Steve Block1e0659c2011-05-24 12:43:12 +01001377 and_(sp, sp, Operand(-frame_alignment));
1378 }
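  // Example with hypothetical addresses and frame_alignment == 8: an sp of
  // 0x7f68 is already aligned and stays put, while 0x7f64 is rounded down
  // to 0x7f60 by the and with -8.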
1379
1380 // Set the exit frame sp value to point just before the return address
1381 // location.
1382 add(ip, sp, Operand(kPointerSize));
1383 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001384}
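// Resulting exit frame (a sketch; fp-relative, per the DCHECKs above):
//   fp + 2 * kPointerSize : caller SP
//   fp + 1 * kPointerSize : caller PC (lr)
//   fp + 0                : caller FP
//   below fp              : frame type marker, saved-sp and code object
//                           slots, [constant pool], [d0..d15 when
//                           save_doubles], stack_space words, padding.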
1385
1386
Steve Block6ded16b2010-05-10 14:33:55 +01001387void MacroAssembler::InitializeNewString(Register string,
1388 Register length,
1389 Heap::RootListIndex map_index,
1390 Register scratch1,
1391 Register scratch2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001392 SmiTag(scratch1, length);
Steve Block6ded16b2010-05-10 14:33:55 +01001393 LoadRoot(scratch2, map_index);
1394 str(scratch1, FieldMemOperand(string, String::kLengthOffset));
1395 mov(scratch1, Operand(String::kEmptyHashField));
1396 str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
1397 str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
1398}
1399
1400
1401int MacroAssembler::ActivationFrameAlignment() {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001402#if V8_HOST_ARCH_ARM
Steve Blocka7e24c12009-10-30 11:49:00 +00001403 // Running on the real platform. Use the alignment as mandated by the local
1404 // environment.
1405 // Note: This will break if we ever start generating snapshots on one ARM
1406 // platform for another ARM platform with a different alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001407 return base::OS::ActivationFrameAlignment();
1408#else // V8_HOST_ARCH_ARM
Steve Blocka7e24c12009-10-30 11:49:00 +00001409 // If we are using the simulator then we should always align to the expected
1410 // alignment. As the simulator is used to generate snapshots we do not know
Steve Block6ded16b2010-05-10 14:33:55 +01001411 // if the target platform will need alignment, so this is controlled from a
1412 // flag.
1413 return FLAG_sim_stack_alignment;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001414#endif // V8_HOST_ARCH_ARM
Steve Blocka7e24c12009-10-30 11:49:00 +00001415}
1416
1417
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001418void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
1419 bool restore_context,
1420 bool argument_count_is_length) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001421 ConstantPoolUnavailableScope constant_pool_unavailable(this);
1422
Ben Murdochb0fe1622011-05-05 13:52:32 +01001423 // Optionally restore all double registers.
1424 if (save_doubles) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01001425 // Calculate the stack location of the saved doubles and restore them.
Ben Murdochda12d292016-06-02 14:46:10 +01001426 const int offset = ExitFrameConstants::kFixedFrameSizeFromFp;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001427 sub(r3, fp,
1428 Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
1429 RestoreFPRegs(r3, ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001430 }
1431
Steve Blocka7e24c12009-10-30 11:49:00 +00001432 // Clear top frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001433 mov(r3, Operand::Zero());
Ben Murdoch589d6972011-11-30 16:04:58 +00001434 mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Steve Blocka7e24c12009-10-30 11:49:00 +00001435 str(r3, MemOperand(ip));
1436
1437 // Restore current context from top and clear it in debug mode.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001438 if (restore_context) {
1439 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
1440 ldr(cp, MemOperand(ip));
1441 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001442#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001443 mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
Steve Blocka7e24c12009-10-30 11:49:00 +00001444 str(r3, MemOperand(ip));
1445#endif
1446
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001447 // Tear down the exit frame, pop the arguments, and return.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001448 if (FLAG_enable_embedded_constant_pool) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001449 ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
1450 }
Steve Block1e0659c2011-05-24 12:43:12 +01001451 mov(sp, Operand(fp));
1452 ldm(ia_w, sp, fp.bit() | lr.bit());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001453 if (argument_count.is_valid()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001454 if (argument_count_is_length) {
1455 add(sp, sp, argument_count);
1456 } else {
1457 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
1458 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001459 }
1460}
1461
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001462
1463void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001464 if (use_eabi_hardfloat()) {
1465 Move(dst, d0);
1466 } else {
1467 vmov(dst, r0, r1);
1468 }
1469}
1470
1471
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001472// On ARM this is just a synonym to make the purpose clear.
1473void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
1474 MovFromFloatResult(dst);
Steve Blocka7e24c12009-10-30 11:49:00 +00001475}
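// ABI note: with EABI hard-float, double results travel directly in d0;
// with soft-float they come back in the core register pair r0/r1, which is
// why the vmov above reassembles the value.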
1476
Ben Murdochda12d292016-06-02 14:46:10 +01001477void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
1478 Register caller_args_count_reg,
1479 Register scratch0, Register scratch1) {
1480#ifdef DEBUG
1481 if (callee_args_count.is_reg()) {
1482 DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
1483 scratch1));
1484 } else {
1485 DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
1486 }
1487#endif
1488
1489 // Calculate the end of the destination area where we will put the arguments
1490 // after we drop the current frame. kPointerSize is added to count the
1491 // receiver argument, which is not included in the formal parameter count.
1492 Register dst_reg = scratch0;
1493 add(dst_reg, fp, Operand(caller_args_count_reg, LSL, kPointerSizeLog2));
1494 add(dst_reg, dst_reg,
1495 Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));
1496
1497 Register src_reg = caller_args_count_reg;
1498 // Calculate the end of source area. +kPointerSize is for the receiver.
1499 if (callee_args_count.is_reg()) {
1500 add(src_reg, sp, Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
1501 add(src_reg, src_reg, Operand(kPointerSize));
1502 } else {
1503 add(src_reg, sp,
1504 Operand((callee_args_count.immediate() + 1) * kPointerSize));
1505 }
1506
1507 if (FLAG_debug_code) {
1508 cmp(src_reg, dst_reg);
1509 Check(lo, kStackAccessBelowStackPointer);
1510 }
1511
1512 // Restore caller's frame pointer and return address now as they will be
1513 // overwritten by the copying loop.
1514 ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
1515 ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1516
1517 // Now copy the callee arguments to the caller frame, going backwards to
1518 // avoid corrupting them (the source and destination areas could overlap).
1519
1520 // Both src_reg and dst_reg are pointing to the word after the one to copy,
1521 // so they must be pre-decremented in the loop.
1522 Register tmp_reg = scratch1;
1523 Label loop, entry;
1524 b(&entry);
1525 bind(&loop);
1526 ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
1527 str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
1528 bind(&entry);
1529 cmp(sp, src_reg);
1530 b(ne, &loop);
1531
1532 // Leave current frame.
1533 mov(sp, dst_reg);
1534}
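// C-like sketch of the loop above (a reading aid only): src and dst start
// one word past the last slot to copy, matching the pre-decrement loop.
//   while (sp != src) *--dst = *--src;
//   sp = dst;
// Afterwards the callee's arguments overlay the caller's argument area and
// the current frame is gone.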
Steve Blocka7e24c12009-10-30 11:49:00 +00001535
1536void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1537 const ParameterCount& actual,
Steve Blocka7e24c12009-10-30 11:49:00 +00001538 Label* done,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001539 bool* definitely_mismatches,
Ben Murdochb8e0da22011-05-16 14:20:40 +01001540 InvokeFlag flag,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001541 const CallWrapper& call_wrapper) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001542 bool definitely_matches = false;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001543 *definitely_mismatches = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00001544 Label regular_invoke;
1545
1546 // Check whether the expected and actual arguments count match. If not,
1547 // setup registers according to contract with ArgumentsAdaptorTrampoline:
1548 // r0: actual arguments count
1549 // r1: function (passed through to callee)
1550 // r2: expected arguments count
Steve Blocka7e24c12009-10-30 11:49:00 +00001551
1552 // The code below is made a lot easier because the calling code already sets
1553 // up actual and expected registers according to the contract if values are
1554 // passed in registers.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001555 DCHECK(actual.is_immediate() || actual.reg().is(r0));
1556 DCHECK(expected.is_immediate() || expected.reg().is(r2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001557
1558 if (expected.is_immediate()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001559 DCHECK(actual.is_immediate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001560 mov(r0, Operand(actual.immediate()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001561 if (expected.immediate() == actual.immediate()) {
1562 definitely_matches = true;
1563 } else {
Steve Blocka7e24c12009-10-30 11:49:00 +00001564 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
1565 if (expected.immediate() == sentinel) {
1566 // Don't worry about adapting arguments for builtins that
1567 // don't want that done. Skip the adaptation code by making it look
1568 // like we have a match between expected and actual number of
1569 // arguments.
1570 definitely_matches = true;
1571 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001572 *definitely_mismatches = true;
Steve Blocka7e24c12009-10-30 11:49:00 +00001573 mov(r2, Operand(expected.immediate()));
1574 }
1575 }
1576 } else {
1577 if (actual.is_immediate()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001578 mov(r0, Operand(actual.immediate()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001579 cmp(expected.reg(), Operand(actual.immediate()));
1580 b(eq, &regular_invoke);
Steve Blocka7e24c12009-10-30 11:49:00 +00001581 } else {
1582 cmp(expected.reg(), Operand(actual.reg()));
1583 b(eq, &regular_invoke);
1584 }
1585 }
1586
1587 if (!definitely_matches) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001588 Handle<Code> adaptor =
Steve Block44f0eee2011-05-26 01:26:41 +01001589 isolate()->builtins()->ArgumentsAdaptorTrampoline();
Steve Blocka7e24c12009-10-30 11:49:00 +00001590 if (flag == CALL_FUNCTION) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001591 call_wrapper.BeforeCall(CallSize(adaptor));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001592 Call(adaptor);
Ben Murdoch257744e2011-11-30 15:57:28 +00001593 call_wrapper.AfterCall();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001594 if (!*definitely_mismatches) {
1595 b(done);
1596 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001597 } else {
1598 Jump(adaptor, RelocInfo::CODE_TARGET);
1599 }
1600 bind(&regular_invoke);
1601 }
1602}
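// Contract example (hypothetical counts): a call passing 2 actual arguments
// to a function expecting 3 reaches the adaptor path above with r0 == 2,
// r2 == 3 and r1 holding the function; ArgumentsAdaptorTrampoline then
// fills the gap before entering the callee.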
1603
1604
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001605void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
1606 const ParameterCount& expected,
1607 const ParameterCount& actual) {
1608 Label skip_flooding;
1609 ExternalReference step_in_enabled =
1610 ExternalReference::debug_step_in_enabled_address(isolate());
1611 mov(r4, Operand(step_in_enabled));
1612 ldrb(r4, MemOperand(r4));
1613 cmp(r4, Operand::Zero());
1614 b(eq, &skip_flooding);
1615 {
1616 FrameScope frame(this,
1617 has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
1618 if (expected.is_reg()) {
1619 SmiTag(expected.reg());
1620 Push(expected.reg());
1621 }
1622 if (actual.is_reg()) {
1623 SmiTag(actual.reg());
1624 Push(actual.reg());
1625 }
1626 if (new_target.is_valid()) {
1627 Push(new_target);
1628 }
1629 Push(fun);
1630 Push(fun);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001631 CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001632 Pop(fun);
1633 if (new_target.is_valid()) {
1634 Pop(new_target);
1635 }
1636 if (actual.is_reg()) {
1637 Pop(actual.reg());
1638 SmiUntag(actual.reg());
1639 }
1640 if (expected.is_reg()) {
1641 Pop(expected.reg());
1642 SmiUntag(expected.reg());
1643 }
1644 }
1645 bind(&skip_flooding);
1646}
1647
1648
1649void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
1650 const ParameterCount& expected,
1651 const ParameterCount& actual,
1652 InvokeFlag flag,
1653 const CallWrapper& call_wrapper) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001654 // You can't call a function without a valid frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001655 DCHECK(flag == JUMP_FUNCTION || has_frame());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001656 DCHECK(function.is(r1));
1657 DCHECK_IMPLIES(new_target.is_valid(), new_target.is(r3));
1658
1659 if (call_wrapper.NeedsDebugStepCheck()) {
1660 FloodFunctionIfStepping(function, new_target, expected, actual);
1661 }
1662
1663 // Clear the new.target register if not given.
1664 if (!new_target.is_valid()) {
1665 LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1666 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001667
Steve Blocka7e24c12009-10-30 11:49:00 +00001668 Label done;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001669 bool definitely_mismatches = false;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001670 InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001671 call_wrapper);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001672 if (!definitely_mismatches) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001673 // We call indirectly through the code field in the function to
1674 // allow recompilation to take effect without changing any of the
1675 // call sites.
1676 Register code = r4;
1677 ldr(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001678 if (flag == CALL_FUNCTION) {
1679 call_wrapper.BeforeCall(CallSize(code));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001680 Call(code);
1681 call_wrapper.AfterCall();
1682 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001683 DCHECK(flag == JUMP_FUNCTION);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001684 Jump(code);
1685 }
Ben Murdoch85b71792012-04-11 18:30:58 +01001686
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001687 // Continue here if InvokePrologue handled the invocation itself through
1688 // the arguments adaptor because the parameter counts mismatched.
1689 bind(&done);
1690 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001691}
1692
1693
Steve Blocka7e24c12009-10-30 11:49:00 +00001694void MacroAssembler::InvokeFunction(Register fun,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001695 Register new_target,
Steve Blocka7e24c12009-10-30 11:49:00 +00001696 const ParameterCount& actual,
Ben Murdochb8e0da22011-05-16 14:20:40 +01001697 InvokeFlag flag,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001698 const CallWrapper& call_wrapper) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001699 // You can't call a function without a valid frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001700 DCHECK(flag == JUMP_FUNCTION || has_frame());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001701
Steve Blocka7e24c12009-10-30 11:49:00 +00001702 // Contract with called JS functions requires that function is passed in r1.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001703 DCHECK(fun.is(r1));
Steve Blocka7e24c12009-10-30 11:49:00 +00001704
1705 Register expected_reg = r2;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001706 Register temp_reg = r4;
Steve Blocka7e24c12009-10-30 11:49:00 +00001707
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001708 ldr(temp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001709 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1710 ldr(expected_reg,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001711 FieldMemOperand(temp_reg,
Steve Blocka7e24c12009-10-30 11:49:00 +00001712 SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001713 SmiUntag(expected_reg);
Steve Blocka7e24c12009-10-30 11:49:00 +00001714
1715 ParameterCount expected(expected_reg);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001716 InvokeFunctionCode(fun, new_target, expected, actual, flag, call_wrapper);
Steve Blocka7e24c12009-10-30 11:49:00 +00001717}
1718
1719
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001720void MacroAssembler::InvokeFunction(Register function,
1721 const ParameterCount& expected,
Andrei Popescu402d9372010-02-26 13:31:12 +00001722 const ParameterCount& actual,
Ben Murdoch257744e2011-11-30 15:57:28 +00001723 InvokeFlag flag,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001724 const CallWrapper& call_wrapper) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001725 // You can't call a function without a valid frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001726 DCHECK(flag == JUMP_FUNCTION || has_frame());
1727
1728 // Contract with called JS functions requires that function is passed in r1.
1729 DCHECK(function.is(r1));
Andrei Popescu402d9372010-02-26 13:31:12 +00001730
1731 // Get the function and setup the context.
Andrei Popescu402d9372010-02-26 13:31:12 +00001732 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1733
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001734 InvokeFunctionCode(r1, no_reg, expected, actual, flag, call_wrapper);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001735}
1736
1737
1738void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
1739 const ParameterCount& expected,
1740 const ParameterCount& actual,
1741 InvokeFlag flag,
1742 const CallWrapper& call_wrapper) {
1743 Move(r1, function);
1744 InvokeFunction(r1, expected, actual, flag, call_wrapper);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001745}
1746
1747
Ben Murdochb0fe1622011-05-05 13:52:32 +01001748void MacroAssembler::IsObjectJSStringType(Register object,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001749 Register scratch,
1750 Label* fail) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001751 DCHECK(kNotStringTag != 0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001752
1753 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
1754 ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1755 tst(scratch, Operand(kIsNotStringMask));
Steve Block1e0659c2011-05-24 12:43:12 +01001756 b(ne, fail);
Andrei Popescu402d9372010-02-26 13:31:12 +00001757}
1758
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001759
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001760void MacroAssembler::IsObjectNameType(Register object,
1761 Register scratch,
1762 Label* fail) {
1763 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
1764 ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1765 cmp(scratch, Operand(LAST_NAME_TYPE));
1766 b(hi, fail);
1767}
1768
1769
Andrei Popescu402d9372010-02-26 13:31:12 +00001770void MacroAssembler::DebugBreak() {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001771 mov(r0, Operand::Zero());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001772 mov(r1,
1773 Operand(ExternalReference(Runtime::kHandleDebuggerStatement, isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001774 CEntryStub ces(isolate(), 1);
1775 DCHECK(AllowThisStubCall(&ces));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001776 Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
Andrei Popescu402d9372010-02-26 13:31:12 +00001777}
Steve Blocka7e24c12009-10-30 11:49:00 +00001778
1779
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001780void MacroAssembler::PushStackHandler() {
Steve Blocka7e24c12009-10-30 11:49:00 +00001781 // Adjust this code if not the case.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001782 STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001783 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001784
1785 // Link the current handler as the next handler.
1786 mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
1787 ldr(r5, MemOperand(r6));
1788 push(r5);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001789
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001790 // Set this new handler as the current one.
1791 str(sp, MemOperand(r6));
Steve Blocka7e24c12009-10-30 11:49:00 +00001792}
1793
1794
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001795void MacroAssembler::PopStackHandler() {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001796 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Leon Clarkee46be812010-01-19 14:06:41 +00001797 pop(r1);
Ben Murdoch589d6972011-11-30 16:04:58 +00001798 mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
Leon Clarkee46be812010-01-19 14:06:41 +00001799 add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
1800 str(r1, MemOperand(ip));
1801}
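// The handler chain is a singly linked list threaded through the stack:
// Isolate::kHandlerAddress holds the sp of the newest handler, and each
// handler is one word (StackHandlerConstants::kSize == kPointerSize) whose
// kNextOffset slot points at the previous handler.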
1802
1803
Steve Blocka7e24c12009-10-30 11:49:00 +00001804void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1805 Register scratch,
1806 Label* miss) {
1807 Label same_contexts;
1808
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001809 DCHECK(!holder_reg.is(scratch));
1810 DCHECK(!holder_reg.is(ip));
1811 DCHECK(!scratch.is(ip));
Steve Blocka7e24c12009-10-30 11:49:00 +00001812
Ben Murdochda12d292016-06-02 14:46:10 +01001813 // Load current lexical context from the active StandardFrame, which
1814 // may require crawling past STUB frames.
1815 Label load_context;
1816 Label has_context;
1817 DCHECK(!ip.is(scratch));
1818 mov(ip, fp);
1819 bind(&load_context);
1820 ldr(scratch, MemOperand(ip, CommonFrameConstants::kContextOrFrameTypeOffset));
1821 JumpIfNotSmi(scratch, &has_context);
1822 ldr(ip, MemOperand(ip, CommonFrameConstants::kCallerFPOffset));
1823 b(&load_context);
1824 bind(&has_context);
1825
Steve Blocka7e24c12009-10-30 11:49:00 +00001826 // In debug mode, make sure the lexical context is set.
1827#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001828 cmp(scratch, Operand::Zero());
1829 Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
Steve Blocka7e24c12009-10-30 11:49:00 +00001830#endif
1831
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001832 // Load the native context of the current context.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001833 ldr(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));
Steve Blocka7e24c12009-10-30 11:49:00 +00001834
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001835 // Check the context is a native context.
Steve Block44f0eee2011-05-26 01:26:41 +01001836 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001837 // Cannot use ip as a temporary in this verification code, because it is
1838 // clobbered as part of cmp with an object Operand.
1839 push(holder_reg); // Temporarily save holder on the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001840 // Read the first word and compare to the native_context_map.
Steve Blocka7e24c12009-10-30 11:49:00 +00001841 ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001842 LoadRoot(ip, Heap::kNativeContextMapRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00001843 cmp(holder_reg, ip);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001844 Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
Steve Blocka7e24c12009-10-30 11:49:00 +00001845 pop(holder_reg); // Restore holder.
1846 }
1847
1848 // Check if both contexts are the same.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001849 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001850 cmp(scratch, Operand(ip));
1851 b(eq, &same_contexts);
1852
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001853 // Check the context is a native context.
Steve Block44f0eee2011-05-26 01:26:41 +01001854 if (emit_debug_code()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001855 // Cannot use ip as a temporary in this verification code, because it is
1856 // clobbered as part of cmp with an object Operand.
1857 push(holder_reg); // Temporarily save holder on the stack.
1858 mov(holder_reg, ip); // Move ip to its holding place.
1859 LoadRoot(ip, Heap::kNullValueRootIndex);
1860 cmp(holder_reg, ip);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001861 Check(ne, kJSGlobalProxyContextShouldNotBeNull);
Steve Blocka7e24c12009-10-30 11:49:00 +00001862
1863 ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001864 LoadRoot(ip, Heap::kNativeContextMapRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00001865 cmp(holder_reg, ip);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001866 Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
Steve Blocka7e24c12009-10-30 11:49:00 +00001867 // Restore ip is not needed. ip is reloaded below.
1868 pop(holder_reg); // Restore holder.
1869 // Restore ip to holder's context.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001870 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001871 }
1872
1873 // Check that the security token in the calling global object is
1874 // compatible with the security token in the receiving global
1875 // object.
1876 int token_offset = Context::kHeaderSize +
1877 Context::SECURITY_TOKEN_INDEX * kPointerSize;
1878
1879 ldr(scratch, FieldMemOperand(scratch, token_offset));
1880 ldr(ip, FieldMemOperand(ip, token_offset));
1881 cmp(scratch, Operand(ip));
1882 b(ne, miss);
1883
1884 bind(&same_contexts);
1885}
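// Reading aid for the StandardFrame crawl at the top of this function
// (pseudocode, not emitted):
//   frame = fp;
//   while (IsSmi(*(frame + kContextOrFrameTypeOffset)))  // STUB marker
//     frame = *(frame + kCallerFPOffset);
//   scratch = *(frame + kContextOrFrameTypeOffset);      // a real context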
1886
1887
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001888// Compute the hash code from the untagged key. This must be kept in sync with
1889// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
1890// code-stubs-hydrogen.cc
Ben Murdochc7cc0282012-03-05 14:35:55 +00001891void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
1892 // First of all we assign the hash seed to scratch.
1893 LoadRoot(scratch, Heap::kHashSeedRootIndex);
1894 SmiUntag(scratch);
1895
1896 // Xor original key with a seed.
1897 eor(t0, t0, Operand(scratch));
1898
1899 // Compute the hash code from the untagged key. This must be kept in sync
1900 // with ComputeIntegerHash in utils.h.
1901 //
1902 // hash = ~hash + (hash << 15);
1903 mvn(scratch, Operand(t0));
1904 add(t0, scratch, Operand(t0, LSL, 15));
1905 // hash = hash ^ (hash >> 12);
1906 eor(t0, t0, Operand(t0, LSR, 12));
1907 // hash = hash + (hash << 2);
1908 add(t0, t0, Operand(t0, LSL, 2));
1909 // hash = hash ^ (hash >> 4);
1910 eor(t0, t0, Operand(t0, LSR, 4));
1911 // hash = hash * 2057;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001912 mov(scratch, Operand(t0, LSL, 11));
1913 add(t0, t0, Operand(t0, LSL, 3));
1914 add(t0, t0, scratch);
Ben Murdochc7cc0282012-03-05 14:35:55 +00001915 // hash = hash ^ (hash >> 16);
1916 eor(t0, t0, Operand(t0, LSR, 16));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001917 bic(t0, t0, Operand(0xc0000000u));
Ben Murdochc7cc0282012-03-05 14:35:55 +00001918}
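// Scalar equivalent of the emitted sequence (a sketch assuming uint32_t
// wraparound), useful when checking it against ComputeIntegerHash:
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash ^= hash >> 12;
//   hash += hash << 2;
//   hash ^= hash >> 4;
//   hash *= 2057;        // == hash + (hash << 3) + (hash << 11)
//   hash ^= hash >> 16;
//   hash &= 0x3fffffff;  // the bic of 0xc0000000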
1919
1920
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001921void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1922 Register elements,
1923 Register key,
1924 Register result,
1925 Register t0,
1926 Register t1,
1927 Register t2) {
1928 // Register use:
1929 //
1930 // elements - holds the slow-case elements of the receiver on entry.
1931 // Unchanged unless 'result' is the same register.
1932 //
1933 // key - holds the smi key on entry.
1934 // Unchanged unless 'result' is the same register.
1935 //
1936 // result - holds the result on exit if the load succeeded.
1937 // Allowed to be the same as 'elements' or 'key'.
1938 // Unchanged on bailout so 'elements' or 'key' can be used
1939 // in further computation.
1940 //
1941 // Scratch registers:
1942 //
1943 // t0 - holds the untagged key on entry and holds the hash once computed.
1944 //
1945 // t1 - used to hold the capacity mask of the dictionary
1946 //
1947 // t2 - used for the index into the dictionary.
1948 Label done;
1949
Ben Murdochc7cc0282012-03-05 14:35:55 +00001950 GetNumberHash(t0, t1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001951
1952 // Compute the capacity mask.
Ben Murdochc7cc0282012-03-05 14:35:55 +00001953 ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001954 SmiUntag(t1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001955 sub(t1, t1, Operand(1));
1956
1957 // Generate an unrolled loop that performs a few probes before giving up.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001958 for (int i = 0; i < kNumberDictionaryProbes; i++) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001959 // Use t2 for index calculations and keep the hash intact in t0.
1960 mov(t2, t0);
1961 // Compute the masked index: (hash + i + i * i) & mask.
1962 if (i > 0) {
Ben Murdochc7cc0282012-03-05 14:35:55 +00001963 add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001964 }
1965 and_(t2, t2, Operand(t1));
1966
1967 // Scale the index by multiplying by the element size.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001968 DCHECK(SeededNumberDictionary::kEntrySize == 3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001969 add(t2, t2, Operand(t2, LSL, 1)); // t2 = t2 * 3
1970
1971 // Check if the key is identical to the name.
1972 add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
Ben Murdochc7cc0282012-03-05 14:35:55 +00001973 ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001974 cmp(key, Operand(ip));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001975 if (i != kNumberDictionaryProbes - 1) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001976 b(eq, &done);
1977 } else {
1978 b(ne, miss);
1979 }
1980 }
1981
1982 bind(&done);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001983 // Check that the value is a field property.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001984 // t2: elements + (index * kPointerSize)
1985 const int kDetailsOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00001986 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001987 ldr(t1, FieldMemOperand(t2, kDetailsOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001988 DCHECK_EQ(DATA, 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00001989 tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001990 b(ne, miss);
1991
1992 // Get the value at the masked, scaled index and return.
1993 const int kValueOffset =
Ben Murdochc7cc0282012-03-05 14:35:55 +00001994 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001995 ldr(result, FieldMemOperand(t2, kValueOffset));
1996}
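// Probe sketch: entry_i = (hash + GetProbeOffset(i)) & mask for
// i = 0..kNumberDictionaryProbes-1; each entry spans three pointers
// (key, value, details), hence the "t2 = t2 * 3" scaling above.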
1997
1998
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001999void MacroAssembler::Allocate(int object_size,
2000 Register result,
2001 Register scratch1,
2002 Register scratch2,
2003 Label* gc_required,
2004 AllocationFlags flags) {
2005 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
John Reck59135872010-11-02 12:39:01 -07002006 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002007 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002008 // Trash the registers to simulate an allocation failure.
2009 mov(result, Operand(0x7091));
2010 mov(scratch1, Operand(0x7191));
2011 mov(scratch2, Operand(0x7291));
2012 }
2013 jmp(gc_required);
2014 return;
2015 }
2016
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002017 DCHECK(!AreAliased(result, scratch1, scratch2, ip));
Steve Blocka7e24c12009-10-30 11:49:00 +00002018
Kristian Monsen25f61362010-05-21 11:50:48 +01002019 // Make object size into bytes.
2020 if ((flags & SIZE_IN_WORDS) != 0) {
2021 object_size *= kPointerSize;
2022 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002023 DCHECK_EQ(0, object_size & kObjectAlignmentMask);
Kristian Monsen25f61362010-05-21 11:50:48 +01002024
Ben Murdochb0fe1622011-05-05 13:52:32 +01002025 // Check relative positions of allocation top and limit addresses.
2026 // The values must be adjacent in memory to allow the use of LDM.
2027 // Also, assert that the registers are numbered such that the values
2028 // are loaded in the correct order.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002029 ExternalReference allocation_top =
2030 AllocationUtils::GetAllocationTopReference(isolate(), flags);
2031 ExternalReference allocation_limit =
2032 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002033
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002034 intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
2035 intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002036 DCHECK((limit - top) == kPointerSize);
2037 DCHECK(result.code() < ip.code());
2038
2039 // Set up allocation top address register.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002040 Register top_address = scratch1;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002041 // This code stores a temporary value in ip. This is OK, as the code below
2042 // does not need ip for implicit literal generation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002043 Register alloc_limit = ip;
2044 Register result_end = scratch2;
2045 mov(top_address, Operand(allocation_top));
2046
Steve Blocka7e24c12009-10-30 11:49:00 +00002047 if ((flags & RESULT_CONTAINS_TOP) == 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002048 // Load allocation top into result and allocation limit into alloc_limit.
2049 ldm(ia, top_address, result.bit() | alloc_limit.bit());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002050 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002051 if (emit_debug_code()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002052 // Assert that result actually contains top on entry.
2053 ldr(alloc_limit, MemOperand(top_address));
2054 cmp(result, alloc_limit);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002055 Check(eq, kUnexpectedAllocationTop);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002056 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002057 // Load allocation limit. Result already contains allocation top.
2058 ldr(alloc_limit, MemOperand(top_address, limit - top));
Steve Blocka7e24c12009-10-30 11:49:00 +00002059 }
2060
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002061 if ((flags & DOUBLE_ALIGNMENT) != 0) {
2062 // Align the next allocation. Storing the filler map without checking top is
2063 // safe in new-space because the limit of the heap is aligned there.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002064 STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002065 and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002066 Label aligned;
2067 b(eq, &aligned);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002068 if ((flags & PRETENURE) != 0) {
2069 cmp(result, Operand(alloc_limit));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002070 b(hs, gc_required);
2071 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002072 mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
2073 str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002074 bind(&aligned);
2075 }
2076
Steve Blocka7e24c12009-10-30 11:49:00 +00002077 // Calculate new top and bail out if new space is exhausted. Use result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002078 // to calculate the new top. We must preserve the ip register at this
2079 // point, so we cannot just use add().
2080 DCHECK(object_size > 0);
2081 Register source = result;
2082 Condition cond = al;
2083 int shift = 0;
2084 while (object_size != 0) {
2085 if (((object_size >> shift) & 0x03) == 0) {
2086 shift += 2;
2087 } else {
2088 int bits = object_size & (0xff << shift);
2089 object_size -= bits;
2090 shift += 8;
2091 Operand bits_operand(bits);
2092 DCHECK(bits_operand.instructions_required(this) == 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002093 add(result_end, source, bits_operand, SetCC, cond);
2094 source = result_end;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002095 cond = cc;
2096 }
2097 }
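  // Example (hypothetical size): object_size == 0x1234 splits into the two
  // encodable immediates 0x234 (an 8-bit field at shift 2) and 0x1000 (at
  // shift 12), i.e. two adds; iterations after the first are predicated on
  // cc, so a detected carry survives to the b(cs, gc_required) below.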
Steve Block1e0659c2011-05-24 12:43:12 +01002098 b(cs, gc_required);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002099 cmp(result_end, Operand(alloc_limit));
Steve Blocka7e24c12009-10-30 11:49:00 +00002100 b(hi, gc_required);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002101 str(result_end, MemOperand(top_address));
Steve Blocka7e24c12009-10-30 11:49:00 +00002102
Ben Murdochb0fe1622011-05-05 13:52:32 +01002103 // Tag object if requested.
Steve Blocka7e24c12009-10-30 11:49:00 +00002104 if ((flags & TAG_OBJECT) != 0) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002105 add(result, result, Operand(kHeapObjectTag));
Steve Blocka7e24c12009-10-30 11:49:00 +00002106 }
2107}
2108
2109
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002110void MacroAssembler::Allocate(Register object_size, Register result,
2111 Register result_end, Register scratch,
2112 Label* gc_required, AllocationFlags flags) {
John Reck59135872010-11-02 12:39:01 -07002113 if (!FLAG_inline_new) {
Steve Block44f0eee2011-05-26 01:26:41 +01002114 if (emit_debug_code()) {
John Reck59135872010-11-02 12:39:01 -07002115 // Trash the registers to simulate an allocation failure.
2116 mov(result, Operand(0x7091));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002117 mov(scratch, Operand(0x7191));
2118 mov(result_end, Operand(0x7291));
John Reck59135872010-11-02 12:39:01 -07002119 }
2120 jmp(gc_required);
2121 return;
2122 }
2123
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002124 // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
2125 // is not specified. Other registers must not overlap.
2126 DCHECK(!AreAliased(object_size, result, scratch, ip));
2127 DCHECK(!AreAliased(result_end, result, scratch, ip));
2128 DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));
Steve Blocka7e24c12009-10-30 11:49:00 +00002129
Ben Murdochb0fe1622011-05-05 13:52:32 +01002130 // Check relative positions of allocation top and limit addresses.
2131 // The values must be adjacent in memory to allow the use of LDM.
2132 // Also, assert that the registers are numbered such that the values
2133 // are loaded in the correct order.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002134 ExternalReference allocation_top =
2135 AllocationUtils::GetAllocationTopReference(isolate(), flags);
2136 ExternalReference allocation_limit =
2137 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002138 intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
2139 intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002140 DCHECK((limit - top) == kPointerSize);
2141 DCHECK(result.code() < ip.code());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002142
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002143 // Set up allocation top address and allocation limit registers.
2144 Register top_address = scratch;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002145 // This code stores a temporary value in ip. This is OK, as the code below
2146 // does not need ip for implicit literal generation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002147 Register alloc_limit = ip;
2148 mov(top_address, Operand(allocation_top));
2149
Steve Blocka7e24c12009-10-30 11:49:00 +00002150 if ((flags & RESULT_CONTAINS_TOP) == 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002151 // Load allocation top into result and allocation limit into alloc_limit.
2152 ldm(ia, top_address, result.bit() | alloc_limit.bit());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002153 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01002154 if (emit_debug_code()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002155 // Assert that result actually contains top on entry.
2156 ldr(alloc_limit, MemOperand(top_address));
2157 cmp(result, alloc_limit);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002158 Check(eq, kUnexpectedAllocationTop);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002159 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002160 // Load allocation limit. Result already contains allocation top.
2161 ldr(alloc_limit, MemOperand(top_address, limit - top));
Steve Blocka7e24c12009-10-30 11:49:00 +00002162 }
2163
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002164 if ((flags & DOUBLE_ALIGNMENT) != 0) {
2165 // Align the next allocation. Storing the filler map without checking top is
2166 // safe in new-space because the limit of the heap is aligned there.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002167 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002168 and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002169 Label aligned;
2170 b(eq, &aligned);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002171 if ((flags & PRETENURE) != 0) {
2172 cmp(result, Operand(alloc_limit));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002173 b(hs, gc_required);
2174 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002175 mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
2176 str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002177 bind(&aligned);
2178 }
2179
Steve Blocka7e24c12009-10-30 11:49:00 +00002180 // Calculate new top and bail out if new space is exhausted. Use result
Ben Murdochb0fe1622011-05-05 13:52:32 +01002181 // to calculate the new top. Object size may be in words so a shift is
2182 // required to get the number of bytes.
Kristian Monsen25f61362010-05-21 11:50:48 +01002183 if ((flags & SIZE_IN_WORDS) != 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002184 add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
Kristian Monsen25f61362010-05-21 11:50:48 +01002185 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002186 add(result_end, result, Operand(object_size), SetCC);
Kristian Monsen25f61362010-05-21 11:50:48 +01002187 }
Steve Block1e0659c2011-05-24 12:43:12 +01002188 b(cs, gc_required);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002189 cmp(result_end, Operand(alloc_limit));
Steve Blocka7e24c12009-10-30 11:49:00 +00002190 b(hi, gc_required);
2191
Steve Blockd0582a62009-12-15 09:54:21 +00002192 // Update allocation top. result temporarily holds the new top.
Steve Block44f0eee2011-05-26 01:26:41 +01002193 if (emit_debug_code()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002194 tst(result_end, Operand(kObjectAlignmentMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002195 Check(eq, kUnalignedAllocationInNewSpace);
Steve Blockd0582a62009-12-15 09:54:21 +00002196 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002197 str(result_end, MemOperand(top_address));
Steve Blocka7e24c12009-10-30 11:49:00 +00002198
2199 // Tag object if requested.
2200 if ((flags & TAG_OBJECT) != 0) {
2201 add(result, result, Operand(kHeapObjectTag));
2202 }
2203}
2204
2205
Andrei Popescu31002712010-02-23 13:46:05 +00002206void MacroAssembler::AllocateTwoByteString(Register result,
2207 Register length,
2208 Register scratch1,
2209 Register scratch2,
2210 Register scratch3,
2211 Label* gc_required) {
2212 // Calculate the number of bytes needed for the characters in the string while
2213 // observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002214 DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Andrei Popescu31002712010-02-23 13:46:05 +00002215 mov(scratch1, Operand(length, LSL, 1)); // Length in bytes, not chars.
2216 add(scratch1, scratch1,
2217 Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
Kristian Monsen25f61362010-05-21 11:50:48 +01002218 and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));
Andrei Popescu31002712010-02-23 13:46:05 +00002219
2220 // Allocate two-byte string in new space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002221 Allocate(scratch1,
2222 result,
2223 scratch2,
2224 scratch3,
2225 gc_required,
2226 TAG_OBJECT);
Andrei Popescu31002712010-02-23 13:46:05 +00002227
2228 // Set the map, length and hash field.
Steve Block6ded16b2010-05-10 14:33:55 +01002229 InitializeNewString(result,
2230 length,
2231 Heap::kStringMapRootIndex,
2232 scratch1,
2233 scratch2);
Andrei Popescu31002712010-02-23 13:46:05 +00002234}
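// Size math sketch (hypothetical length): for length == 5, scratch1 becomes
// 2*5 + SeqTwoByteString::kHeaderSize rounded up to the object alignment by
// the add-mask / and-with-~mask pair above.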
2235
2236
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002237void MacroAssembler::AllocateOneByteString(Register result, Register length,
2238 Register scratch1, Register scratch2,
2239 Register scratch3,
2240 Label* gc_required) {
Andrei Popescu31002712010-02-23 13:46:05 +00002241 // Calculate the number of bytes needed for the characters in the string while
2242 // observing object alignment.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002243 DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2244 DCHECK(kCharSize == 1);
Andrei Popescu31002712010-02-23 13:46:05 +00002245 add(scratch1, length,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002246 Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
Kristian Monsen25f61362010-05-21 11:50:48 +01002247 and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));
Andrei Popescu31002712010-02-23 13:46:05 +00002248
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002249 // Allocate one-byte string in new space.
2250 Allocate(scratch1,
2251 result,
2252 scratch2,
2253 scratch3,
2254 gc_required,
2255 TAG_OBJECT);
Andrei Popescu31002712010-02-23 13:46:05 +00002256
2257 // Set the map, length and hash field.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002258 InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
2259 scratch1, scratch2);
Andrei Popescu31002712010-02-23 13:46:05 +00002260}
2261
2262
2263void MacroAssembler::AllocateTwoByteConsString(Register result,
2264 Register length,
2265 Register scratch1,
2266 Register scratch2,
2267 Label* gc_required) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002268 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
2269 TAG_OBJECT);
Steve Block6ded16b2010-05-10 14:33:55 +01002270
2271 InitializeNewString(result,
2272 length,
2273 Heap::kConsStringMapRootIndex,
2274 scratch1,
2275 scratch2);
Andrei Popescu31002712010-02-23 13:46:05 +00002276}
2277
2278
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002279void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
2280 Register scratch1,
2281 Register scratch2,
2282 Label* gc_required) {
2283 Allocate(ConsString::kSize,
2284 result,
2285 scratch1,
2286 scratch2,
2287 gc_required,
2288 TAG_OBJECT);
Steve Block6ded16b2010-05-10 14:33:55 +01002289
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002290 InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
2291 scratch1, scratch2);
Andrei Popescu31002712010-02-23 13:46:05 +00002292}
2293
2294
Ben Murdoch589d6972011-11-30 16:04:58 +00002295void MacroAssembler::AllocateTwoByteSlicedString(Register result,
2296 Register length,
2297 Register scratch1,
2298 Register scratch2,
2299 Label* gc_required) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002300 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
2301 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00002302
2303 InitializeNewString(result,
2304 length,
2305 Heap::kSlicedStringMapRootIndex,
2306 scratch1,
2307 scratch2);
2308}
2309
2310
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002311void MacroAssembler::AllocateOneByteSlicedString(Register result,
2312 Register length,
2313 Register scratch1,
2314 Register scratch2,
2315 Label* gc_required) {
2316 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
2317 TAG_OBJECT);
Ben Murdoch589d6972011-11-30 16:04:58 +00002318
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002319 InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
2320 scratch1, scratch2);
Ben Murdoch589d6972011-11-30 16:04:58 +00002321}
2322
2323
Steve Block6ded16b2010-05-10 14:33:55 +01002324void MacroAssembler::CompareObjectType(Register object,
Steve Blocka7e24c12009-10-30 11:49:00 +00002325 Register map,
2326 Register type_reg,
2327 InstanceType type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002328 const Register temp = type_reg.is(no_reg) ? ip : type_reg;
2329
Steve Block6ded16b2010-05-10 14:33:55 +01002330 ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002331 CompareInstanceType(map, temp, type);
2332}
2333
2334
Steve Blocka7e24c12009-10-30 11:49:00 +00002335void MacroAssembler::CompareInstanceType(Register map,
2336 Register type_reg,
2337 InstanceType type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002338 // Registers map and type_reg can be ip. These two lines assert
2339 // that ip can be used with the two instructions (the constants
2340 // will never need ip).
2341 STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
2342 STATIC_ASSERT(LAST_TYPE < 256);
Steve Blocka7e24c12009-10-30 11:49:00 +00002343 ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
2344 cmp(type_reg, Operand(type));
2345}
2346
2347
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002348void MacroAssembler::CompareRoot(Register obj,
2349 Heap::RootListIndex index) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002350 DCHECK(!obj.is(ip));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002351 LoadRoot(ip, index);
2352 cmp(obj, ip);
2353}
2354
2355
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002356void MacroAssembler::CheckFastElements(Register map,
2357 Register scratch,
2358 Label* fail) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002359 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2360 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2361 STATIC_ASSERT(FAST_ELEMENTS == 2);
2362 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002363 ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002364 cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002365 b(hi, fail);
2366}
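// The fast elements kinds are laid out 0..3 as asserted above, so a single
// unsigned compare against kMaximumBitField2FastHoleyElementValue accepts
// all four at once; the variants below shift the bounds to accept only
// object or only smi kinds.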
2367
2368
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002369void MacroAssembler::CheckFastObjectElements(Register map,
2370 Register scratch,
2371 Label* fail) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002372 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2373 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2374 STATIC_ASSERT(FAST_ELEMENTS == 2);
2375 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002376 ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002377 cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002378 b(ls, fail);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002379 cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002380 b(hi, fail);
2381}
2382
2383
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002384void MacroAssembler::CheckFastSmiElements(Register map,
2385 Register scratch,
2386 Label* fail) {
2387 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2388 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002389 ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002390 cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002391 b(hi, fail);
2392}
2393
2394
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002395void MacroAssembler::StoreNumberToDoubleElements(
2396 Register value_reg,
2397 Register key_reg,
2398 Register elements_reg,
2399 Register scratch1,
2400 LowDwVfpRegister double_scratch,
2401 Label* fail,
2402 int elements_offset) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002403 DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002404 Label smi_value, store;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002405
2406 // Handle smi values specially.
2407 JumpIfSmi(value_reg, &smi_value);
2408
2409 // Ensure that the object is a heap number
2410 CheckMap(value_reg,
2411 scratch1,
2412 isolate()->factory()->heap_number_map(),
2413 fail,
2414 DONT_DO_SMI_CHECK);
2415
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002416 vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
2417 // Force a canonical NaN.
2418 if (emit_debug_code()) {
2419 vmrs(ip);
2420 tst(ip, Operand(kVFPDefaultNaNModeControlBit));
2421 Assert(ne, kDefaultNaNModeNotSet);
2422 }
2423 VFPCanonicalizeNaN(double_scratch);
2424 b(&store);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002425
2426 bind(&smi_value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002427 SmiToDouble(double_scratch, value_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002428
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002429 bind(&store);
2430 add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
2431 vstr(double_scratch,
2432 FieldMemOperand(scratch1,
2433 FixedDoubleArray::kHeaderSize - elements_offset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002434}
2435
2436
2437void MacroAssembler::CompareMap(Register obj,
2438 Register scratch,
2439 Handle<Map> map,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002440 Label* early_success) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002441 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002442 CompareMap(scratch, map, early_success);
2443}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002444
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002445
2446void MacroAssembler::CompareMap(Register obj_map,
2447 Handle<Map> map,
2448 Label* early_success) {
2449 cmp(obj_map, Operand(map));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002450}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, scratch, map, &success);
  b(ne, fail);
  bind(&success);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(ip, index);
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  ldr(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  Jump(success, RelocInfo::CODE_TARGET, eq);
  bind(&fail);
}


void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, Operand(cell));
  ldr(scratch, FieldMemOperand(scratch, WeakCell::kValueOffset));
  cmp(value, scratch);
}


void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, Operand(cell));
  ldr(value, FieldMemOperand(value, WeakCell::kValueOffset));
}


void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
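
// Illustrative note (not from the original source): in this V8 snapshot a
// cleared WeakCell is understood to hold the smi zero in its value slot,
// which is why LoadWeakValue can treat "value is a smi" as "the weak
// reference was cleared". A typical caller, with r2 free:
//
//   Label miss;
//   __ LoadWeakValue(r2, weak_cell, &miss);  // r2 := target, or jump to miss
//   ...use r2...
//   __ bind(&miss);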


void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp, Register temp2) {
  Label done, loop;
  ldr(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done);
  CompareObjectType(result, temp, temp2, MAP_TYPE);
  b(ne, &done);
  ldr(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
  b(&loop);
  bind(&done);
}


void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index, pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}


void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
  if (CpuFeatures::IsSupported(VFP3)) {
    vmov(value.low(), smi);
    vcvt_f64_s32(value, 1);
  } else {
    SmiUntag(ip, smi);
    vmov(value.low(), ip);
    vcvt_f64_s32(value, value.low());
  }
}
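
// Illustrative note (not from the original source): on 32-bit ARM a smi is
// the integer shifted left by one, so the smi 5 is the bit pattern for 10.
// The VFP3 fast path exploits this: vcvt_f64_s32 with one fractional bit
// treats the tagged value as fixed-point with one fraction bit, dividing by
// two during the int-to-double conversion and untagging for free. The
// fallback untags explicitly and converts the plain integer.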


void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
                                       LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}


void MacroAssembler::TryDoubleToInt32Exact(Register result,
                                           DwVfpRegister double_input,
                                           LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}
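
// Usage sketch (illustrative, not from the original source): both helpers
// leave the VFP comparison in the flags, so the caller decides what inexact
// means, e.g.:
//
//   __ TryDoubleToInt32Exact(r0, d0, d1);  // r0 := round-to-zero(d0)
//   __ b(ne, &not_exact_int32);            // taken if d0 != (double)r0
//
// The double->int32->double round trip only reproduces the input when the
// value is exactly representable as a signed 32-bit integer; NaN compares
// unordered and also takes the ne branch.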


void MacroAssembler::TryInt32Floor(Register result,
                                   DwVfpRegister double_input,
                                   Register input_high,
                                   LowDwVfpRegister double_scratch,
                                   Label* done,
                                   Label* exact) {
  DCHECK(!result.is(input_high));
  DCHECK(!double_input.is(double_scratch));
  Label negative, exception;

  VmovHigh(input_high, double_input);

  // Test for NaN and infinities.
  Sbfx(result, input_high,
       HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  cmp(result, Operand(-1));
  b(eq, &exception);
  // Test for values that can be exactly represented as a
  // signed 32-bit integer.
  TryDoubleToInt32Exact(result, double_input, double_scratch);
  // If exact, return (result already fetched).
  b(eq, exact);
  cmp(input_high, Operand::Zero());
  b(mi, &negative);

  // Input is in ]+0, +inf[.
  // If result equals 0x7fffffff, the input was either out of range or in
  // ]0x7fffffff, 0x80000000[. We ignore this last case (whose floor would
  // still fit into an int32); that means we always treat such an input as
  // out of range and always go to the exception path.
  // If result < 0x7fffffff, go to done, result fetched.
  cmn(result, Operand(1));
  b(mi, &exception);
  b(done);

  // Input is in ]-inf, -0[.
  // If x is a non-integer negative number,
  // floor(x) <=> round_to_zero(x) - 1.
  bind(&negative);
  sub(result, result, Operand(1), SetCC);
  // If result is still negative, go to done, result fetched.
  // Else, we had an overflow and we fall through to the exception path.
  b(mi, done);
  bind(&exception);
}

void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DwVfpRegister double_input,
                                                Label* done) {
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());

  // If result is not saturated (0x7fffffff or 0x80000000), we are done.
  sub(ip, result, Operand(1));
  cmp(ip, Operand(0x7ffffffe));
  b(lt, done);
}


void MacroAssembler::TruncateDoubleToI(Register result,
                                       DwVfpRegister double_input) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through, the inline version didn't succeed; call the stub
  // instead.
  push(lr);
  sub(sp, sp, Operand(kDoubleSize));  // Put input on stack.
  vstr(double_input, MemOperand(sp, 0));

  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
  CallStub(&stub);

  add(sp, sp, Operand(kDoubleSize));
  pop(lr);

  bind(&done);
}
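
// Usage sketch (illustrative, not from the original source): truncating a
// heap number's payload, assuming r1 holds the tagged heap number:
//
//   __ vldr(d0, MemOperand(r1, HeapNumber::kValueOffset - kHeapObjectTag));
//   __ TruncateDoubleToI(r0, d0);  // r0 := d0 truncated toward zero
//
// The stub fallback is expected to handle doubles outside the int32 range
// with ECMAScript ToInt32 (modulo 2^32) semantics.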


void MacroAssembler::TruncateHeapNumberToI(Register result,
                                           Register object) {
  Label done;
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  DCHECK(!result.is(object));

  vldr(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through, the inline version didn't succeed; call the stub
  // instead.
  push(lr);
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(lr);

  bind(&done);
}


void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch1,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
    ubfx(dst, src, kSmiTagSize, num_least_bits);
  } else {
    SmiUntag(dst, src);
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  and_(dst, src, Operand((1 << num_least_bits) - 1));
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // All parameters are on the stack. r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f, isolate())));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub);
}
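
// Illustrative calling-convention note (not from the original source): the
// C entry expects the argument count in r0 and the runtime function's entry
// address in r1; CEntryStub then moves the stack-passed arguments into place
// for the C call. The Abort sequence later in this file is a concrete
// caller: it pushes one smi argument and does CallRuntime(Runtime::kAbort).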


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(r0, Operand(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
  if (emit_debug_code())
    Check(cond, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(ip));
    Label ok;
    push(elements);
    ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cond, BailoutReason reason) {
  Label L;
  b(cond, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
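
// Illustrative note (not from the original source): Assert compiles to
// nothing in release builds (emit_debug_code() is false), while Check always
// emits the test. Both take the condition that must hold, so a typical use
// reads like an assertion:
//
//   __ tst(r0, Operand(kSmiTagMask));
//   __ Assert(ne, kOperandIsASmi);  // debug-only: abort if r0 is a smi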


void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  mov(r0, Operand(Smi::FromInt(reason)));
  push(r0);

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // will not return here
  if (is_const_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    static const int kExpectedAbortInstructions = 7;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in cp).
    mov(dst, cp);
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  ldr(scratch, NativeContextMemOperand());
  ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  cmp(map_in_out, ip);
  b(ne, no_map_match);

  // Use the transitioned cached map.
  ldr(map_in_out,
      ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}


void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  ldr(dst, NativeContextMemOperand());
  ldr(dst, ContextMemOperand(dst, index));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    b(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, not_power_of_two_or_zero);
  tst(scratch, reg);
  b(ne, not_power_of_two_or_zero);
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
    Register reg,
    Register scratch,
    Label* zero_and_neg,
    Label* not_power_of_two) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, zero_and_neg);
  tst(scratch, reg);
  b(ne, not_power_of_two);
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


void MacroAssembler::UntagAndJumpIfSmi(
    Register dst, Register src, Label* smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cc, smi_case);  // Shifter carry is not set for a smi.
}
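
// Illustrative note (not from the original source): SmiUntag with SetCC is
// an arithmetic shift right by one that moves the tag bit into the carry
// flag. A smi has tag bit 0, so carry stays clear and b(cc, ...) is taken; a
// heap object pointer has bit 0 set (kHeapObjectTag == 1), so carry is set.
// For example, the tagged word 10 (the smi 5) untags to 5 with carry clear.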


void MacroAssembler::UntagAndJumpIfNotSmi(
    Register dst, Register src, Label* non_smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cs, non_smi_case);  // Shifter carry is set for a non-smi.
}


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}

void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsANumber);
    push(object);
    CompareObjectType(object, object, object, HEAP_NUMBER_TYPE);
    pop(object);
    Check(ne, kOperandIsANumber);
  }
}

void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmi);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(eq, kOperandIsNotSmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAString);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(lo, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAName);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, LAST_NAME_TYPE);
    pop(object);
    Check(le, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAFunction);
    push(object);
    CompareObjectType(object, object, object, JS_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotAFunction);
  }
}


void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotABoundFunction);
    push(object);
    CompareObjectType(object, object, object, JS_BOUND_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotABoundFunction);
  }
}


void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAReceiver);
    push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CompareObjectType(object, object, object, FIRST_JS_RECEIVER_TYPE);
    pop(object);
    Check(hs, kOperandIsNotAReceiver);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    CompareRoot(object, Heap::kUndefinedValueRootIndex);
    b(eq, &done_checking);
    ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    CompareRoot(reg, index);
    Check(eq, kHeapNumberMapRegisterClobbered);
  }
}


void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  cmp(scratch, heap_number_map);
  b(ne, on_not_heap_number);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  and_(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  b(eq, &succeed);
  cmp(reg, Operand(SYMBOL_TYPE));
  b(ne, not_unique_name);

  bind(&succeed);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* gc_required,
                                        TaggingMode tagging_mode,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store heap number map in the allocated object.
  if (tagging_mode == TAG_RESULT) {
    str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
  } else {
    str(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
  }
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  sub(scratch1, result, Operand(kHeapObjectTag));
  vstr(value, scratch1, HeapNumber::kValueOffset);
}


void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch1,
                                     Register scratch2, Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
  str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
  LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  str(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
  str(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
  str(value, FieldMemOperand(result, JSValue::kValueOffset));
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}


void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  cmp(length, Operand(kPointerSize));
  b(le, &byte_loop);

  bind(&align_loop_1);
  tst(src, Operand(kPointerSize - 1));
  b(eq, &word_loop);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(&align_loop_1);
  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    tst(src, Operand(kPointerSize - 1));
    Assert(eq, kExpectingAlignmentForCopyBytes);
  }
  cmp(length, Operand(kPointerSize));
  b(lt, &byte_loop);
  ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
  if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) {
    str(scratch, MemOperand(dst, kPointerSize, PostIndex));
  } else {
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
  }
  sub(length, length, Operand(kPointerSize));
  b(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  cmp(length, Operand::Zero());
  b(eq, &done);
  bind(&byte_loop_1);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);
  bind(&done);
}


void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  b(&entry);
  bind(&loop);
  str(filler, MemOperand(current_address, kPointerSize, PostIndex));
  bind(&entry);
  cmp(current_address, end_address);
  b(lo, &loop);
}


void MacroAssembler::CheckFor32DRegs(Register scratch) {
  mov(scratch, Operand(ExternalReference::cpu_features()));
  ldr(scratch, MemOperand(scratch));
  tst(scratch, Operand(1u << VFP32DREGS));
}


void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vstm(db_w, location, d16, d31, ne);
  sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
  vstm(db_w, location, d0, d15);
}


void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vldm(ia_w, location, d0, d15);
  vldm(ia_w, location, d16, d31, ne);
  add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch1, first, Operand(kFlatOneByteStringMask));
  and_(scratch2, second, Operand(kFlatOneByteStringMask));
  cmp(scratch1, Operand(kFlatOneByteStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatOneByteStringTag), eq);
  b(ne, failure);
}
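
// Illustrative note (not from the original source): the mask/tag trick above
// checks three instance-type properties in one compare. Masking with
// kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask keeps
// only the "is a string", "one-byte vs two-byte" and "sequential vs
// cons/sliced/external" bits; a flat one-byte string must then equal
// kStringTag | kOneByteStringTag | kSeqStringTag exactly. The second cmp is
// predicated on eq, so a failed first compare short-circuits it.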


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch, type, Operand(kFlatOneByteStringMask));
  cmp(scratch, Operand(kFlatOneByteStringTag));
  b(ne, failure);
}

static const int kRegisterPassedArguments = 4;


int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (use_eabi_hardfloat()) {
    // In the hard floating point calling convention, we can use
    // all double registers to pass doubles.
    if (num_double_arguments > DoubleRegister::NumRegisters()) {
      stack_passed_words +=
          2 * (num_double_arguments - DoubleRegister::NumRegisters());
    }
  } else {
    // In the soft floating point calling convention, every double
    // argument is passed using two registers.
    num_reg_arguments += 2 * num_double_arguments;
  }
  // Up to four simple arguments are passed in registers r0..r3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  return stack_passed_words;
}
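
// Worked example (illustrative, not from the original source): with softfp
// and a call taking 3 integer args and 2 doubles, the doubles are lowered to
// 2 * 2 = 4 extra register-sized args, giving 7 in total; r0..r3 take four,
// so CalculateStackPassedWords returns 7 - 4 = 3. Under hardfp the same call
// returns 0, since both doubles fit in VFP registers and only 3 core
// registers are needed.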


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string);
  Check(ne, kNonObject);

  ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
  ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));

  and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
  cmp(ip, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType);

  // The index is assumed to be untagged coming in; tag it to compare with the
  // string length without using a temp register. It is restored at the end of
  // this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, index, &index_tag_bad);
  b(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  ldr(ip, FieldMemOperand(string, String::kLengthOffset));
  cmp(index, ip);
  Check(lt, kIndexIsTooLarge);

  cmp(index, Operand(Smi::FromInt(0)));
  Check(ge, kIndexIsNegative);

  SmiUntag(index, index);
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
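
// Illustrative note (not from the original source): the aligned path saves
// the incoming sp in the word just above the outgoing arguments, so
// CallCFunctionHelper can restore it afterwards with a single
// ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)). For
// example, with two stack-passed words and 8-byte alignment, sp drops by
// three words, is rounded down to alignment, and the old sp is stored at
// slot 2.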


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}


void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
  DCHECK(src.is(d0));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
  MovToFloatParameter(src);
}


void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
                                          DwVfpRegister src2) {
  DCHECK(src1.is(d0));
  DCHECK(src2.is(d1));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src1);
    vmov(r2, r3, src2);
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  mov(ip, Operand(function));
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if V8_HOST_ARCH_ARM
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
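
// Usage sketch (illustrative, not from the original source): a complete C
// call through this interface for a function taking a single double, under
// either float ABI; the ExternalReference below is hypothetical:
//
//   __ PrepareCallCFunction(0, 1, r5);   // r5 is scratch
//   __ MovToFloatParameter(d0);          // first (double) argument
//   __ CallCFunction(
//       ExternalReference::some_math_function(isolate()), 0, 1);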


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  DCHECK(cc == eq || cc == ne);
  Bfc(scratch, object, 0, kPageSizeBits);
  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  tst(scratch, Operand(mask));
  b(cc, condition_met);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(ip, Operand(mask_scratch));
  b(first_bit == 1 ? eq : ne, &other_color);
  // Shift left 1 by adding.
  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
  b(eq, &word_boundary);
  tst(ip, Operand(mask_scratch));
  b(second_bit == 1 ? ne : eq, has_color);
  jmp(&other_color);

  bind(&word_boundary);
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  tst(ip, Operand(1));
  b(second_bit == 1 ? ne : eq, has_color);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
  mov(ip, Operand(1));
  mov(mask_reg, Operand(ip, LSL, mask_reg));
}
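
// Worked example (illustrative, not from the original source): the marking
// bitmap dedicates one bit per pointer-sized word of the page. For an object
// at page offset 0x340 on 32-bit ARM (kPointerSizeLog2 == 2, 32 bits per
// cell), the word index is 0x340 >> 2 = 0xD0; the low 5 bits (0x10) select
// the bit inside the cell and the remaining bits (0xD0 >> 5 = 6) select the
// cell, so mask_reg becomes 1 << 16 and bitmap_reg points at cell 6 of the
// page's bitmap.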


void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Register load_scratch,
                                 Label* value_is_white) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(mask_scratch, load_scratch);
  b(eq, value_is_white);
}


void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  Usat(output_reg, 8, Operand(input_reg));
}


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;

  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);

  // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest
  // rounding mode will provide the correct result.
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());

  bind(&done);
}
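
// Illustrative examples (not from the original source) of the clamp
// semantics used for Uint8ClampedArray-style stores: 300.0 -> 255
// (saturated), -5.0 -> 0 (vcvt_u32_f64 saturates negatives to zero),
// 1.5 -> 2 and 2.5 -> 2 (round-to-nearest-even), and NaN -> 0 (the VFP
// compare is unordered, so the ge branch is not taken and converting NaN
// yields 0).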


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  ldr(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  ldr(dst,
      FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  ldr(dst, FieldMemOperand(dst, offset));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Register null_value = r5;
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);

  // Check that there are no elements. Register r2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance: the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}

void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  eor(scratch_reg, scratch_reg, Operand(new_space_allocation_top));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(eq, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  eor(scratch_reg, scratch_reg, Operand(receiver_reg));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(ne, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  cmp(scratch_reg, Operand(new_space_allocation_top));
  b(gt, no_memento_found);
  // Memento map check.
  bind(&map_check);
  ldr(scratch_reg, MemOperand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Operand(isolate()->factory()->allocation_memento_map()));
}
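
// Illustrative note (not from the original source): the same-page tests above
// use xor. Two addresses lie on the same page exactly when
// (a ^ b) & ~Page::kPageAlignmentMask == 0, because addresses on one page
// agree in every bit above the page-offset bits. With a hypothetical 4KB page
// this would reduce to (a ^ b) & ~0xFFF == 0; V8's actual page size is larger,
// but the principle is the same.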

Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}
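
// Illustrative usage (not from the original source): callers use this helper
// to obtain a scratch register guaranteed to differ from live values, e.g.
//   Register scratch = GetRegisterThatIsNotOneOf(receiver, value);
// assuming, as the header typically declares, that the remaining parameters
// default to no_reg; the is_valid() checks above simply skip those slots.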


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // |current| walks the prototype chain, starting at |object|.
  mov(current, object);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(eq, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));

  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  ldrb(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  cmp(scratch1, Operand(JS_OBJECT_TYPE));
  b(lo, found);

  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(ne, &loop_again);

  bind(&end);
}
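
// Illustrative note (not from the original source): the instance-type guard
// above treats anything below JS_OBJECT_TYPE (proxies and value wrappers; see
// the STATIC_ASSERTs) as a hit, conservatively branching to |found| because
// such objects cannot be proven to have fast elements. Ordinary objects branch
// to |found| only when their elements kind decodes to DICTIONARY_ELEMENTS.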


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif
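
// Illustrative note (not from the original source): aliasing is detected by
// comparing the count of valid arguments with the population count of their
// combined bit set. A duplicate register sets the same bit twice, so e.g.
// AreAliased(r0, r1, r0) sees three valid registers but NumRegs({r0, r1}) == 2
// and returns true.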


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
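
// Illustrative usage (not from the original source): a patcher is scoped so
// that the instruction-cache flush happens on destruction, e.g.
//   CodePatcher patcher(isolate, pc, 1);    // patch one instruction
//   patcher.masm()->mov(r0, Operand(42));   // emit the replacement
// assuming the header supplies a default FLUSH argument. The destructor's
// DCHECKs then verify that exactly |instructions| words were emitted.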


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  bool neg = (mag.multiplier & (1U << 31)) != 0;
  if (divisor > 0 && neg) {
    smmla(result, dividend, ip, dividend);
  } else {
    smmul(result, dividend, ip);
    if (divisor < 0 && !neg && mag.multiplier > 0) {
      sub(result, result, Operand(dividend));
    }
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}
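
// Illustrative note (not from the original source): this is the classic
// round-toward-zero division by a constant via a magic multiplier (in the
// style of Hacker's Delight). smmul/smmla keep the high 32 bits of the 64-bit
// signed product, the ASR applies the magic shift, and the final
// "add ... LSR #31" adds 1 for negative dividends to turn the floored result
// into truncation. For divisor == 3 the magic multiplier is 0x55555556 with
// shift 0, so for dividend == -7: hi32(-7 * 0x55555556) == -3, and adding the
// sign bit (1) yields -2 == trunc(-7 / 3).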

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM