// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code.
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}

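// Each call emitter below has a matching CallSize predictor. The predicted
// size must agree exactly with what the emitter produces (verified by the
// DCHECK_EQ against SizeOfCodeGeneratedSince above), so that callers that
// need to know the size of a call sequence before emitting it, e.g. under
// predictable_code_size(), can rely on it.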
int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate,
                                                   Address target,
                                                   RelocInfo::Mode rmode,
                                                   Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(NULL, mov_instr) * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code to ensure we assume the
  // same constant pool availability (e.g., whether the constant pool is full
  // or not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // The call sequence on ARMv7 or later may be:
  //  movw ip, #... @ call address low 16
  //  movt ip, #... @ call address high 16
  //  blx  ip
  //                @ return address
  // Or, for pre-ARMv7 code or for values that may be back-patched
  // to avoid ICache flushes:
  //  ldr  ip, [pc, #...] @ call address
  //  blx  ip
  //                      @ return address

  // Statement positions are expected to be recorded when the target
  // address is loaded. The mov method automatically records positions
  // when pc is the target; since that is not the case here, we have to
  // do it explicitly.
  positions_recorder()->WriteRecordedPositions();

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code.
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}


void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}

void MacroAssembler::Drop(Register count, Condition cond) {
  add(sp, sp, Operand(count, LSL, kPointerSizeLog2), LeaveCC, cond);
}

void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}


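// Swap two registers in place. When no scratch register is available, this
// falls back to the classic three-EOR swap. With a = reg1 and b = reg2:
//   a ^= b;  // a now holds a0 ^ b0
//   b ^= a;  // b = b0 ^ (a0 ^ b0) = a0
//   a ^= b;  // a = (a0 ^ b0) ^ a0 = b0
// This trades two extra data-processing instructions for not needing a
// third register.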
void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    mov(dst, Operand(value));
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      mov(dst, Operand(value));
    }
  }
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}


void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}


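// Multiply-and-subtract: dst = srcA - (src1 * src2). On CPUs without the
// ARMv7 MLS instruction this is emulated with a mul into the ip scratch
// register followed by a sub, which is why srcA must not alias ip.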
void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(MLS)) {
    CpuFeatureScope scope(this, MLS);
    mls(dst, src1, src2, srcA, cond);
  } else {
    DCHECK(!srcA.is(ip));
    mul(ip, src1, src2, LeaveCC, cond);
    sub(dst, srcA, ip, LeaveCC, cond);
  }
}


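// And() strength-reduces two easy cases: a zero mask becomes a plain move of
// zero, and a mask of the form 2^n - 1 that would not fit in a single and
// instruction becomes a ubfx on ARMv7. For example (an illustrative call,
// not taken from this file):
//   And(r0, r1, Operand(0xffff), al);  // may emit: ubfx r0, r1, #0, #16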
void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
             base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}


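// Unsigned bit-field extract. The pre-ARMv7 fallback builds the same result
// from an and plus a logical shift right. Worked example with lsb = 4 and
// width = 8:
//   mask = (1 << 12) - 1 - ((1 << 4) - 1) = 0xff0
//   and dst, src1, #0xff0
//   mov dst, dst, LSR #4
// leaving bits 11:4 of src1 in bits 7:0 of dst, exactly as ubfx would.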
void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    DCHECK(!dst.is(pc) && !src.rm().is(pc));
    DCHECK((satpos >= 0) && (satpos <= 31));

    // These asserts are required to ensure compatibility with the ARMv7
    // implementation.
    DCHECK((src.shift_op() == ASR) || (src.shift_op() == LSL));
    DCHECK(src.rs().is(no_reg));

    Label done;
    int satval = (1 << satpos) - 1;

    if (cond != al) {
      b(NegateCondition(cond), &done);  // Skip saturate if !condition.
    }
    if (!(src.is_reg() && dst.is(src.rm()))) {
      mov(dst, src);
    }
    tst(dst, Operand(~satval));
    b(eq, &done);
    mov(dst, Operand::Zero(), LeaveCC, mi);  // 0 if negative.
    mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
    bind(&done);
  } else {
    usat(dst, satpos, src, cond);
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root = isolate()->heap()->root_handle(index);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cond, branch);
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's page's interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    ldr(ip, MemOperand(address));
    cmp(ip, value);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}

void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // the remembered set. If incremental marking is off, there is nothing for us
  // to do.
  if (!FLAG_incremental_marking) return;

  DCHECK(js_function.is(r1));
  DCHECK(code_entry.is(r4));
  DCHECK(scratch.is(r5));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    add(scratch, js_function, Operand(offset - kHeapObjectTag));
    ldr(ip, MemOperand(scratch));
    cmp(ip, code_entry);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  const Register dst = scratch;
  add(dst, js_function, Operand(offset - kHeapObjectTag));

  push(code_entry);

  // Save caller-saved registers, which includes js_function.
  DCHECK((kCallerSaved & js_function.bit()) != 0);
  DCHECK_EQ(kCallerSaved & code_entry.bit(), 0);
  stm(db_w, sp, (kCallerSaved | lr.bit()));

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);

  mov(r0, js_function);
  mov(r1, dst);
  mov(r2, Operand(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers (including js_function and code_entry).
  ldm(ia_w, sp, (kCallerSaved | lr.bit()));

  pop(code_entry);

  bind(&done);
}

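// Append the address of a just-written slot to the store buffer. The store
// buffer is a bump-pointer buffer: the slot address is stored at the current
// top and the top is advanced; when the new top has the overflow bit set,
// the StoreBufferOverflowStub is called to deal with the full buffer before
// execution continues.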
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  ldr(scratch, MemOperand(ip));
  // Store pointer to buffer and increment buffer top.
  str(address, MemOperand(scratch, kPointerSize, PostIndex));
  // Write back new top of buffer.
  str(scratch, MemOperand(ip));
  // Check for end of buffer, and call the stub if it has been reached.
  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
    b(eq, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(eq);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}


void MacroAssembler::PushFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
}


void MacroAssembler::PopFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0,
  // except when FLAG_enable_embedded_constant_pool is set, in which case pp
  // is omitted.
  DCHECK(kSafepointSavedRegisters ==
         (FLAG_enable_embedded_constant_pool
              ? ((1 << (kNumSafepointSavedRegisters + 1)) - 1) & ~pp.bit()
              : (1 << kNumSafepointSavedRegisters) - 1));
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


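// Map a register code to its index in the safepoint register block on the
// stack. Registers are pushed in ascending code order at descending
// addresses, so the lowest codes end up closest to sp. When the embedded
// constant pool is enabled, pp is not pushed, and every register with a
// code above pp's shifts down by one slot.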
int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that the lowest encodings are closest to the stack pointer.
  if (FLAG_enable_embedded_constant_pool && reg_code > pp.code()) {
    // RegList omits pp.
    reg_code -= 1;
  }
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // Number of d-regs not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  int doubles_size = config->num_allocatable_double_registers() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


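// Load a 64-bit value into a register pair. When ldrd is unavailable, or the
// destination pair is not an even/odd pair suitable for ldrd, two single ldr
// instructions are emitted instead; their order is chosen so that if dst1
// aliases the base register, the base is read before it is overwritten.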
void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
      if (dst1.is(src.rn())) {
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}


void MacroAssembler::VFPEnsureFPSCRState(Register scratch) {
  // If needed, restore wanted bits of FPSCR.
  Label fpscr_done;
  vmrs(scratch);
  if (emit_debug_code()) {
    Label rounding_mode_correct;
    tst(scratch, Operand(kVFPRoundingModeMask));
    b(eq, &rounding_mode_correct);
    // Don't call Assert here, since Runtime_Abort could re-enter here.
    stop("Default rounding mode not set");
    bind(&rounding_mode_correct);
  }
  tst(scratch, Operand(kVFPDefaultNaNModeControlBit));
  b(ne, &fpscr_done);
  orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit));
  vmsr(scratch);
  bind(&fpscr_done);
}


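// Canonicalize a NaN by passing the value through a VFP arithmetic
// operation. With the FPSCR default-NaN mode enabled (see
// VFPEnsureFPSCRState above), any NaN operand yields the canonical default
// NaN, while subtracting +0.0 leaves every other value, including -0.0,
// unchanged.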
void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,
                                        const Condition cond) {
  vsub(dst, src, kDoubleRegZero, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const SwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const float src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const SwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const float src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const double src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


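// Load a double immediate, special-casing +0.0 and -0.0: +0.0 is copied
// straight from kDoubleRegZero and -0.0 is produced by negating it, which
// avoids materializing the constant through a general-purpose register or
// the constant pool.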
void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const double imm,
                          const Register scratch) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero) {
    vmov(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero) {
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}


void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
    Register code_target_address) {
  DCHECK(FLAG_enable_embedded_constant_pool);
  ldr(pp, MemOperand(code_target_address,
                     Code::kConstantPoolOffset - Code::kHeaderSize));
  add(pp, pp, code_target_address);
}


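// Initialize pp from within generated code: compute the code entry address
// by subtracting the current offset (adjusted for the pc read-ahead,
// Instruction::kPCReadOffset) from pc, then load the constant pool pointer
// relative to that address.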
void MacroAssembler::LoadConstantPoolPointerRegister() {
  DCHECK(FLAG_enable_embedded_constant_pool);
  int entry_offset = pc_offset() + Instruction::kPCReadOffset;
  sub(ip, pc, Operand(entry_offset));
  LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}


void MacroAssembler::StubPrologue() {
  PushFixedFrame();
  Push(Smi::FromInt(StackFrame::STUB));
  // Adjust FP to point to saved FP.
  add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictable_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
      ldr(pc, MemOperand(pc, -4));
      emit_code_stub_address(stub);
    } else {
      PushFixedFrame(r1);
      nop(ip.code());
      // Adjust FP to point to saved FP.
      add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
    }
  }
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  ldr(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  ldr(vector,
      FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // r0-r3: preserved
  PushFixedFrame();
  if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
    LoadConstantPoolPointerRegister();
  }
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  // Adjust FP to point to saved FP.
  add(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


int MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer, return address and constant pool pointer
  // (if FLAG_enable_embedded_constant_pool).
  int frame_ends;
  if (FLAG_enable_embedded_constant_pool) {
    add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    frame_ends = pc_offset();
    ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
  } else {
    mov(sp, fp);
    frame_ends = pc_offset();
    ldm(ia_w, sp, fp.bit() | lr.bit());
  }
  return frame_ends;
}


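// Build an exit frame for calling out to C++. Roughly (the exact offsets
// are defined by ExitFrameConstants):
//   caller sp -> [ arguments ]
//                [ lr        ]  <- fp + kCallerPCOffset
//   fp ------->  [ caller fp ]  <- fp + kCallerFPOffset
//                [ sp slot and code object slot (plus a pp slot when
//                  embedded constant pools are enabled) ]
//                [ saved d-registers, if save_doubles ]
//   sp ------->  [ aligned space for stack_space words + return address ]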
void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  Push(lr, fp);
  mov(fp, Operand(sp));  // Set up new frame pointer.
  // Reserve room for saved entry sp and code object.
  sub(sp, sp, Operand(ExitFrameConstants::kFrameSize));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
    str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_embedded_constant_pool) {
    str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(ip, Operand(CodeObject()));
  str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(cp, MemOperand(ip));

  // Optionally save all double registers.
  if (save_doubles) {
    SaveFPRegs(sp, ip);
    // Note that d0 will be accessible at
    //   fp - ExitFrameConstants::kFrameSize -
    //   DwVfpRegister::kMaxNumRegisters * kDoubleSize,
    // since the sp slot, code slot and constant pool slot (if
    // FLAG_enable_embedded_constant_pool) were pushed after the fp.
  }

  // Reserve place for the return address and stack space and align the frame
  // preparing for calling the runtime function.
  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  add(ip, sp, Operand(kPointerSize));
  str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  SmiTag(scratch1, length);
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_ARM
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_ARM
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_ARM
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context,
                                    bool argument_count_is_length) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);

  // Optionally restore all double registers.
  if (save_doubles) {
    // Calculate the stack location of the saved doubles and restore them.
    const int offset = ExitFrameConstants::kFrameSize;
    sub(r3, fp,
        Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
    RestoreFPRegs(r3, ip);
  }

  // Clear top frame.
  mov(r3, Operand::Zero());
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    ldr(cp, MemOperand(ip));
  }
#ifdef DEBUG
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(r3, MemOperand(ip));
#endif

  // Tear down the exit frame, pop the arguments, and return.
  if (FLAG_enable_embedded_constant_pool) {
    ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(sp, Operand(fp));
  ldm(ia_w, sp, fp.bit() | lr.bit());
  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      add(sp, sp, argument_count);
    } else {
      add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
    }
  }
}


void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
  if (use_eabi_hardfloat()) {
    Move(dst, d0);
  } else {
    vmov(dst, r0, r1);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
  MovFromFloatResult(dst);
}


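// Shared prologue for function invocation. It compares the expected and
// actual argument counts and, on a mismatch, routes the call through the
// ArgumentsAdaptorTrampoline, which adapts the frame to the expected count.
// Three outcomes are possible:
//  - definitely matches: fall straight through to the invoked code;
//  - definitely mismatches (known at compile time): call or jump to the
//    adaptor without binding 'done';
//  - unknown until runtime: compare and skip the adaptor when the counts
//    turn out to be equal.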
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // set up registers according to the contract with ArgumentsAdaptorTrampoline:
  //   r0: actual arguments count
  //   r1: function (passed through to callee)
  //   r2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(r0));
  DCHECK(expected.is_immediate() || expected.reg().is(r2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(r0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it look
        // like we have a match between the expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      mov(r0, Operand(actual.immediate()));
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        b(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
1371
1372
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  mov(r4, Operand(step_in_enabled));
  ldrb(r4, MemOperand(r4));
  cmp(r4, Operand(0));
  b(eq, &skip_flooding);
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}


void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(r1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(r3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Register code = r4;
    ldr(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }

    // Continue here if InvokePrologue does handle the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(fun.is(r1));

  Register expected_reg = r2;
  Register temp_reg = r4;

  ldr(temp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(temp_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(expected_reg);

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(fun, new_target, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(function.is(r1));

  // Get the function and set up the context.
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  InvokeFunctionCode(r1, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(r1, function);
  InvokeFunction(r1, expected, actual, flag, call_wrapper);
}


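// Illustrative sketch, not part of the original source: invoking a known
// JSFunction through the Handle<JSFunction> overload above. The argument
// count, the use of NullCallWrapper, and the accessor used to read the
// formal parameter count are assumptions made for the example.
static void GenerateInvokeFunctionExample(MacroAssembler* masm,
                                          Handle<JSFunction> function) {
  // Two arguments have already been pushed; the callee's formal count
  // decides whether the arguments adaptor trampoline is needed.
  ParameterCount actual(2);
  ParameterCount expected(
      function->shared()->internal_formal_parameter_count());
  masm->InvokeFunction(function, expected, actual, CALL_FUNCTION,
                       NullCallWrapper());
}

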
void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  tst(scratch, Operand(kIsNotStringMask));
  b(ne, fail);
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, Operand(LAST_NAME_TYPE));
  b(hi, fail);
}


void MacroAssembler::DebugBreak() {
  mov(r0, Operand::Zero());
  mov(r1,
      Operand(ExternalReference(Runtime::kHandleDebuggerStatement, isolate())));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}


void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

  // Link the current handler as the next handler.
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(r5, MemOperand(r6));
  push(r5);

  // Set this new handler as the current one.
  str(sp, MemOperand(r6));
}


void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(r1);
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


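// Illustrative sketch, not part of the original source: the two helpers
// above bracket code that may throw, much like a try scope. The body is a
// placeholder; the register clobbers follow the helpers' implementations.
static void GenerateTryScopeExample(MacroAssembler* masm) {
  masm->PushStackHandler();  // Links a new handler (clobbers r5 and r6).
  // ... emit code that may throw here ...
  masm->PopStackHandler();   // Unlinks it again (clobbers r1 and ip).
}

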
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(ip));
  DCHECK(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand::Zero());
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
#endif

  // Load the native context of the current context.
  ldr(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull);

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    // Restoring ip is not needed. ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  eor(t0, t0, Operand(scratch));

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  // hash = hash ^ (hash >> 16);
  eor(t0, t0, Operand(t0, LSR, 16));
  bic(t0, t0, Operand(0xc0000000u));
}


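// For reference, a C++ sketch of the same mixing function the assembly above
// computes (mirroring the step comments; the function name is made up for
// this example).
static uint32_t ComputeIntegerHashExample(uint32_t key, uint32_t seed) {
  uint32_t hash = key ^ seed;  // Xor original key with the seed.
  hash = ~hash + (hash << 15);
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;          // Same as hash + (hash << 3) + (hash << 11).
  hash = hash ^ (hash >> 16);
  return hash & 0x3fffffffu;   // bic 0xc0000000: clear the top two bits.
}

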
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register t0,
                                              Register t1,
                                              Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'result'.
  //            Unchanged on bailout so 'key' or 'result' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //
  // t1 - used to hold the capacity mask of the dictionary.
  //
  // t2 - used for the index into the dictionary.
  Label done;

  GetNumberHash(t0, t1);

  // Compute the capacity mask.
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  SmiUntag(t1);
  sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    mov(t2, t0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
    cmp(key, Operand(ip));
    if (i != kNumberDictionaryProbes - 1) {
      b(eq, &done);
    } else {
      b(ne, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  ldr(result, FieldMemOperand(t2, kValueOffset));
}


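// C++ model of the unrolled probe loop above, as a sketch. It assumes the
// probe offsets match the "(hash + i + i * i) & mask" comment; the helper
// name and the flat key array are made up for this example.
static int FindNumberDictionaryEntryExample(uint32_t hash,
                                            const uint32_t* keys,
                                            int capacity, uint32_t key) {
  int mask = capacity - 1;  // Capacity is a power of two.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    int index = (hash + i + i * i) & mask;
    if (keys[index] == key) return index;  // Hit: the caller loads the value.
  }
  return -1;  // Miss after the fixed number of probes.
}

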
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address register.
  Register top_address = scratch1;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. We must preserve the ip register at this
  // point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, SetCC, cond);
      source = result_end;
      cond = cc;
    }
  }
  b(cs, gc_required);
  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);
  str(result_end, MemOperand(top_address));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}


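// Illustrative sketch, not part of the original source: allocating and
// initializing a HeapNumber with Allocate() above. The register choices and
// the gc label are hypothetical; the caller fills in the value afterwards.
static void GenerateAllocateHeapNumberExample(MacroAssembler* masm,
                                              Label* gc_required) {
  // Allocate HeapNumber::kSize bytes and tag the result as a heap object.
  masm->Allocate(HeapNumber::kSize, r0, r1, r2, gc_required, TAG_OBJECT);
  // Install the map; r0 now holds a (not yet valued) HeapNumber.
  masm->LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  masm->str(ip, FieldMemOperand(r0, HeapObject::kMapOffset));
}

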
void MacroAssembler::Allocate(Register object_size, Register result,
                              Register result_end, Register scratch,
                              Label* gc_required, AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch, Operand(0x7191));
      mov(result_end, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }
  b(cs, gc_required);
  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  str(result_end, MemOperand(top_address));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


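// C++ sketch of the size computation emitted above, for reference. The helper
// name is made up; the constants are the ones used by the assembly.
static int SeqTwoByteStringSizeExample(int length) {
  int size = length * 2 + SeqTwoByteString::kHeaderSize;  // Chars are 2 bytes.
  // Round up to the object alignment boundary.
  return (size + kObjectAlignmentMask) & ~kObjectAlignmentMask;
}

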
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  const Register temp = type_reg.is(no_reg) ? ip : type_reg;

  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, temp, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  // Registers map and type_reg can be ip. These two lines assert
  // that ip can be used with the two instructions (the constants
  // will never need ip).
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE < 256);
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


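// Illustrative sketch, not part of the original source: a type check built
// on CompareObjectType() above. The registers and label are hypothetical.
static void GenerateIsJSFunctionCheckExample(MacroAssembler* masm,
                                             Label* not_function) {
  // r0 holds the object; r1 receives its map and r2 the instance type.
  masm->CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  masm->b(ne, not_function);
  // Fall through: r0 is a JSFunction and r1 still holds its map.
}

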
void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  DCHECK(!obj.is(ip));
  LoadRoot(ip, index);
  cmp(obj, ip);
}


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(ls, fail);
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(hi, fail);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register value_reg,
    Register key_reg,
    Register elements_reg,
    Register scratch1,
    LowDwVfpRegister double_scratch,
    Label* fail,
    int elements_offset) {
  DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1));
  Label smi_value, store;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number.
  CheckMap(value_reg,
           scratch1,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
  // Force a canonical NaN.
  if (emit_debug_code()) {
    vmrs(ip);
    tst(ip, Operand(kVFPDefaultNaNModeControlBit));
    Assert(ne, kDefaultNaNModeNotSet);
  }
  VFPCanonicalizeNaN(double_scratch);
  b(&store);

  bind(&smi_value);
  SmiToDouble(double_scratch, value_reg);

  bind(&store);
  add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
  vstr(double_scratch,
       FieldMemOperand(scratch1,
                       FixedDoubleArray::kHeaderSize - elements_offset));
}


void MacroAssembler::CompareMap(Register obj,
                                Register scratch,
                                Handle<Map> map,
                                Label* early_success) {
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMap(scratch, map, early_success);
}


void MacroAssembler::CompareMap(Register obj_map,
                                Handle<Map> map,
                                Label* early_success) {
  cmp(obj_map, Operand(map));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, scratch, map, &success);
  b(ne, fail);
  bind(&success);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(ip, index);
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  ldr(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  Jump(success, RelocInfo::CODE_TARGET, eq);
  bind(&fail);
}


void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, Operand(cell));
  ldr(scratch, FieldMemOperand(scratch, WeakCell::kValueOffset));
  cmp(value, scratch);
}


void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, Operand(cell));
  ldr(value, FieldMemOperand(value, WeakCell::kValueOffset));
}


void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}


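// Illustrative sketch, not part of the original source: guarding an inlined
// fast path on a weak map cell with CmpWeakValue() above. The registers,
// cell and miss label are hypothetical. A cleared cell holds a smi, so the
// comparison also fails (and we miss) after the map dies.
static void GenerateWeakMapGuardExample(MacroAssembler* masm,
                                        Handle<WeakCell> cell, Label* miss) {
  // r0 holds the receiver; r1 and r2 are scratch registers.
  masm->ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  masm->CmpWeakValue(r1, cell, r2);
  masm->b(ne, miss);
}

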
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp, Register temp2) {
  Label done, loop;
  ldr(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done);
  CompareObjectType(result, temp, temp2, MAP_TYPE);
  b(ne, &done);
  ldr(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
  b(&loop);
  bind(&done);
}


void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}


void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
  if (CpuFeatures::IsSupported(VFP3)) {
    vmov(value.low(), smi);
    vcvt_f64_s32(value, 1);
  } else {
    SmiUntag(ip, smi);
    vmov(value.low(), ip);
    vcvt_f64_s32(value, value.low());
  }
}


void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
                                       LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}


void MacroAssembler::TryDoubleToInt32Exact(Register result,
                                           DwVfpRegister double_input,
                                           LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}


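// C++ model of the round-trip test above, as a sketch: convert to int32,
// convert back, and compare; equality means the double was an exact int32.
// NaN and out-of-range inputs are assumed to be rejected by the caller via
// the VFP flags, which this simplified model does not reproduce.
static bool DoubleIsInt32Example(double value, int32_t* result) {
  int32_t truncated = static_cast<int32_t>(value);  // Rounds toward zero.
  *result = truncated;
  return static_cast<double>(truncated) == value;
}

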
void MacroAssembler::TryInt32Floor(Register result,
                                   DwVfpRegister double_input,
                                   Register input_high,
                                   LowDwVfpRegister double_scratch,
                                   Label* done,
                                   Label* exact) {
  DCHECK(!result.is(input_high));
  DCHECK(!double_input.is(double_scratch));
  Label negative, exception;

  VmovHigh(input_high, double_input);

  // Test for NaN and infinities.
  Sbfx(result, input_high,
       HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  cmp(result, Operand(-1));
  b(eq, &exception);
  // Test for values that can be exactly represented as a
  // signed 32-bit integer.
  TryDoubleToInt32Exact(result, double_input, double_scratch);
  // If exact, return (result already fetched).
  b(eq, exact);
  cmp(input_high, Operand::Zero());
  b(mi, &negative);

  // Input is in ]+0, +inf[.
  // If result equals 0x7fffffff, the input was out of range or in
  // ]0x7fffffff, 0x80000000[. We ignore this last case, which could fit
  // into an int32; that means we always treat such input as out of range
  // and always go to the exception path.
  // If result < 0x7fffffff, go to done, result fetched.
  cmn(result, Operand(1));
  b(mi, &exception);
  b(done);

  // Input is in ]-inf, -0[.
  // If x is a non-integer negative number,
  // floor(x) <=> round_to_zero(x) - 1.
  bind(&negative);
  sub(result, result, Operand(1), SetCC);
  // If result is still negative, go to done, result fetched.
  // Else, we had an overflow and we fall through to exception.
  b(mi, done);
  bind(&exception);
}

Ben Murdochb8a8cc12014-11-26 15:28:44 +00002424void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
2425 DwVfpRegister double_input,
2426 Label* done) {
2427 LowDwVfpRegister double_scratch = kScratchDoubleReg;
2428 vcvt_s32_f64(double_scratch.low(), double_input);
2429 vmov(result, double_scratch.low());
Steve Block44f0eee2011-05-26 01:26:41 +01002430
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002431 // If result is not saturated (0x7fffffff or 0x80000000), we are done.
2432 sub(ip, result, Operand(1));
2433 cmp(ip, Operand(0x7ffffffe));
2434 b(lt, done);
2435}
Steve Block44f0eee2011-05-26 01:26:41 +01002436
Steve Block44f0eee2011-05-26 01:26:41 +01002437
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002438void MacroAssembler::TruncateDoubleToI(Register result,
2439 DwVfpRegister double_input) {
2440 Label done;
Steve Block44f0eee2011-05-26 01:26:41 +01002441
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002442 TryInlineTruncateDoubleToI(result, double_input, &done);
2443
2444 // If we fell through then inline version didn't succeed - call stub instead.
2445 push(lr);
2446 sub(sp, sp, Operand(kDoubleSize)); // Put input on stack.
2447 vstr(double_input, MemOperand(sp, 0));
2448
2449 DoubleToIStub stub(isolate(), sp, result, 0, true, true);
2450 CallStub(&stub);
2451
2452 add(sp, sp, Operand(kDoubleSize));
2453 pop(lr);
2454
Steve Block44f0eee2011-05-26 01:26:41 +01002455 bind(&done);
2456}


void MacroAssembler::TruncateHeapNumberToI(Register result,
                                           Register object) {
  Label done;
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  DCHECK(!result.is(object));

  vldr(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through then inline version didn't succeed - call stub instead.
  push(lr);
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(lr);

  bind(&done);
}


void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch1,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
    ubfx(dst, src, kSmiTagSize, num_least_bits);
  } else {
    SmiUntag(dst, src);
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}
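
// Worked example (illustrative): a smi with value 13 is encoded as 26
// (13 << kSmiTagSize). GetLeastBitsFromSmi(dst, src, 3) extracts the low
// three bits of the *untagged* value: the ubfx on the ARMv7 path pulls bits
// [1..3] out of 0b11010, giving 0b101 = 5, which equals 13 & 7. The fallback
// path computes the same thing with an explicit untag and mask.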


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  and_(dst, src, Operand((1 << num_least_bits) - 1));
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // All parameters are on the stack. r0 has the return value after the call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f, isolate())));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub);
}
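
// Calling-convention sketch (illustrative): r0 carries the argument count
// and r1 the ExternalReference of the C++ entry point; CEntryStub finds the
// arguments themselves on the stack. Assuming the caller has already pushed
// the single argument, a call could look like:
//
//   masm->CallRuntime(Runtime::FunctionForId(Runtime::kAbort), 1,
//                     kDontSaveFPRegs);
//
// kDontSaveFPRegs is appropriate when no live values are held in VFP
// registers across the call.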


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(r0, Operand(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
  if (emit_debug_code())
    Check(cond, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(ip));
    Label ok;
    push(elements);
    ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cond, BailoutReason reason) {
  Label L;
  b(cond, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}


void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  mov(r0, Operand(Smi::FromInt(reason)));
  push(r0);

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // will not return here
  if (is_const_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    static const int kExpectedAbortInstructions = 7;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in cp).
    mov(dst, cp);
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  ldr(scratch, NativeContextMemOperand());
  ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  cmp(map_in_out, ip);
  b(ne, no_map_match);

  // Use the transitioned cached map.
  ldr(map_in_out,
      ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}


void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  ldr(dst, NativeContextMemOperand());
  ldr(dst, ContextMemOperand(dst, index));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    b(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, not_power_of_two_or_zero);
  tst(scratch, reg);
  b(ne, not_power_of_two_or_zero);
}
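
// Worked example (illustrative) of the classic x & (x - 1) test used above:
// for reg = 8 (0b1000), scratch = 7 (0b0111) and 8 & 7 == 0, so neither
// branch is taken and execution falls through. For reg = 6, 6 & 5 = 4 != 0,
// so the "ne" branch fires. For reg = 0 the subtraction yields -1 and sets
// the N flag, so the "mi" branch catches the zero case.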


void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
    Register reg,
    Register scratch,
    Label* zero_and_neg,
    Label* not_power_of_two) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, zero_and_neg);
  tst(scratch, reg);
  b(ne, not_power_of_two);
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


void MacroAssembler::UntagAndJumpIfSmi(
    Register dst, Register src, Label* smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cc, smi_case);  // Shifter carry is not set for a smi.
}


void MacroAssembler::UntagAndJumpIfNotSmi(
    Register dst, Register src, Label* non_smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cs, non_smi_case);  // Shifter carry is set for a non-smi.
}
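
// How the carry trick above works (illustrative): SmiUntag with SetCC emits
// an arithmetic shift right by one that updates the flags, so the smi tag
// bit (bit 0) ends up in the carry flag while dst receives the untagged
// value. For src = 26 (the smi encoding of 13), the shift produces dst = 13
// with carry clear, so b(cc, ...) is taken; a heap pointer, which always has
// bit 0 set under this tagging scheme, leaves the carry set instead.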


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmi);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(eq, kOperandIsNotSmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAString);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(lo, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAName);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, LAST_NAME_TYPE);
    pop(object);
    Check(le, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAFunction);
    push(object);
    CompareObjectType(object, object, object, JS_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotAFunction);
  }
}


void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotABoundFunction);
    push(object);
    CompareObjectType(object, object, object, JS_BOUND_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotABoundFunction);
  }
}


void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAReceiver);
    push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CompareObjectType(object, object, object, FIRST_JS_RECEIVER_TYPE);
    pop(object);
    Check(hs, kOperandIsNotAReceiver);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    CompareRoot(object, Heap::kUndefinedValueRootIndex);
    b(eq, &done_checking);
    ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    CompareRoot(reg, index);
    Check(eq, kHeapNumberMapRegisterClobbered);
  }
}


void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  cmp(scratch, heap_number_map);
  b(ne, on_not_heap_number);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  and_(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  b(eq, &succeed);
  cmp(reg, Operand(SYMBOL_TYPE));
  b(ne, not_unique_name);

  bind(&succeed);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* gc_required,
                                        TaggingMode tagging_mode,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store heap number map in the allocated object.
  if (tagging_mode == TAG_RESULT) {
    str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
  } else {
    str(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
  }
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  sub(scratch1, result, Operand(kHeapObjectTag));
  vstr(value, scratch1, HeapNumber::kValueOffset);
}


void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch1,
                                     Register scratch2, Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
  str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
  LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  str(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
  str(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
  str(value, FieldMemOperand(result, JSValue::kValueOffset));
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}


void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  cmp(length, Operand(kPointerSize));
  b(le, &byte_loop);

  bind(&align_loop_1);
  tst(src, Operand(kPointerSize - 1));
  b(eq, &word_loop);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(&align_loop_1);
  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    tst(src, Operand(kPointerSize - 1));
    Assert(eq, kExpectingAlignmentForCopyBytes);
  }
  cmp(length, Operand(kPointerSize));
  b(lt, &byte_loop);
  ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
  if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) {
    str(scratch, MemOperand(dst, kPointerSize, PostIndex));
  } else {
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
  }
  sub(length, length, Operand(kPointerSize));
  b(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  cmp(length, Operand::Zero());
  b(eq, &done);
  bind(&byte_loop_1);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);
  bind(&done);
}
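
// Note on the unaligned-store fallback above (illustrative): when the CPU
// lacks UNALIGNED_ACCESSES, the word read from the aligned source is written
// out one byte at a time, least-significant byte first, which reproduces the
// original byte order on a little-endian ARM configuration. For example, a
// loaded word 0x44332211 is stored as the byte sequence 11 22 33 44 at
// increasing destination addresses.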


void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  b(&entry);
  bind(&loop);
  str(filler, MemOperand(current_address, kPointerSize, PostIndex));
  bind(&entry);
  cmp(current_address, end_address);
  b(lo, &loop);
}


void MacroAssembler::CheckFor32DRegs(Register scratch) {
  mov(scratch, Operand(ExternalReference::cpu_features()));
  ldr(scratch, MemOperand(scratch));
  tst(scratch, Operand(1u << VFP32DREGS));
}


void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vstm(db_w, location, d16, d31, ne);
  sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
  vstm(db_w, location, d0, d15);
}
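
// How the conditional save works (illustrative): CheckFor32DRegs leaves the
// flags set by a tst, so "ne" means the VFP32DREGS feature bit was present.
// With 32 D-registers the first vstm saves d16-d31; otherwise the sub simply
// skips 16 double slots so that the frame layout is the same either way.
// RestoreFPRegs below mirrors this, which keeps save and restore symmetric
// regardless of the detected register file size.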


void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vldm(ia_w, location, d0, d15);
  vldm(ia_w, location, d16, d31, ne);
  add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch1, first, Operand(kFlatOneByteStringMask));
  and_(scratch2, second, Operand(kFlatOneByteStringMask));
  cmp(scratch1, Operand(kFlatOneByteStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatOneByteStringTag), eq);
  b(ne, failure);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch, type, Operand(kFlatOneByteStringMask));
  cmp(scratch, Operand(kFlatOneByteStringTag));
  b(ne, failure);
}

static const int kRegisterPassedArguments = 4;


int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (use_eabi_hardfloat()) {
    // In the hard floating point calling convention, we can use
    // all double registers to pass doubles.
    if (num_double_arguments > DoubleRegister::NumRegisters()) {
      stack_passed_words +=
          2 * (num_double_arguments - DoubleRegister::NumRegisters());
    }
  } else {
    // In the soft floating point calling convention, every double
    // argument is passed using two registers.
    num_reg_arguments += 2 * num_double_arguments;
  }
  // Up to four simple arguments are passed in registers r0..r3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  return stack_passed_words;
}
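
// Worked example (illustrative): with the soft-float ABI, a call taking two
// integer arguments and three doubles counts as 2 + 2 * 3 = 8 register
// arguments; only four fit in r0-r3, so four words go on the stack. Under
// the hard-float ABI the same call needs no stack words, since the three
// doubles travel in VFP registers and the two integers fit in r0-r1.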


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string);
  Check(ne, kNonObject);

  ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
  ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));

  and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
  cmp(ip, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType);

  // The index is assumed to be untagged coming in; tag it to compare with the
  // string length without using a temp register. It is restored at the end of
  // this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, index, &index_tag_bad);
  b(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  ldr(ip, FieldMemOperand(string, String::kLengthOffset));
  cmp(index, ip);
  Check(lt, kIndexIsTooLarge);

  cmp(index, Operand(Smi::FromInt(0)));
  Check(ge, kIndexIsNegative);

  SmiUntag(index, index);
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}


void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
  DCHECK(src.is(d0));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
  MovToFloatParameter(src);
}


void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
                                          DwVfpRegister src2) {
  DCHECK(src1.is(d0));
  DCHECK(src2.is(d1));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src1);
    vmov(r2, r3, src2);
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  mov(ip, Operand(function));
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if V8_HOST_ARCH_ARM
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
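
// Usage sketch (illustrative; the external reference name is a placeholder,
// not a real V8 entry point): calling int f(int, int) with two register
// arguments could look like
//
//   masm->PrepareCallCFunction(2, 0, r5);
//   masm->mov(r0, Operand(123));
//   masm->mov(r1, Operand(456));
//   masm->CallCFunction(ExternalReference::hypothetical_two_arg_function(
//                           masm->isolate()),
//                       2);
//
// PrepareCallCFunction reserves and aligns the stack, and the helper above
// restores sp after the call, so the pair must always be used together.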


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  DCHECK(cc == eq || cc == ne);
  Bfc(scratch, object, 0, kPageSizeBits);
  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  tst(scratch, Operand(mask));
  b(cc, condition_met);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(ip, Operand(mask_scratch));
  b(first_bit == 1 ? eq : ne, &other_color);
  // Shift left 1 by adding.
  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
  b(eq, &word_boundary);
  tst(ip, Operand(mask_scratch));
  b(second_bit == 1 ? ne : eq, has_color);
  jmp(&other_color);

  bind(&word_boundary);
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  tst(ip, Operand(1));
  b(second_bit == 1 ? ne : eq, has_color);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
  mov(ip, Operand(1));
  mov(mask_reg, Operand(ip, LSL, mask_reg));
}
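
// Worked example (illustrative, assuming the 32-bit layout where
// kPointerSizeLog2 == 2 and Bitmap::kBitsPerCellLog2 == 5): for an address
// at byte offset 0x1234 within its page, the word index is 0x1234 >> 2 =
// 1165. The low five bits (1165 % 32 = 13) select the bit, so mask_reg
// becomes 1 << 13, while the remaining bits (1165 >> 5 = 36) select the
// bitmap cell, so bitmap_reg points at cell 36 of the page's mark bitmap.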


void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Register load_scratch,
                                 Label* value_is_white) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(mask_scratch, load_scratch);
  b(eq, value_is_white);
}


void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  Usat(output_reg, 8, Operand(input_reg));
}


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;

  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);

  // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest
  // rounding mode will provide the correct result.
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());

  bind(&done);
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  ldr(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  ldr(dst,
      FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  ldr(dst, FieldMemOperand(dst, offset));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Register null_value = r5;
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);

  // Check that there are no elements. Register r2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}


void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  add(scratch_reg, receiver_reg,
      Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Operand(new_space_start));
  b(lt, no_memento_found);
  mov(ip, Operand(new_space_allocation_top));
  ldr(ip, MemOperand(ip));
  cmp(scratch_reg, ip);
  b(gt, no_memento_found);
  ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
  cmp(scratch_reg,
      Operand(isolate()->factory()->allocation_memento_map()));
}


Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}
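
// Usage sketch (illustrative): pick a scratch register that is guaranteed
// not to alias any of the values a caller is already holding. Assuming the
// declaration defaults the trailing parameters to no_reg, only the live
// registers need listing:
//
//   Register temp = GetRegisterThatIsNotOneOf(r0, r1, r2);
//
// Note this is a purely static choice over the allocatable register set; it
// does not reserve the register in any way.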


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // current starts at the object itself.
  mov(current, object);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(eq, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));

  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  ldrb(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  cmp(scratch1, Operand(JS_OBJECT_TYPE));
  b(lo, found);

  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(ne, &loop_again);

  bind(&end);
}


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  bool neg = (mag.multiplier & (1U << 31)) != 0;
  if (divisor > 0 && neg) {
    smmla(result, dividend, ip, dividend);
  } else {
    smmul(result, dividend, ip);
    if (divisor < 0 && !neg && mag.multiplier > 0) {
      sub(result, result, Operand(dividend));
    }
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}
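
// Worked example (illustrative): for divisor == 3 the magic constants are
// multiplier = 0x55555556 and shift = 0. smmul keeps the high 32 bits of the
// 64-bit product, so for dividend = 7 it yields 2, and the final add of the
// sign bit (7 >> 31 == 0 via the unsigned LSR) leaves 2 == 7 / 3. For
// dividend = -7 the high word is -3, and adding the sign bit (1) gives -2,
// matching C's truncation toward zero.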
3693
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003694} // namespace internal
3695} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01003696
3697#endif // V8_TARGET_ARCH_ARM