// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code.
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate,
                                                   Address target,
                                                   RelocInfo::Mode rmode,
                                                   Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(NULL, mov_instr) * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code to ensure we assume the
  // same constant pool availability (e.g., whether constant pool is full or
  // not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // Call sequence on V7 or later may be:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                 @ return address
  // Or for pre-V7 or values that may be back-patched
  // to avoid ICache flushes:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                 @ return address

  // Statement positions are expected to be recorded when the target
  // address is loaded. The mov method will automatically record
  // positions when pc is the target, since this is not the case here
  // we have to do it explicitly.
  positions_recorder()->WriteRecordedPositions();

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code.
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}


void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}


void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}
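
// Note on the no-scratch path above: it is the classic three-EOR register
// swap, which trades a temporary register for three data-processing
// instructions and assumes reg1 and reg2 are distinct (eor-ing a register
// with itself would zero it).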


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    mov(dst, Operand(value));
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      mov(dst, Operand(value));
    }
  }
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}


void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}


void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(MLS)) {
    CpuFeatureScope scope(this, MLS);
    mls(dst, src1, src2, srcA, cond);
  } else {
    DCHECK(!srcA.is(ip));
    mul(ip, src1, src2, LeaveCC, cond);
    sub(dst, srcA, ip, LeaveCC, cond);
  }
}
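
// Without hardware MLS, dst = srcA - src1 * src2 is synthesized as a mul
// into the ip scratch register followed by a sub, which is why srcA must
// not alias ip.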


void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
             base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}
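
// The middle case above covers masks of the form 2^n - 1 (all low bits set)
// that would not encode as a single AND immediate: on ARMv7, ubfx extracts
// the low n bits in one instruction instead of materializing the mask.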


void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}
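
// The pre-ARMv7 signed extraction masks the field, shifts it left so its
// sign bit lands in bit 31, then shifts back arithmetically; the ASR
// replicates the sign bit across the high bits, matching sbfx semantics.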


void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
                          Condition cond) {
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    DCHECK(!dst.is(pc) && !src.rm().is(pc));
    DCHECK((satpos >= 0) && (satpos <= 31));

    // These asserts are required to ensure compatibility with the ARMv7
    // implementation.
    DCHECK((src.shift_op() == ASR) || (src.shift_op() == LSL));
    DCHECK(src.rs().is(no_reg));

    Label done;
    int satval = (1 << satpos) - 1;

    if (cond != al) {
      b(NegateCondition(cond), &done);  // Skip saturate if !condition.
    }
    if (!(src.is_reg() && dst.is(src.rm()))) {
      mov(dst, src);
    }
    tst(dst, Operand(~satval));
    b(eq, &done);
    mov(dst, Operand::Zero(), LeaveCC, mi);  // 0 if negative.
    mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
    bind(&done);
  } else {
    usat(dst, satpos, src, cond);
  }
}
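
// The software saturation path clamps to the unsigned range
// [0, 2^satpos - 1]: tst raises flags for any out-of-range bits, after
// which the conditional movs clamp negative inputs to 0 (mi) and too-large
// positive inputs to satval (pl).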


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root = isolate()->heap()->root_handle(index);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
  and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
  b(cond, branch);
}
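
// This membership test relies on the new space being a single contiguous,
// aligned region: masking the address with new_space_mask and comparing
// against new_space_start decides the question in two instructions.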


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip.  The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed
  // that the from object's page's interesting flag is also set.  This
  // optimization relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip.  The
// register 'object' contains a heap object pointer.  The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    ldr(ip, MemOperand(address));
    cmp(ip, value);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value !=
      kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, value, address,
                       remembered_set_action, fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}
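
// Taken together: the barrier is skipped entirely for smi stores and for
// stores where neither the value's page nor the object's page is flagged
// as interesting; only the remaining cases call RecordWriteStub to update
// the remembered set and/or incremental marking state.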


void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  ldr(scratch, MemOperand(ip));
  // Store pointer to buffer and increment buffer top.
  str(address, MemOperand(scratch, kPointerSize, PostIndex));
  // Write back new top of buffer.
  str(scratch, MemOperand(ip));
  // Call stub on end of buffer.
  // Check for end of buffer.
  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
    b(eq, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(eq);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}
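
// The overflow check piggybacks on the store-buffer top pointer itself:
// once the PostIndex increment advances it to an address with
// kStoreBufferOverflowBit set, the buffer is full and
// StoreBufferOverflowStub is called to drain it.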


void MacroAssembler::PushFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
}


void MacroAssembler::PopFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0,
  // except when FLAG_enable_embedded_constant_pool is set, which omits pp.
  DCHECK(kSafepointSavedRegisters ==
         (FLAG_enable_embedded_constant_pool
              ? ((1 << (kNumSafepointSavedRegisters + 1)) - 1) & ~pp.bit()
              : (1 << kNumSafepointSavedRegisters) - 1));
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  if (FLAG_enable_embedded_constant_pool && reg_code > pp.code()) {
    // RegList omits pp.
    reg_code -= 1;
  }
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}
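
// When the embedded constant pool is enabled, pp is excluded from the saved
// RegList, so encodings above pp are shifted down by one to map onto the
// actual stack slot layout.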


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // Number of d-regs not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  int doubles_size = config->num_allocatable_double_registers() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
      if (dst1.is(src.rn())) {
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}
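
// In the split-load fallback the two ldr instructions are ordered so that
// the base register is overwritten last whenever it aliases dst1, keeping
// the 64-bit load correct for all addressing modes V8 uses.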


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not available.
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}


void MacroAssembler::VFPEnsureFPSCRState(Register scratch) {
  // If needed, restore wanted bits of FPSCR.
  Label fpscr_done;
  vmrs(scratch);
  if (emit_debug_code()) {
    Label rounding_mode_correct;
    tst(scratch, Operand(kVFPRoundingModeMask));
    b(eq, &rounding_mode_correct);
    // Don't call Assert here, since Runtime_Abort could re-enter here.
    stop("Default rounding mode not set");
    bind(&rounding_mode_correct);
  }
  tst(scratch, Operand(kVFPDefaultNaNModeControlBit));
  b(ne, &fpscr_done);
  orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit));
  vmsr(scratch);
  bind(&fpscr_done);
}


void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,
                                        const Condition cond) {
  vsub(dst, src, kDoubleRegZero, cond);
}
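
// Canonicalization works because, with the FPSCR default-NaN bit set (see
// VFPEnsureFPSCRState above), any VFP arithmetic involving a NaN, here
// src - 0.0, produces the canonical quiet NaN bit pattern; non-NaN values
// pass through unchanged.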


void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const SwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const float src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const SwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const float src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const double src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const double imm,
                          const Register scratch) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero) {
    vmov(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero) {
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}
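
// Special-casing +/-0.0 lets Vmov reuse the pinned kDoubleRegZero register
// (with a vneg for -0.0) rather than materializing a 64-bit immediate via a
// general-purpose scratch register or the constant pool.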


void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
    Register code_target_address) {
  DCHECK(FLAG_enable_embedded_constant_pool);
  ldr(pp, MemOperand(code_target_address,
                     Code::kConstantPoolOffset - Code::kHeaderSize));
  add(pp, pp, code_target_address);
}


void MacroAssembler::LoadConstantPoolPointerRegister() {
  DCHECK(FLAG_enable_embedded_constant_pool);
  int entry_offset = pc_offset() + Instruction::kPCReadOffset;
  sub(ip, pc, Operand(entry_offset));
  LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}
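
// The pc-relative arithmetic recovers the start of the current code object:
// reading pc yields the address of the current instruction plus
// Instruction::kPCReadOffset, so subtracting entry_offset (the current
// pc_offset plus that same bias) leaves the code start in ip, from which
// the embedded constant pool offset can be loaded.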


void MacroAssembler::StubPrologue() {
  PushFixedFrame();
  Push(Smi::FromInt(StackFrame::STUB));
  // Adjust FP to point to saved FP.
  add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictable_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
      ldr(pc, MemOperand(pc, -4));
      emit_code_stub_address(stub);
    } else {
      PushFixedFrame(r1);
      nop(ip.code());
      // Adjust FP to point to saved FP.
      add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
    }
  }
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  ldr(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  ldr(vector,
      FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // r0-r3: preserved
  PushFixedFrame();
  if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
    LoadConstantPoolPointerRegister();
  }
  mov(ip, Operand(Smi::FromInt(type)));
  push(ip);
  mov(ip, Operand(CodeObject()));
  push(ip);
  // Adjust FP to point to saved FP.
  add(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


int MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer, return address and constant pool pointer
  // (if FLAG_enable_embedded_constant_pool).
  int frame_ends;
  if (FLAG_enable_embedded_constant_pool) {
    add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    frame_ends = pc_offset();
    ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
  } else {
    mov(sp, fp);
    frame_ends = pc_offset();
    ldm(ia_w, sp, fp.bit() | lr.bit());
  }
  return frame_ends;
}


void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  Push(lr, fp);
  mov(fp, Operand(sp));  // Set up new frame pointer.
  // Reserve room for saved entry sp and code object.
  sub(sp, sp, Operand(ExitFrameConstants::kFrameSize));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
    str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_embedded_constant_pool) {
    str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(ip, Operand(CodeObject()));
  str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(cp, MemOperand(ip));

  // Optionally save all double registers.
  if (save_doubles) {
    SaveFPRegs(sp, ip);
    // Note that d0 will be accessible at
    //   fp - ExitFrameConstants::kFrameSize -
    //   DwVfpRegister::kMaxNumRegisters * kDoubleSize,
    // since the sp slot, code slot and constant pool slot (if
    // FLAG_enable_embedded_constant_pool) were pushed after the fp.
  }

  // Reserve place for the return address and stack space and align the frame
  // preparing for calling the runtime function.
  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  add(ip, sp, Operand(kPointerSize));
  str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
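
// Note on the alignment step above: because frame_alignment is a power of
// two, and_(sp, sp, Operand(-frame_alignment)) rounds sp down to the
// required boundary, satisfying the C ABI before the runtime call is made.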


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  SmiTag(scratch1, length);
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_ARM
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_ARM
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_ARM
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context,
                                    bool argument_count_is_length) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);

  // Optionally restore all double registers.
  if (save_doubles) {
    // Calculate the stack location of the saved doubles and restore them.
    const int offset = ExitFrameConstants::kFrameSize;
    sub(r3, fp,
        Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
    RestoreFPRegs(r3, ip);
  }

  // Clear top frame.
  mov(r3, Operand::Zero());
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    ldr(cp, MemOperand(ip));
  }
#ifdef DEBUG
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(r3, MemOperand(ip));
#endif

  // Tear down the exit frame, pop the arguments, and return.
  if (FLAG_enable_embedded_constant_pool) {
    ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(sp, Operand(fp));
  ldm(ia_w, sp, fp.bit() | lr.bit());
  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      add(sp, sp, argument_count);
    } else {
      add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
    }
  }
}


void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
  if (use_eabi_hardfloat()) {
    Move(dst, d0);
  } else {
    vmov(dst, r0, r1);
  }
}
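
// With the softfloat ABI a double result comes back in the core register
// pair r0:r1, hence the vmov; under EABI hardfloat it is already in d0.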


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
  MovFromFloatResult(dst);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // set up registers according to the contract with
  // ArgumentsAdaptorTrampoline:
  //   r0: actual arguments count
  //   r1: function (passed through to callee)
  //   r2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(r0));
  DCHECK(expected.is_immediate() || expected.reg().is(r2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(r0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      mov(r0, Operand(actual.immediate()));
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        b(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
1305
1306
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  mov(r4, Operand(step_in_enabled));
  ldrb(r4, MemOperand(r4));
  cmp(r4, Operand(0));
  b(eq, &skip_flooding);
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
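    // Push fun twice: the runtime call below consumes one copy as its
    // argument, and the matching Pop(fun) restores the other afterwards.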
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping, 1);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}

void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(r1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(r3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Register code = r4;
    ldr(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }

    // Continue here if InvokePrologue handled the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}

void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(fun.is(r1));

  Register expected_reg = r2;
  Register temp_reg = r4;

  ldr(temp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(temp_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(expected_reg);

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(fun, new_target, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(function.is(r1));

  // The function is already in r1; set up the context.
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  InvokeFunctionCode(r1, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(r1, function);
  InvokeFunction(r1, expected, actual, flag, call_wrapper);
}

void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  tst(scratch, Operand(kIsNotStringMask));
  b(ne, fail);
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, Operand(LAST_NAME_TYPE));
  b(hi, fail);
}


void MacroAssembler::DebugBreak() {
  mov(r0, Operand::Zero());
  mov(r1,
      Operand(ExternalReference(Runtime::kHandleDebuggerStatement, isolate())));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}

void MacroAssembler::PushStackHandler() {
  // Adjust this code if the handler layout asserted below changes.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

  // Link the current handler as the next handler.
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(r5, MemOperand(r6));
  push(r5);

  // Set this new handler as the current one.
  str(sp, MemOperand(r6));
}

void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
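  // Pop the next-handler pointer saved by PushStackHandler and reinstate it
  // as the current handler.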
  pop(r1);
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}

void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(ip));
  DCHECK(!scratch.is(ip));

  // Load current lexical context from the stack frame.
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand::Zero());
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
#endif

  // Load the native context of the current context.
  ldr(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull);

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    // Restoring ip is not needed. ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}

// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stubs-hydrogen.cc
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  eor(t0, t0, Operand(scratch));

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  // hash = hash ^ (hash >> 16);
  eor(t0, t0, Operand(t0, LSR, 16));
  bic(t0, t0, Operand(0xc0000000u));
}
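
// For reference, the instruction sequence above mirrors this C-style sketch
// of the seeded integer hash (an illustrative standalone version, not code
// that exists elsewhere in the tree):
//
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;  // emitted as hash + (hash << 3) + (hash << 11)
//   hash = hash ^ (hash >> 16);
//   return hash & 0x3fffffff;  // the final bic clears the top two bits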


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register t0,
                                              Register t1,
                                              Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'elements' or 'key'.
  //            Unchanged on bailout so 'elements' or 'key' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //
  // t1 - used to hold the capacity mask of the dictionary.
  //
  // t2 - used for the index into the dictionary.
  Label done;

  GetNumberHash(t0, t1);

  // Compute the capacity mask.
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  SmiUntag(t1);
  sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    mov(t2, t0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
    cmp(key, Operand(ip));
    if (i != kNumberDictionaryProbes - 1) {
      b(eq, &done);
    } else {
      b(ne, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  ldr(result, FieldMemOperand(t2, kValueOffset));
}

void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address register.
  Register top_address = scratch1;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. We must preserve the ip register at this
  // point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, SetCC, cond);
      source = result_end;
      cond = cc;
    }
  }
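  // Illustrative trace (not from the original source): for object_size ==
  // 0x3304 the loop above emits two adds, each with an immediate encodable
  // in a single ARM instruction:
  //   add(result_end, result, Operand(0x304), SetCC, al);
  //   add(result_end, result_end, Operand(0x3000), SetCC, cc);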
  b(cs, gc_required);
  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);
  str(result_end, MemOperand(top_address));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}

void MacroAssembler::Allocate(Register object_size, Register result,
                              Register result_end, Register scratch,
                              Label* gc_required, AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch, Operand(0x7191));
      mov(result_end, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }
  b(cs, gc_required);
  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  // Update allocation top. result_end holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  str(result_end, MemOperand(top_address));

  // Tag object if requested.
  if ((flags & TAG_OBJECT) != 0) {
    add(result, result, Operand(kHeapObjectTag));
  }
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(scratch1,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}

void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  const Register temp = type_reg.is(no_reg) ? ip : type_reg;

  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, temp, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  // Registers map and type_reg can be ip. These two lines assert
  // that ip can be used with the two instructions (the constants
  // will never need ip).
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE < 256);
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  DCHECK(!obj.is(ip));
  LoadRoot(ip, index);
  cmp(obj, ip);
}

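// The FAST_* ElementsKind values are ordered (pinned by the STATIC_ASSERTs
// below) so that a single unsigned compare against
// Map::kMaximumBitField2FastHoleyElementValue classifies a map's elements
// kind as fast or not.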
void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(ls, fail);
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(hi, fail);
}

void MacroAssembler::StoreNumberToDoubleElements(
    Register value_reg,
    Register key_reg,
    Register elements_reg,
    Register scratch1,
    LowDwVfpRegister double_scratch,
    Label* fail,
    int elements_offset) {
  DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1));
  Label smi_value, store;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number.
  CheckMap(value_reg,
           scratch1,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
  // Force a canonical NaN.
  if (emit_debug_code()) {
    vmrs(ip);
    tst(ip, Operand(kVFPDefaultNaNModeControlBit));
    Assert(ne, kDefaultNaNModeNotSet);
  }
  VFPCanonicalizeNaN(double_scratch);
  b(&store);

  bind(&smi_value);
  SmiToDouble(double_scratch, value_reg);

  bind(&store);
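  // DoubleOffsetFromSmiKey scales the still-tagged smi key to a byte offset;
  // since a smi is value << 1 and a double element is kDoubleSize (8) bytes,
  // this presumably amounts to a single LSL 2 of the tagged key.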
  add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
  vstr(double_scratch,
       FieldMemOperand(scratch1,
                       FixedDoubleArray::kHeaderSize - elements_offset));
}

void MacroAssembler::CompareMap(Register obj,
                                Register scratch,
                                Handle<Map> map,
                                Label* early_success) {
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMap(scratch, map, early_success);
}


void MacroAssembler::CompareMap(Register obj_map,
                                Handle<Map> map,
                                Label* early_success) {
  cmp(obj_map, Operand(map));
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, scratch, map, &success);
  b(ne, fail);
  bind(&success);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(ip, index);
  cmp(scratch, ip);
  b(ne, fail);
}


void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  ldr(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  Jump(success, RelocInfo::CODE_TARGET, eq);
  bind(&fail);
}


void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, Operand(cell));
  ldr(scratch, FieldMemOperand(scratch, WeakCell::kValueOffset));
  cmp(value, scratch);
}


void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, Operand(cell));
  ldr(value, FieldMemOperand(value, WeakCell::kValueOffset));
}


void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}

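// Walk the map's constructor-or-back-pointer chain: while the slot holds
// another map it is a back pointer, so keep following it; the first non-map
// value (a smi or the constructor function) is the result.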
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp, Register temp2) {
  Label done, loop;
  ldr(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done);
  CompareObjectType(result, temp, temp2, MAP_TYPE);
  b(ne, &done);
  ldr(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
  b(&loop);
  bind(&done);
}


void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}

void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}


void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
  if (CpuFeatures::IsSupported(VFP3)) {
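    // vcvt_f64_s32 with one fraction bit reads the tagged smi (2 * value) as
    // a fixed-point number with one fractional bit, so the conversion untags
    // for free.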
    vmov(value.low(), smi);
    vcvt_f64_s32(value, 1);
  } else {
    SmiUntag(ip, smi);
    vmov(value.low(), ip);
    vcvt_f64_s32(value, value.low());
  }
}

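// The two helpers below use the same round-trip trick: convert the double to
// an int32 and back, then compare with the original. The eq condition holds
// only if the input was an exact int32 value (NaN compares unordered).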
void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
                                       LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}


void MacroAssembler::TryDoubleToInt32Exact(Register result,
                                           DwVfpRegister double_input,
                                           LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}

void MacroAssembler::TryInt32Floor(Register result,
                                   DwVfpRegister double_input,
                                   Register input_high,
                                   LowDwVfpRegister double_scratch,
                                   Label* done,
                                   Label* exact) {
  DCHECK(!result.is(input_high));
  DCHECK(!double_input.is(double_scratch));
  Label negative, exception;

  VmovHigh(input_high, double_input);

  // Test for NaN and infinities.
  Sbfx(result, input_high,
       HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  cmp(result, Operand(-1));
  b(eq, &exception);
  // Test for values that can be exactly represented as a
  // signed 32-bit integer.
  TryDoubleToInt32Exact(result, double_input, double_scratch);
  // If exact, return (result already fetched).
  b(eq, exact);
  cmp(input_high, Operand::Zero());
  b(mi, &negative);

  // Input is in ]+0, +inf[.
  // If result equals 0x7fffffff, input was out of range or in
  // ]0x7fffffff, 0x80000000[. We ignore this last case, which could fit
  // into an int32; that means we always treat such input as out of range
  // and always go to exception.
  // If result < 0x7fffffff, go to done, result fetched.
  cmn(result, Operand(1));
  b(mi, &exception);
  b(done);

  // Input is in ]-inf, -0[.
  // If x is a non-integer negative number,
  // floor(x) <=> round_to_zero(x) - 1.
  bind(&negative);
  sub(result, result, Operand(1), SetCC);
  // If result is still negative, go to done, result fetched.
  // Else, we had an overflow and we fall through to exception.
  b(mi, done);
  bind(&exception);
}

void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DwVfpRegister double_input,
                                                Label* done) {
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());

  // If result is not saturated (0x7fffffff or 0x80000000), we are done.
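  // Subtracting 1 maps the saturated values 0x7fffffff and 0x80000000 to
  // 0x7ffffffe and 0x7fffffff, the only two values not signed-less-than
  // 0x7ffffffe, so a single compare-and-branch tests for both.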
  sub(ip, result, Operand(1));
  cmp(ip, Operand(0x7ffffffe));
  b(lt, done);
}

void MacroAssembler::TruncateDoubleToI(Register result,
                                       DwVfpRegister double_input) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through, the inline version didn't succeed; call the stub.
  push(lr);
  sub(sp, sp, Operand(kDoubleSize));  // Put input on stack.
  vstr(double_input, MemOperand(sp, 0));

  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
  CallStub(&stub);

  add(sp, sp, Operand(kDoubleSize));
  pop(lr);

  bind(&done);
}


void MacroAssembler::TruncateHeapNumberToI(Register result,
                                           Register object) {
  Label done;
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  DCHECK(!result.is(object));

  vldr(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through, the inline version didn't succeed; call the stub.
  push(lr);
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(lr);

  bind(&done);
}


void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch1,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}


void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
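    // ubfx extracts num_least_bits starting just above the smi tag bit,
    // performing the untag and the mask in a single instruction.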
    ubfx(dst, src, kSmiTagSize, num_least_bits);
  } else {
    SmiUntag(dst, src);
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}

Steve Block1e0659c2011-05-24 12:43:12 +01002446void MacroAssembler::GetLeastBitsFromInt32(Register dst,
2447 Register src,
2448 int num_least_bits) {
2449 and_(dst, src, Operand((1 << num_least_bits) - 1));
2450}
2451
2452
Steve Block44f0eee2011-05-26 01:26:41 +01002453void MacroAssembler::CallRuntime(const Runtime::Function* f,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002454 int num_arguments,
2455 SaveFPRegsMode save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002456 // All parameters are on the stack. r0 has the return value after call.
2457
2458 // If the expected number of arguments of the runtime function is
2459 // constant, we check that the actual number of arguments match the
2460 // expectation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002461 CHECK(f->nargs < 0 || f->nargs == num_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +00002462
Leon Clarke4515c472010-02-03 11:58:03 +00002463 // TODO(1236192): Most runtime routines don't need the number of
2464 // arguments passed in because it is constant. At some point we
2465 // should remove this need and make the runtime routine entry code
2466 // smarter.
2467 mov(r0, Operand(num_arguments));
Steve Block44f0eee2011-05-26 01:26:41 +01002468 mov(r1, Operand(ExternalReference(f, isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002469 CEntryStub stub(isolate(), 1, save_doubles);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002470 CallStub(&stub);
2471}
2472
2473
Andrei Popescu402d9372010-02-26 13:31:12 +00002474void MacroAssembler::CallExternalReference(const ExternalReference& ext,
2475 int num_arguments) {
2476 mov(r0, Operand(num_arguments));
2477 mov(r1, Operand(ext));
2478
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002479 CEntryStub stub(isolate(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00002480 CallStub(&stub);
2481}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(r0, Operand(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Fake a parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  LoadNativeContextSlot(native_context_index, r1);
  InvokeFunctionCode(r1, no_reg, expected, expected, flag, call_wrapper);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
  if (emit_debug_code())
    Check(cond, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(ip));
    Label ok;
    push(elements);
    ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}


void MacroAssembler::Check(Condition cond, BailoutReason reason) {
  Label L;
  b(cond, &L);
  Abort(reason);
  // Will not return here.
  bind(&L);
}


void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  mov(r0, Operand(Smi::FromInt(reason)));
  push(r0);

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // Will not return here.
  if (is_const_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    static const int kExpectedAbortInstructions = 7;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in cp).
    mov(dst, cp);
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  ldr(scratch, NativeContextMemOperand());
  ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  cmp(map_in_out, ip);
  b(ne, no_map_match);

  // Use the transitioned cached map.
  ldr(map_in_out,
      ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}


void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  ldr(dst, NativeContextMemOperand());
  ldr(dst, ContextMemOperand(dst, index));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    b(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, not_power_of_two_or_zero);
  tst(scratch, reg);
  b(ne, not_power_of_two_or_zero);
}
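
// Worked example (illustrative): for reg == 8, scratch becomes 7 and
// (8 & 7) == 0, so control falls through (8 is a power of two). For
// reg == 12, (12 & 11) == 8 != 0 and we branch. For reg == 0, scratch
// becomes -1, the mi condition fires, and we branch as well.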


void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
    Register reg,
    Register scratch,
    Label* zero_and_neg,
    Label* not_power_of_two) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, zero_and_neg);
  tst(scratch, reg);
  b(ne, not_power_of_two);
}


void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}


void MacroAssembler::UntagAndJumpIfSmi(
    Register dst, Register src, Label* smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cc, smi_case);  // Shifter carry is not set for a smi.
}


void MacroAssembler::UntagAndJumpIfNotSmi(
    Register dst, Register src, Label* non_smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cs, non_smi_case);  // Shifter carry is set for a non-smi.
}
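
// How the carry trick above works (illustrative): SmiUntag(dst, src, SetCC)
// emits an arithmetic shift right by the smi tag size with the S bit set,
// so the shifter carry-out receives the last bit shifted out, which is
// exactly the tag bit. A smi (tag 0) leaves carry clear; a heap object
// pointer (tag 1) leaves carry set. The cc/cs branches key off that flag.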


void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmi);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(eq, kOperandIsNotSmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAString);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(lo, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAName);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, LAST_NAME_TYPE);
    pop(object);
    Check(le, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAFunction);
    push(object);
    CompareObjectType(object, object, object, JS_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotAFunction);
  }
}


void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotABoundFunction);
    push(object);
    CompareObjectType(object, object, object, JS_BOUND_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotABoundFunction);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    CompareRoot(object, Heap::kUndefinedValueRootIndex);
    b(eq, &done_checking);
    ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    CompareRoot(reg, index);
    Check(eq, kHeapNumberMapRegisterClobbered);
  }
}


void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  cmp(scratch, heap_number_map);
  b(ne, on_not_heap_number);
}


void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  and_(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  b(eq, &succeed);
  cmp(reg, Operand(SYMBOL_TYPE));
  b(ne, not_unique_name);

  bind(&succeed);
}


// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* gc_required,
                                        TaggingMode tagging_mode,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store the heap number map in the allocated object.
  if (tagging_mode == TAG_RESULT) {
    str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
  } else {
    str(heap_number_map, MemOperand(result, HeapObject::kMapOffset));
  }
}


void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  sub(scratch1, result, Operand(kHeapObjectTag));
  vstr(value, scratch1, HeapNumber::kValueOffset);
}
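
// Illustrative usage sketch (register choices and the gc_required label are
// assumptions): boxing the double in d0 as a new HeapNumber in r0.
//
//   masm->LoadRoot(r4, Heap::kHeapNumberMapRootIndex);
//   masm->AllocateHeapNumberWithValue(r0, d0, r1, r2, r4, &gc_required);
//
// On the gc_required path a caller typically falls back to a runtime call.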


void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch1,
                                     Register scratch2, Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!result.is(value));

  // Allocate the JSValue in new space.
  Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
  str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
  LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  str(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
  str(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
  str(value, FieldMemOperand(result, JSValue::kValueOffset));
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}


void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  cmp(length, Operand(kPointerSize));
  b(le, &byte_loop);

  bind(&align_loop_1);
  tst(src, Operand(kPointerSize - 1));
  b(eq, &word_loop);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(&align_loop_1);
  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    tst(src, Operand(kPointerSize - 1));
    Assert(eq, kExpectingAlignmentForCopyBytes);
  }
  cmp(length, Operand(kPointerSize));
  b(lt, &byte_loop);
  ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
  if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) {
    str(scratch, MemOperand(dst, kPointerSize, PostIndex));
  } else {
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
  }
  sub(length, length, Operand(kPointerSize));
  b(&word_loop);

  // Copy the last bytes if any are left.
  bind(&byte_loop);
  cmp(length, Operand::Zero());
  b(eq, &done);
  bind(&byte_loop_1);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);
  bind(&done);
}


void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  b(&entry);
  bind(&loop);
  str(filler, MemOperand(current_address, kPointerSize, PostIndex));
  bind(&entry);
  cmp(current_address, end_address);
  b(lo, &loop);
}
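
// Illustrative usage sketch (register choices and instance_size are
// assumptions): clearing the in-object fields of a freshly allocated object
// in r0 by storing undefined over the half-open range [start, end).
//
//   masm->LoadRoot(r6, Heap::kUndefinedValueRootIndex);
//   masm->add(r2, r0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
//   masm->add(r3, r0, Operand(instance_size - kHeapObjectTag));
//   masm->InitializeFieldsWithFiller(r2, r3, r6);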


// Sets the eq condition if only d0-d15 are available, and ne if d16-d31 are
// available as well; Save/RestoreFPRegs below predicate on these flags.
void MacroAssembler::CheckFor32DRegs(Register scratch) {
  mov(scratch, Operand(ExternalReference::cpu_features()));
  ldr(scratch, MemOperand(scratch));
  tst(scratch, Operand(1u << VFP32DREGS));
}


void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vstm(db_w, location, d16, d31, ne);
  sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
  vstm(db_w, location, d0, d15);
}


void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vldm(ia_w, location, d0, d15);
  vldm(ia_w, location, d16, d31, ne);
  add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch1, first, Operand(kFlatOneByteStringMask));
  and_(scratch2, second, Operand(kFlatOneByteStringMask));
  cmp(scratch1, Operand(kFlatOneByteStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatOneByteStringTag), eq);
  b(ne, failure);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch, type, Operand(kFlatOneByteStringMask));
  cmp(scratch, Operand(kFlatOneByteStringTag));
  b(ne, failure);
}

static const int kRegisterPassedArguments = 4;


int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (use_eabi_hardfloat()) {
    // In the hard floating point calling convention, we can use
    // all double registers to pass doubles.
    if (num_double_arguments > DoubleRegister::NumRegisters()) {
      stack_passed_words +=
          2 * (num_double_arguments - DoubleRegister::NumRegisters());
    }
  } else {
    // In the soft floating point calling convention, every double
    // argument is passed using two registers.
    num_reg_arguments += 2 * num_double_arguments;
  }
  // Up to four simple arguments are passed in registers r0..r3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  return stack_passed_words;
}
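
// Worked example (illustrative): on soft-float, a call with one integer and
// two double arguments is rewritten to 1 + 2 * 2 == 5 register-sized
// arguments; r0..r3 carry four of them, so one word goes on the stack. On
// hard-float the doubles ride in d0/d1 and the integer in r0, so no stack
// words are needed.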


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string);
  Check(ne, kNonObject);

  ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
  ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));

  and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
  cmp(ip, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType);

  // The index is assumed to come in untagged. Tag it to compare with the
  // string length without using a temp register; it is restored at the end
  // of this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, index, &index_tag_bad);
  b(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  ldr(ip, FieldMemOperand(string, String::kLengthOffset));
  cmp(index, ip);
  Check(lt, kIndexIsTooLarge);

  cmp(index, Operand(Smi::FromInt(0)));
  Check(ge, kIndexIsNegative);

  SmiUntag(index, index);
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}


void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
  DCHECK(src.is(d0));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
  MovToFloatParameter(src);
}


void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
                                          DwVfpRegister src2) {
  DCHECK(src1.is(d0));
  DCHECK(src2.is(d1));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src1);
    vmov(r2, r3, src2);
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  mov(ip, Operand(function));
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if V8_HOST_ARCH_ARM
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort, possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}
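
// Illustrative call sequence (a sketch, assuming the fmod helper exposed as
// ExternalReference::mod_two_doubles_operation in this tree): computing
// fmod(d0, d1) with the result back in d0.
//
//   masm->PrepareCallCFunction(0, 2, scratch);
//   masm->MovToFloatParameters(d0, d1);
//   masm->CallCFunction(
//       ExternalReference::mod_two_doubles_operation(isolate()), 0, 2);
//   masm->MovFromFloatResult(d0);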


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  Bfc(scratch, object, 0, kPageSizeBits);
  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  tst(scratch, Operand(mask));
  b(cc, condition_met);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(ip, Operand(mask_scratch));
  b(first_bit == 1 ? eq : ne, &other_color);
  // Shift left 1 by adding.
  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
  b(eq, &word_boundary);
  tst(ip, Operand(mask_scratch));
  b(second_bit == 1 ? ne : eq, has_color);
  jmp(&other_color);

  bind(&word_boundary);
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  tst(ip, Operand(1));
  b(second_bit == 1 ? ne : eq, has_color);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
  mov(ip, Operand(1));
  mov(mask_reg, Operand(ip, LSL, mask_reg));
}
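
// Worked example (illustrative): for an object at page offset 0x1234, the
// word index is 0x1234 >> 2 == 1165. The first Ubfx extracts the bit
// position within a 32-bit cell (1165 & 31 == 13) and the second extracts
// the cell index (1165 >> 5 == 36), so mask_reg ends up as 1 << 13 and
// bitmap_reg points at cell 36 of the page's mark bitmap.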


void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Register load_scratch,
                                 Label* value_is_white) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(mask_scratch, load_scratch);
  b(eq, value_is_white);
}


void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  Usat(output_reg, 8, Operand(input_reg));
}


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;

  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);

  // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest
  // rounding mode will provide the correct result.
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());

  bind(&done);
}
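
// Behavior sketch (illustrative): 300.0 and +infinity compare >= 255.0 and
// clamp to 255. Everything else takes the conversion path, where
// round-to-nearest does the rest: 254.6 converts to 255, negative inputs
// saturate to 0, and NaN (for which the ge compare is unordered and thus
// false) also converts to 0.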


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  ldr(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  ldr(dst,
      FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  ldr(dst, FieldMemOperand(dst, offset));
}


void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);

  jmp(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);

  // Check that there are no elements. Register r2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}


void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  add(scratch_reg, receiver_reg,
      Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Operand(new_space_start));
  b(lt, no_memento_found);
  mov(ip, Operand(new_space_allocation_top));
  ldr(ip, MemOperand(ip));
  cmp(scratch_reg, ip);
  b(gt, no_memento_found);
  ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
  cmp(scratch_reg,
      Operand(isolate()->factory()->allocation_memento_map()));
}


Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // Start the walk at the object's own prototype.
  mov(current, object);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(eq, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));

  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  ldrb(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  cmp(scratch1, Operand(JS_OBJECT_TYPE));
  b(lo, found);

  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(ne, &loop_again);

  bind(&end);
}


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  bool neg = (mag.multiplier & (1U << 31)) != 0;
  if (divisor > 0 && neg) {
    smmla(result, dividend, ip, dividend);
  } else {
    smmul(result, dividend, ip);
    if (divisor < 0 && !neg && mag.multiplier > 0) {
      sub(result, result, Operand(dividend));
    }
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}
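
// Worked example (illustrative): for divisor == 7, SignedDivisionByConstant
// yields multiplier 0x92492493 (negative as a signed value) and shift 2, so
// the smmla path is taken. For dividend == -100: the high word of the
// 64-bit product is 42, smmla adds the dividend to give -58, the ASR by 2
// yields -15, and adding dividend >> 31 (1 for negative inputs) corrects
// the result to -14 == trunc(-100 / 7).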

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM