// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code.
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block the constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block the constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code to ensure we assume the
  // same constant pool availability (e.g., whether the constant pool is full
  // or not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // The call sequence on V7 or later may be:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                 @ return address
  // Or, for pre-V7 code or for values that may be back-patched
  // to avoid ICache flushes:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                 @ return address

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code.
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}

void MacroAssembler::CallDeoptimizer(Address target) {
  BlockConstPoolScope block_const_pool(this);

  uintptr_t target_raw = reinterpret_cast<uintptr_t>(target);

  // We use blx, like a call, but it does not return here. The link register is
  // used by the deoptimizer to work out what called it.
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatureScope scope(this, ARMv7);
    movw(ip, target_raw & 0xffff);
    movt(ip, (target_raw >> 16) & 0xffff);
    blx(ip);
  } else {
    // We need to load a literal, but we can't use the usual constant pool
    // because we call this from a patcher, and cannot afford the guard
    // instruction and other administrative overhead.
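    // A note on the addressing (assuming kInstrSize == 4 and kPcLoadDelta == 8
    // on ARM): the ldr below sits at some address X, the blx at X + 4, and the
    // literal emitted by dd() at X + 8. Reading pc in the ldr yields X + 8, so
    // the offset (2 * kInstrSize) - kPcLoadDelta == 0 points exactly at the
    // literal.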
    ldr(ip, MemOperand(pc, (2 * kInstrSize) - kPcLoadDelta));
    blx(ip);
    dd(target_raw);
  }
}

int MacroAssembler::CallDeoptimizerSize() {
  // ARMv7+:
  //  movw ip, ...
  //  movt ip, ...
  //  blx  ip  @ This never returns.
  //
  // ARMv6:
  //  ldr ip, =address
  //  blx ip   @ This never returns.
  //  .word address
  return 3 * kInstrSize;
}

void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}

void MacroAssembler::Drop(Register count, Condition cond) {
  add(sp, sp, Operand(count, LSL, kPointerSizeLog2), LeaveCC, cond);
}

void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}

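// Swap two registers. Without a scratch register this falls back on the
// classic three-EOR in-place swap: e.g. for reg1 = 5 and reg2 = 3 the values
// go (5, 3) -> (6, 3) -> (6, 5) -> (3, 5).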
void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    mov(dst, Operand(value));
  } else {
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      mov(dst, Operand(value));
    }
  }
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}

void MacroAssembler::Move(SwVfpRegister dst, SwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}

void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}

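// Multiply-and-subtract: dst = srcA - (src1 * src2). ARMv7 has a single mls
// instruction for this; pre-v7 we synthesize it with a mul into ip followed
// by a sub, which is why srcA must not alias ip.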
void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(ARMv7)) {
    CpuFeatureScope scope(this, ARMv7);
    mls(dst, src1, src2, srcA, cond);
  } else {
    DCHECK(!srcA.is(ip));
    mul(ip, src1, src2, LeaveCC, cond);
    sub(dst, srcA, ip, LeaveCC, cond);
  }
}

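// And() special-cases two kinds of immediate: zero becomes a plain mov, and a
// contiguous low-bit mask (an immediate of the form 2^n - 1) becomes a ubfx
// of the low n bits when materializing the mask would take more than one
// instruction. For example, src2 == 0xff extracts the low 8 bits.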
void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
             base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}

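// Unsigned bit-field extract. Without ARMv7's ubfx (or when code size must
// stay predictable), the field [lsb, lsb + width) is selected with a mask and
// then shifted down. For example, lsb = 4 and width = 8 give
// mask = (1 << 12) - 1 - ((1 << 4) - 1) = 0xff0.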
void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}

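// Signed bit-field extract. The fallback masks out the field, shifts it up so
// that its top bit lands in bit 31, then shifts back down arithmetically so
// the field's sign bit is replicated through the high bits.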
void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root = isolate()->heap()->root_handle(index);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cond, branch);
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count the number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    ldr(ip, MemOperand(address));
    cmp(ip, value);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count the number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}

void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to update
  // the remembered set. If incremental marking is off, there is nothing for us
  // to do.
  if (!FLAG_incremental_marking) return;

  DCHECK(js_function.is(r1));
  DCHECK(code_entry.is(r4));
  DCHECK(scratch.is(r5));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    add(scratch, js_function, Operand(offset - kHeapObjectTag));
    ldr(ip, MemOperand(scratch));
    cmp(ip, code_entry);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into the young generation.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  const Register dst = scratch;
  add(dst, js_function, Operand(offset - kHeapObjectTag));

  push(code_entry);

  // Save caller-saved registers, which includes js_function.
  DCHECK((kCallerSaved & js_function.bit()) != 0);
  DCHECK_EQ(kCallerSaved & code_entry.bit(), 0);
  stm(db_w, sp, (kCallerSaved | lr.bit()));

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);

  mov(r0, js_function);
  mov(r1, dst);
  mov(r2, Operand(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers (including js_function and code_entry).
  ldm(ia_w, sp, (kCallerSaved | lr.bit()));

  pop(code_entry);

  bind(&done);
}

void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load the store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  ldr(scratch, MemOperand(ip));
  // Store the pointer to the buffer and increment the buffer top.
  str(address, MemOperand(scratch, kPointerSize, PostIndex));
  // Write back the new top of the buffer.
  str(scratch, MemOperand(ip));
  // Check for the end of the buffer; the stub below is only called when the
  // buffer is full.
  tst(scratch, Operand(StoreBuffer::kStoreBufferMask));
  if (and_then == kFallThroughAtEnd) {
    b(ne, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(ne);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}

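// Push the standard frame slots (fp and lr, plus pp when the embedded
// constant pool is enabled) together with an optional marker, and leave fp
// pointing at the saved fp slot. stm stores registers in ascending order of
// register code, so a marker register whose code is above fp (or pp) cannot
// be folded into the same stm and must be pushed separately.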
void MacroAssembler::PushCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(2 * kPointerSize));
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        stm(db_w, sp, fp.bit() | lr.bit());
        mov(fp, Operand(sp));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
      }
    }
  } else {
    stm(db_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
    add(fp, sp, Operand(FLAG_enable_embedded_constant_pool ? kPointerSize : 0));
  }
}

void MacroAssembler::PopCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
      }
    }
  } else {
    ldm(ia_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
  }
}

void MacroAssembler::PushStandardFrame(Register function_reg) {
  DCHECK(!function_reg.is_valid() || function_reg.code() < cp.code());
  stm(db_w, sp, (function_reg.is_valid() ? function_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
  int offset = -StandardFrameConstants::kContextOffset;
  offset += function_reg.is_valid() ? kPointerSize : 0;
  add(fp, sp, Operand(offset));
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0,
  // except when FLAG_enable_embedded_constant_pool is set, in which case pp
  // is omitted.
  DCHECK(kSafepointSavedRegisters ==
         (FLAG_enable_embedded_constant_pool
              ? ((1 << (kNumSafepointSavedRegisters + 1)) - 1) & ~pp.bit()
              : (1 << kNumSafepointSavedRegisters) - 1));
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that the lowest encodings are closest to the stack pointer.
  if (FLAG_enable_embedded_constant_pool && reg_code > pp.code()) {
    // RegList omits pp.
    reg_code -= 1;
  }
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // The number of d-regs is not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
  int doubles_size = config->num_allocatable_double_registers() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not applicable.
  if ((dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
      if (dst1.is(src.rn())) {
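        // dst1 doubles as the base register, so load dst2 first; loading dst1
        // first would clobber the base before the second load could use it.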
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not applicable.
  if ((src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}

void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,
                                        const Condition cond) {
  // Subtracting 0.0 preserves all inputs except for signalling NaNs, which
  // become quiet NaNs. We use vsub rather than vadd because vsub preserves
  // -0.0 inputs: -0.0 + 0.0 = 0.0, but -0.0 - 0.0 = -0.0.
  vsub(dst, src, kDoubleRegZero, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const SwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
                                           const float src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const SwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
                                            const float src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const double src2,
                                            const Register fpscr_flags,
                                            const Condition cond) {
  // Compare and load FPSCR.
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
}


void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const double imm,
                          const Register scratch) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  // Handle special values first.
  if (value_rep == zero) {
    vmov(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero) {
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}

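// Registers d0-d15 overlap the single-precision registers s0-s31, so their
// halves can be moved directly as s-registers; d16-d31 (VFPv3) have no
// s-register aliases and need an element-indexed vmov instead.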
void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}
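
// 64-bit shifts of a register pair. In the variable-shift versions below,
// rsb computes scratch = 32 - shift and sets the flags, so the gt branch is
// taken exactly when shift < 32; for shifts of 32 or more, the bits move
// entirely across the register boundary and the vacated half becomes zero
// (or copies of the sign bit, for the arithmetic shift).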
void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  DCHECK(!AreAliased(dst_high, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32:
  and_(scratch, shift, Operand(0x1f));
  lsl(dst_high, src_low, Operand(scratch));
  mov(dst_low, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32:
  lsl(dst_high, src_high, Operand(shift));
  orr(dst_high, dst_high, Operand(src_low, LSR, scratch));
  lsl(dst_low, src_low, Operand(shift));
  bind(&done);
}

void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  if (shift == 0) {
    Move(dst_high, src_high);
    Move(dst_low, src_low);
  } else if (shift == 32) {
    Move(dst_high, src_low);
    Move(dst_low, Operand(0));
  } else if (shift >= 32) {
    shift &= 0x1f;
    lsl(dst_high, src_low, Operand(shift));
    mov(dst_low, Operand(0));
  } else {
    lsl(dst_high, src_high, Operand(shift));
    orr(dst_high, dst_high, Operand(src_low, LSR, 32 - shift));
    lsl(dst_low, src_low, Operand(shift));
  }
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32:
  and_(scratch, shift, Operand(0x1f));
  lsr(dst_low, src_high, Operand(scratch));
  mov(dst_high, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32:
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  lsr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  if (shift == 32) {
    mov(dst_low, src_high);
    mov(dst_high, Operand(0));
  } else if (shift > 32) {
    shift &= 0x1f;
    lsr(dst_low, src_high, Operand(shift));
    mov(dst_high, Operand(0));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    lsr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32:
  and_(scratch, shift, Operand(0x1f));
  asr(dst_low, src_high, Operand(scratch));
  asr(dst_high, src_high, Operand(31));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32:
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  asr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  if (shift == 32) {
    mov(dst_low, src_high);
    asr(dst_high, src_high, Operand(31));
  } else if (shift > 32) {
    shift &= 0x1f;
    asr(dst_low, src_high, Operand(shift));
    asr(dst_high, src_high, Operand(31));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    asr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
    Register code_target_address) {
  DCHECK(FLAG_enable_embedded_constant_pool);
  ldr(pp, MemOperand(code_target_address,
                     Code::kConstantPoolOffset - Code::kHeaderSize));
  add(pp, pp, code_target_address);
}


void MacroAssembler::LoadConstantPoolPointerRegister() {
  DCHECK(FLAG_enable_embedded_constant_pool);
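  // Reading pc in the sub below yields the address of that instruction plus
  // Instruction::kPCReadOffset, so subtracting entry_offset leaves the
  // address of the code object's first instruction in ip.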
  int entry_offset = pc_offset() + Instruction::kPCReadOffset;
  sub(ip, pc, Operand(entry_offset));
  LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}

void MacroAssembler::StubPrologue(StackFrame::Type type) {
  mov(ip, Operand(Smi::FromInt(type)));
  PushCommonFrame(ip);
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}

void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictable_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
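      // Reading pc yields the address of this add plus 8, so r0 ends up
      // holding the address of the add itself, i.e. the start of this code
      // age sequence.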
1243 add(r0, pc, Operand(-8));
1244 ldr(pc, MemOperand(pc, -4));
1245 emit_code_stub_address(stub);
1246 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01001247 PushStandardFrame(r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001248 nop(ip.code());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001249 }
1250 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001251 if (FLAG_enable_embedded_constant_pool) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001252 LoadConstantPoolPointerRegister();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001253 set_constant_pool_available(true);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001254 }
1255}
1256
1257
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001258void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
1259 ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001260 ldr(vector, FieldMemOperand(vector, JSFunction::kLiteralsOffset));
1261 ldr(vector, FieldMemOperand(vector, LiteralsArray::kFeedbackVectorOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001262}
1263
1264
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001265void MacroAssembler::EnterFrame(StackFrame::Type type,
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001266 bool load_constant_pool_pointer_reg) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001267 // r0-r3: preserved
Ben Murdochda12d292016-06-02 14:46:10 +01001268 mov(ip, Operand(Smi::FromInt(type)));
1269 PushCommonFrame(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001270 if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001271 LoadConstantPoolPointerRegister();
1272 }
Ben Murdochda12d292016-06-02 14:46:10 +01001273 if (type == StackFrame::INTERNAL) {
1274 mov(ip, Operand(CodeObject()));
1275 push(ip);
1276 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001277}
1278
1279
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001280int MacroAssembler::LeaveFrame(StackFrame::Type type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001281 // r0: preserved
1282 // r1: preserved
1283 // r2: preserved
1284
1285 // Drop the execution stack down to the frame pointer and restore
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001286 // the caller frame pointer, return address and constant pool pointer
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001287 // (if FLAG_enable_embedded_constant_pool).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001288 int frame_ends;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001289 if (FLAG_enable_embedded_constant_pool) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001290 add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
1291 frame_ends = pc_offset();
1292 ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
1293 } else {
1294 mov(sp, fp);
1295 frame_ends = pc_offset();
1296 ldm(ia_w, sp, fp.bit() | lr.bit());
1297 }
1298 return frame_ends;
Steve Blocka7e24c12009-10-30 11:49:00 +00001299}
1300
1301
void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  mov(ip, Operand(Smi::FromInt(StackFrame::EXIT)));
  PushCommonFrame(ip);
  // Reserve room for saved entry sp and code object.
  sub(sp, fp, Operand(ExitFrameConstants::kFixedFrameSizeFromFp));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
    str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_embedded_constant_pool) {
    str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(ip, Operand(CodeObject()));
  str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(cp, MemOperand(ip));

  // Optionally save all double registers.
  if (save_doubles) {
    SaveFPRegs(sp, ip);
    // Note that d0 will be accessible at
    //   fp - ExitFrameConstants::kFixedFrameSizeFromFp -
    //   DwVfpRegister::kMaxNumRegisters * kDoubleSize,
    // since the sp slot, code slot and constant pool slot (if
    // FLAG_enable_embedded_constant_pool) were pushed after the fp.
  }

  // Reserve room for the return address and the stack space, and align the
  // frame in preparation for calling the runtime function.
  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
  }

  // Set the exit frame sp value to point just before the return address
  // location.
  add(ip, sp, Operand(kPointerSize));
  str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
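
// Rough layout of the exit frame built above (a reading aid inferred from
// the DCHECKs and stores in EnterExitFrame; higher addresses at the top):
//
//   fp + 2 * kPointerSize : caller's stack        (kCallerSPDisplacement)
//   fp + 1 * kPointerSize : saved lr              (kCallerPCOffset)
//   fp + 0                : saved fp              (kCallerFPOffset)
//   fp - ...              : EXIT frame marker, optional constant pool slot,
//                           code object slot, saved-sp slot, optional saved
//                           doubles, then stack_space words plus one word
//                           for the return address, aligned to
//                           ActivationFrameAlignment().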


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  SmiTag(scratch1, length);
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_ARM
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_ARM
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_ARM
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context,
                                    bool argument_count_is_length) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);

  // Optionally restore all double registers.
  if (save_doubles) {
    // Calculate the stack location of the saved doubles and restore them.
    const int offset = ExitFrameConstants::kFixedFrameSizeFromFp;
    sub(r3, fp,
        Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
    RestoreFPRegs(r3, ip);
  }

  // Clear top frame.
  mov(r3, Operand::Zero());
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    ldr(cp, MemOperand(ip));
  }
#ifdef DEBUG
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(r3, MemOperand(ip));
#endif

  // Tear down the exit frame, pop the arguments, and return.
  if (FLAG_enable_embedded_constant_pool) {
    ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(sp, Operand(fp));
  ldm(ia_w, sp, fp.bit() | lr.bit());
  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      add(sp, sp, argument_count);
    } else {
      add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
    }
  }
}
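
// Illustrative note on the two argument_count modes above: if
// argument_count (say r0) holds the value 3 and argument_count_is_length
// is false, the epilogue drops 3 << kPointerSizeLog2 == 12 bytes of
// arguments; if it already holds a byte length of 12 and
// argument_count_is_length is true, the 12 bytes are added to sp directly.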


void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
  if (use_eabi_hardfloat()) {
    Move(dst, d0);
  } else {
    vmov(dst, r0, r1);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
  MovFromFloatResult(dst);
}
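
// Reading aid for the two branches above: under the EABI hard-float
// variant a double result arrives in d0 directly, while under soft-float
// it arrives in the core register pair r0:r1, hence the vmov.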

void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the end of the destination area, where we will put the
  // arguments after we drop the current frame. We add kPointerSize to count
  // the receiver argument, which is not included in the formal parameter
  // count.
  Register dst_reg = scratch0;
  add(dst_reg, fp, Operand(caller_args_count_reg, LSL, kPointerSizeLog2));
  add(dst_reg, dst_reg,
      Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = caller_args_count_reg;
  // Calculate the end of the source area. +kPointerSize is for the receiver.
  if (callee_args_count.is_reg()) {
    add(src_reg, sp, Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
    add(src_reg, src_reg, Operand(kPointerSize));
  } else {
    add(src_reg, sp,
        Operand((callee_args_count.immediate() + 1) * kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(src_reg, dst_reg);
    Check(lo, kStackAccessBelowStackPointer);
  }

  // Restore the caller's frame pointer and return address now, as they will
  // be overwritten by the copying loop.
  ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Now copy the callee arguments to the caller frame, going backwards to
  // avoid corrupting the callee arguments (the source and destination areas
  // could overlap).

  // Both src_reg and dst_reg are pointing to the word after the one to copy,
  // so they must be pre-decremented in the loop.
  Register tmp_reg = scratch1;
  Label loop, entry;
  b(&entry);
  bind(&loop);
  ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
  str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
  bind(&entry);
  cmp(sp, src_reg);
  b(ne, &loop);

  // Leave current frame.
  mov(sp, dst_reg);
}
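
// The copy above, in C-like pseudocode (a sketch of the loop, not code
// that is assembled):
//
//   while (sp != src) {   // walk down both areas, receiver included
//     *--dst = *--src;
//   }
//   sp = dst;             // the caller frame now holds the callee args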

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // set up registers according to the contract with
  // ArgumentsAdaptorTrampoline:
  //  r0: actual arguments count
  //  r1: function (passed through to callee)
  //  r2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(r0));
  DCHECK(expected.is_immediate() || expected.reg().is(r2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(r0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it look
        // like we have a match between the expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      mov(r0, Operand(actual.immediate()));
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        b(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
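
// Summary of the cases handled above (a reading aid, not new behavior):
//   expected == actual                        -> invoke directly
//   expected == kDontAdaptArgumentsSentinel   -> invoke directly; the callee
//                                                copes with any argument
//                                                count (immediate counts only)
//   any other mismatch                        -> go through the
//                                                ArgumentsAdaptorTrampoline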


void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  mov(r4, Operand(last_step_action));
  ldrsb(r4, MemOperand(r4));
  cmp(r4, Operand(StepIn));
  b(lt, &skip_flooding);
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Push the function twice: one copy is consumed as the runtime call
    // argument, the other is restored by the Pop below.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}


void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(r1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(r3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Register code = r4;
    ldr(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }

    // Continue here if InvokePrologue handled the invocation itself (via the
    // arguments adaptor) because of mismatched parameter counts.
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(fun.is(r1));

  Register expected_reg = r2;
  Register temp_reg = r4;

  ldr(temp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(temp_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(expected_reg);

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(fun, new_target, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(function.is(r1));

  // Get the function and set up the context.
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  InvokeFunctionCode(r1, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(r1, function);
  InvokeFunction(r1, expected, actual, flag, call_wrapper);
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  tst(scratch, Operand(kIsNotStringMask));
  b(ne, fail);
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, Operand(LAST_NAME_TYPE));
  b(hi, fail);
}


void MacroAssembler::DebugBreak() {
  mov(r0, Operand::Zero());
  mov(r1,
      Operand(ExternalReference(Runtime::kHandleDebuggerStatement, isolate())));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}


void MacroAssembler::PushStackHandler() {
  // Adjust this code if the handler layout asserted below ever changes.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

  // Link the current handler as the next handler.
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(r5, MemOperand(r6));
  push(r5);

  // Set this new handler as the current one.
  str(sp, MemOperand(r6));
}


void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(r1);
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}
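
// With kSize == kPointerSize and kNextOffset == 0 (per the asserts above),
// a stack handler is a single word on the stack: the link to the next
// handler. PushStackHandler threads a new word onto the list rooted at
// Isolate::kHandlerAddress; PopStackHandler unthreads it.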


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(ip));
  DCHECK(!scratch.is(ip));

  // Load the current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
  Label load_context;
  Label has_context;
  DCHECK(!ip.is(scratch));
  mov(ip, fp);
  bind(&load_context);
  ldr(scratch, MemOperand(ip, CommonFrameConstants::kContextOrFrameTypeOffset));
  JumpIfNotSmi(scratch, &has_context);
  ldr(ip, MemOperand(ip, CommonFrameConstants::kCallerFPOffset));
  b(&load_context);
  bind(&has_context);

  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand::Zero());
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
#endif

  // Load the native context of the current context.
  ldr(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check that the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check that the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull);

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    // Restoring ip is not needed; ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync
// with ComputeIntegerHash in utils.h and with KeyedLoadGenericStub in
// code-stubs-hydrogen.cc.
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  eor(t0, t0, Operand(scratch));

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  // hash = hash ^ (hash >> 16);
  eor(t0, t0, Operand(t0, LSR, 16));
  bic(t0, t0, Operand(0xc0000000u));
}
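
// Equivalent C++ for the sequence above (a sketch mirroring the emitted
// instructions; the canonical version is ComputeIntegerHash in utils.h):
//
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;  // i.e. hash + (hash << 3) + (hash << 11)
//   hash = hash ^ (hash >> 16);
//   hash = hash & 0x3fffffff;  // the bic clears the top two bits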


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register t0,
                                              Register t1,
                                              Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'elements'.
  //            Unchanged on bailout so 'key' and 'elements' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //
  // t1 - used to hold the capacity mask of the dictionary.
  //
  // t2 - used for the index into the dictionary.
  Label done;

  GetNumberHash(t0, t1);

  // Compute the capacity mask.
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  SmiUntag(t1);
  sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    mov(t2, t0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
    cmp(key, Operand(ip));
    if (i != kNumberDictionaryProbes - 1) {
      b(eq, &done);
    } else {
      b(ne, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  ldr(result, FieldMemOperand(t2, kValueOffset));
}
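
// Dictionary entry layout relied on above (per the kEntrySize assert):
// each entry is three words starting at kElementsStartOffset -- key,
// value, details -- which is why the probe index is scaled by 3 and the
// value and details are read at kPointerSize and 2 * kPointerSize past
// the key.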


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address register.
  Register top_address = scratch1;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. We must preserve the ip register at this
  // point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, LeaveCC, cond);
      source = result_end;
      cond = cc;
    }
  }

  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    str(result_end, MemOperand(top_address));
  }

  // Tag object.
  add(result, result, Operand(kHeapObjectTag));
}
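
// Example of the immediate decomposition above (illustrative arithmetic):
// object_size == 0x10004 is consumed in two 8-bit windows, 0x4 and
// 0x10000, each of which encodes as a single ARM immediate (as the DCHECK
// on instructions_required verifies), so the new top is formed by two
// adds rather than one add that would need ip as a scratch register.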


void MacroAssembler::Allocate(Register object_size, Register result,
                              Register result_end, Register scratch,
                              Label* gc_required, AllocationFlags flags) {
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch, Operand(0x7191));
      mov(result_end, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }

  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    str(result_end, MemOperand(top_address));
  }

  // Tag object.
  add(result, result, Operand(kHeapObjectTag));
}
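
// Usage note for the flags above (illustrative): passing SIZE_IN_WORDS
// means |object_size| holds a word count, so it is shifted by
// kPointerSizeLog2 to get bytes; without the flag the register is taken
// to hold a byte count already. RESULT_CONTAINS_TOP skips reloading the
// allocation top when the caller already has it in |result|.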

void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, Register scratch,
                                  AllocationFlags flags) {
  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  Register top_address = scratch;
  mov(top_address, Operand(allocation_top));
  ldr(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top using result. Object size may be in words so a shift
  // is required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  // Unlike in Allocate above, the top pointer is updated unconditionally
  // here; allocation folding dominators do not use FastAllocate.
  str(result_end, MemOperand(top_address));

  add(result, result, Operand(kHeapObjectTag));
}
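
// Unlike Allocate, FastAllocate performs no limit check and takes no
// gc_required label: it presumes a preceding allocation (the allocation
// folding dominator) has already verified that enough space is available,
// so it only bumps the top pointer.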

void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register scratch1, Register scratch2,
                                  AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Set up allocation top address register.
  Register top_address = scratch1;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));
  ldr(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top using result. Object size may be in words so a shift
  // is required to get the number of bytes. We must preserve the ip register
  // at this point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, LeaveCC, cond);
      source = result_end;
      cond = cc;
    }
  }

  // Unlike in Allocate above, the top pointer is updated unconditionally
  // here; allocation folding dominators do not use FastAllocate.
  str(result_end, MemOperand(top_address));

  add(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}
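
// Size computation example for the rounding above (illustrative): with
// length == 5, the character payload is 2 * 5 = 10 bytes; adding
// kObjectAlignmentMask before masking with ~kObjectAlignmentMask rounds
// 10 + SeqTwoByteString::kHeaderSize up to the next multiple of
// kObjectAlignment, i.e. size = align(2 * length + header).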


void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(scratch1, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  const Register temp = type_reg.is(no_reg) ? ip : type_reg;

  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, temp, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  // Registers map and type_reg can be ip. These two lines assert
  // that ip can be used with the two instructions (the constants
  // will never need ip).
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE < 256);
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  DCHECK(!obj.is(ip));
  LoadRoot(ip, index);
  cmp(obj, ip);
}


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(ls, fail);
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(hi, fail);
}
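
// These range checks rely on the ElementsKind ordering asserted above
// (FAST_SMI_ELEMENTS = 0 through FAST_HOLEY_ELEMENTS = 3): one or two
// unsigned comparisons against a maximum bit-field value are enough to
// classify a map's elements kind without extracting the bit field.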
2460
2461
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002462void MacroAssembler::StoreNumberToDoubleElements(
2463 Register value_reg,
2464 Register key_reg,
2465 Register elements_reg,
2466 Register scratch1,
2467 LowDwVfpRegister double_scratch,
2468 Label* fail,
2469 int elements_offset) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002470 DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002471 Label smi_value, store;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002472
2473 // Handle smi values specially.
2474 JumpIfSmi(value_reg, &smi_value);
2475
2476 // Ensure that the object is a heap number
2477 CheckMap(value_reg,
2478 scratch1,
2479 isolate()->factory()->heap_number_map(),
2480 fail,
2481 DONT_DO_SMI_CHECK);
2482
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002483 vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002484 VFPCanonicalizeNaN(double_scratch);
2485 b(&store);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002486
2487 bind(&smi_value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002488 SmiToDouble(double_scratch, value_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002489
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002490 bind(&store);
2491 add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
2492 vstr(double_scratch,
2493 FieldMemOperand(scratch1,
2494 FixedDoubleArray::kHeaderSize - elements_offset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002495}
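
// Hedged sketch of the arithmetic behind Operand::DoubleOffsetFromSmiKey,
// assuming 32-bit smis (value << 1, tag bit 0): untagging the key and
// scaling by sizeof(double) collapses into a single left shift, since
// (key >> 1) * 8 == key << 2.
#include <stdint.h>
static uint32_t DoubleOffsetFromSmiKeySketch(uint32_t smi_key) {
  return smi_key << 2;  // element byte offset within the backing store
}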
2496
2497
2498void MacroAssembler::CompareMap(Register obj,
2499 Register scratch,
2500 Handle<Map> map,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002501 Label* early_success) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002502 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002503 CompareMap(scratch, map, early_success);
2504}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002505
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002506
2507void MacroAssembler::CompareMap(Register obj_map,
2508 Handle<Map> map,
2509 Label* early_success) {
2510 cmp(obj_map, Operand(map));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002511}
2512
2513
Andrei Popescu31002712010-02-23 13:46:05 +00002514void MacroAssembler::CheckMap(Register obj,
2515 Register scratch,
2516 Handle<Map> map,
2517 Label* fail,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002518 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002519 if (smi_check_type == DO_SMI_CHECK) {
Steve Block1e0659c2011-05-24 12:43:12 +01002520 JumpIfSmi(obj, fail);
Andrei Popescu31002712010-02-23 13:46:05 +00002521 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002522
2523 Label success;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002524 CompareMap(obj, scratch, map, &success);
Andrei Popescu31002712010-02-23 13:46:05 +00002525 b(ne, fail);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002526 bind(&success);
Andrei Popescu31002712010-02-23 13:46:05 +00002527}
2528
2529
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002530void MacroAssembler::CheckMap(Register obj,
2531 Register scratch,
2532 Heap::RootListIndex index,
2533 Label* fail,
Ben Murdoch257744e2011-11-30 15:57:28 +00002534 SmiCheckType smi_check_type) {
2535 if (smi_check_type == DO_SMI_CHECK) {
Steve Block1e0659c2011-05-24 12:43:12 +01002536 JumpIfSmi(obj, fail);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002537 }
2538 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
2539 LoadRoot(ip, index);
2540 cmp(scratch, ip);
2541 b(ne, fail);
2542}
2543
2544
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002545void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
2546 Register scratch2, Handle<WeakCell> cell,
2547 Handle<Code> success,
2548 SmiCheckType smi_check_type) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002549 Label fail;
2550 if (smi_check_type == DO_SMI_CHECK) {
2551 JumpIfSmi(obj, &fail);
2552 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002553 ldr(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
2554 CmpWeakValue(scratch1, cell, scratch2);
Ben Murdoch257744e2011-11-30 15:57:28 +00002555 Jump(success, RelocInfo::CODE_TARGET, eq);
2556 bind(&fail);
2557}
2558
2559
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002560void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
2561 Register scratch) {
2562 mov(scratch, Operand(cell));
2563 ldr(scratch, FieldMemOperand(scratch, WeakCell::kValueOffset));
2564 cmp(value, scratch);
2565}
2566
2567
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002568void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002569 mov(value, Operand(cell));
2570 ldr(value, FieldMemOperand(value, WeakCell::kValueOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002571}
2572
2573
2574void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
2575 Label* miss) {
2576 GetWeakValue(value, cell);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002577 JumpIfSmi(value, miss);
2578}
2579
2580
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002581void MacroAssembler::GetMapConstructor(Register result, Register map,
2582 Register temp, Register temp2) {
2583 Label done, loop;
2584 ldr(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
2585 bind(&loop);
2586 JumpIfSmi(result, &done);
2587 CompareObjectType(result, temp, temp2, MAP_TYPE);
2588 b(ne, &done);
2589 ldr(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
2590 b(&loop);
2591 bind(&done);
2592}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002593
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002594
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002595void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
2596 Register scratch, Label* miss) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002597 // Get the prototype or initial map from the function.
2598 ldr(result,
2599 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2600
2601 // If the prototype or initial map is the hole, don't return it and
2602 // simply miss the cache instead. This will allow us to allocate a
2603 // prototype object on-demand in the runtime system.
2604 LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2605 cmp(result, ip);
2606 b(eq, miss);
2607
2608 // If the function does not have an initial map, we're done.
2609 Label done;
2610 CompareObjectType(result, scratch, scratch, MAP_TYPE);
2611 b(ne, &done);
2612
2613 // Get the prototype from the initial map.
2614 ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002615
Steve Blocka7e24c12009-10-30 11:49:00 +00002616 // All done.
2617 bind(&done);
2618}
2619
2620
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002621void MacroAssembler::CallStub(CodeStub* stub,
2622 TypeFeedbackId ast_id,
2623 Condition cond) {
2624 DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
2625 Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
Steve Blocka7e24c12009-10-30 11:49:00 +00002626}
2627
2628
Andrei Popescu31002712010-02-23 13:46:05 +00002629void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
Andrei Popescu31002712010-02-23 13:46:05 +00002630 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
2631}
2632
2633
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002634bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002635 return has_frame_ || !stub->SometimesSetsUpAFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +00002636}
2637
2638
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002639void MacroAssembler::IndexFromHash(Register hash, Register index) {
2640 // If the hash field contains an array index, pick it out. The assert
2641 // checks that the constant for the maximum number of digits for an array
2642 // index cached in the hash field and the number of bits reserved for it
2643 // do not conflict.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002644 DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002645 (1 << String::kArrayIndexValueBits));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002646 DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002647}
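
// A minimal sketch of the decode, with hypothetical shift/width values
// standing in for String::ArrayIndexValueBits: extract the cached index
// field, then retag the result as a smi.
#include <stdint.h>
static int32_t IndexFromHashSketch(uint32_t hash, int shift, int bits) {
  uint32_t index = (hash >> shift) & ((1u << bits) - 1u);
  return static_cast<int32_t>(index << 1);  // DecodeFieldToSmi: smi-tag it
}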
2648
2649
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002650void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002651 if (CpuFeatures::IsSupported(VFP3)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002652 vmov(value.low(), smi);
2653 vcvt_f64_s32(value, 1);
Iain Merrick9ac36c92010-09-13 15:29:50 +01002654 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002655 SmiUntag(ip, smi);
2656 vmov(value.low(), ip);
2657 vcvt_f64_s32(value, value.low());
Iain Merrick9ac36c92010-09-13 15:29:50 +01002658 }
2659}
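
// The VFP3 path never untags: vcvt_f64_s32 with one fractional bit treats
// the tagged word as a fixed-point value, i.e. it multiplies the integer
// interpretation by 2^-1, exactly cancelling the smi tag shift. A hedged
// scalar model of that arithmetic:
static double SmiToDoubleSketch(int tagged_smi) {
  return static_cast<double>(tagged_smi) * 0.5;  // == double(tagged >> 1)
}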
2660
2661
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002662void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
2663 LowDwVfpRegister double_scratch) {
2664 DCHECK(!double_input.is(double_scratch));
2665 vcvt_s32_f64(double_scratch.low(), double_input);
2666 vcvt_f64_s32(double_scratch, double_scratch.low());
2667 VFPCompareAndSetFlags(double_input, double_scratch);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002668}
2669
2670
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002671void MacroAssembler::TryDoubleToInt32Exact(Register result,
2672 DwVfpRegister double_input,
2673 LowDwVfpRegister double_scratch) {
2674 DCHECK(!double_input.is(double_scratch));
2675 vcvt_s32_f64(double_scratch.low(), double_input);
2676 vmov(result, double_scratch.low());
2677 vcvt_f64_s32(double_scratch, double_scratch.low());
2678 VFPCompareAndSetFlags(double_input, double_scratch);
2679}
Steve Block44f0eee2011-05-26 01:26:41 +01002680
Steve Block44f0eee2011-05-26 01:26:41 +01002681
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002682void MacroAssembler::TryInt32Floor(Register result,
2683 DwVfpRegister double_input,
2684 Register input_high,
2685 LowDwVfpRegister double_scratch,
2686 Label* done,
2687 Label* exact) {
2688 DCHECK(!result.is(input_high));
2689 DCHECK(!double_input.is(double_scratch));
2690 Label negative, exception;
Steve Block44f0eee2011-05-26 01:26:41 +01002691
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002692 VmovHigh(input_high, double_input);
Steve Block44f0eee2011-05-26 01:26:41 +01002693
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002694 // Test for NaN and infinities.
2695 Sbfx(result, input_high,
2696 HeapNumber::kExponentShift, HeapNumber::kExponentBits);
2697 cmp(result, Operand(-1));
2698 b(eq, &exception);
2699 // Test for values that can be exactly represented as a
2700 // signed 32-bit integer.
2701 TryDoubleToInt32Exact(result, double_input, double_scratch);
2702 // If exact, return (result already fetched).
2703 b(eq, exact);
2704 cmp(input_high, Operand::Zero());
2705 b(mi, &negative);
Steve Block44f0eee2011-05-26 01:26:41 +01002706
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002707 // Input is in ]+0, +inf[.
2708 // If result equals 0x7fffffff, the input was out of range or
2709 // in ]0x7fffffff, 0x80000000[. We ignore this last case, which
2710 // could still fit into an int32; that means we always treat the
2711 // input as out of range and go to the exception path.
2712 // If result < 0x7fffffff, go to done; the result is already fetched.
2713 cmn(result, Operand(1));
2714 b(mi, &exception);
2715 b(done);
Steve Block44f0eee2011-05-26 01:26:41 +01002716
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002717 // Input is in ]-inf, -0[.
2718 // If x is a non-integer negative number,
2719 // floor(x) == round_to_zero(x) - 1.
2720 bind(&negative);
2721 sub(result, result, Operand(1), SetCC);
2722 // If result is still negative, go to done, result fetched.
2723 // Else, we had an overflow and we fall through exception.
2724 b(mi, done);
2725 bind(&exception);
2726}
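
// Hedged sketch of the identity the negative path relies on (exact
// conversions were already dispatched to the exact label, and NaN,
// infinities and overflow were filtered before that): floor of a negative
// non-integer is round-toward-zero minus one.
static int FloorViaTruncationSketch(double x) {
  int t = static_cast<int>(x);                   // like vcvt_s32_f64
  if (x < 0.0 && static_cast<double>(t) != x) {  // non-integer negative
    return t - 1;
  }
  return t;
}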
Steve Block44f0eee2011-05-26 01:26:41 +01002727
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002728void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
2729 DwVfpRegister double_input,
2730 Label* done) {
2731 LowDwVfpRegister double_scratch = kScratchDoubleReg;
2732 vcvt_s32_f64(double_scratch.low(), double_input);
2733 vmov(result, double_scratch.low());
Steve Block44f0eee2011-05-26 01:26:41 +01002734
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002735 // If result is not saturated (0x7fffffff or 0x80000000), we are done.
2736 sub(ip, result, Operand(1));
2737 cmp(ip, Operand(0x7ffffffe));
2738 b(lt, done);
2739}
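
// Hedged model of the saturation test: vcvt_s32_f64 saturates out-of-range
// inputs to 0x7fffffff or 0x80000000. Subtracting 1 (with wraparound, as
// the sub instruction does) maps exactly those two values to 0x7ffffffe
// and 0x7fffffff, so one signed compare accepts every other result.
#include <stdint.h>
static bool TruncationDefinitelySucceededSketch(int32_t result) {
  int32_t probe = static_cast<int32_t>(static_cast<uint32_t>(result) - 1u);
  return probe < 0x7ffffffe;  // false only for the two saturated values
}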
Steve Block44f0eee2011-05-26 01:26:41 +01002740
Steve Block44f0eee2011-05-26 01:26:41 +01002741
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002742void MacroAssembler::TruncateDoubleToI(Register result,
2743 DwVfpRegister double_input) {
2744 Label done;
Steve Block44f0eee2011-05-26 01:26:41 +01002745
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002746 TryInlineTruncateDoubleToI(result, double_input, &done);
2747
2748 // If we fell through, the inline version didn't succeed; call the stub.
2749 push(lr);
2750 sub(sp, sp, Operand(kDoubleSize)); // Put input on stack.
2751 vstr(double_input, MemOperand(sp, 0));
2752
2753 DoubleToIStub stub(isolate(), sp, result, 0, true, true);
2754 CallStub(&stub);
2755
2756 add(sp, sp, Operand(kDoubleSize));
2757 pop(lr);
2758
Steve Block44f0eee2011-05-26 01:26:41 +01002759 bind(&done);
2760}
2761
2762
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002763void MacroAssembler::TruncateHeapNumberToI(Register result,
2764 Register object) {
Steve Block44f0eee2011-05-26 01:26:41 +01002765 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002766 LowDwVfpRegister double_scratch = kScratchDoubleReg;
2767 DCHECK(!result.is(object));
Steve Block44f0eee2011-05-26 01:26:41 +01002768
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002769 vldr(double_scratch,
2770 MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
2771 TryInlineTruncateDoubleToI(result, double_scratch, &done);
Steve Block44f0eee2011-05-26 01:26:41 +01002772
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002773 // If we fell through, the inline version didn't succeed; call the stub.
2774 push(lr);
2775 DoubleToIStub stub(isolate(),
2776 object,
2777 result,
2778 HeapNumber::kValueOffset - kHeapObjectTag,
2779 true,
2780 true);
2781 CallStub(&stub);
2782 pop(lr);
2783
2784 bind(&done);
2785}
2786
2787
2788void MacroAssembler::TruncateNumberToI(Register object,
2789 Register result,
2790 Register heap_number_map,
2791 Register scratch1,
2792 Label* not_number) {
2793 Label done;
2794 DCHECK(!result.is(object));
2795
2796 UntagAndJumpIfSmi(result, object, &done);
2797 JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
2798 TruncateHeapNumberToI(result, object);
2799
Steve Block44f0eee2011-05-26 01:26:41 +01002800 bind(&done);
2801}
2802
2803
Andrei Popescu31002712010-02-23 13:46:05 +00002804void MacroAssembler::GetLeastBitsFromSmi(Register dst,
2805 Register src,
2806 int num_least_bits) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002807 if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002808 ubfx(dst, src, kSmiTagSize, num_least_bits);
Andrei Popescu31002712010-02-23 13:46:05 +00002809 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002810 SmiUntag(dst, src);
Andrei Popescu31002712010-02-23 13:46:05 +00002811 and_(dst, dst, Operand((1 << num_least_bits) - 1));
2812 }
2813}
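
// Both paths above compute the same value; a sketch of the equivalence,
// assuming 32-bit smis with a one-bit tag:
#include <stdint.h>
static uint32_t LeastBitsFromSmiSketch(uint32_t tagged, int num_least_bits) {
  // ubfx path: extract num_least_bits starting at the bit above the tag.
  uint32_t field = (tagged >> 1) & ((1u << num_least_bits) - 1u);
  return field;  // identical to SmiUntag followed by the and_ mask
}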
2814
2815
Steve Block1e0659c2011-05-24 12:43:12 +01002816void MacroAssembler::GetLeastBitsFromInt32(Register dst,
2817 Register src,
2818 int num_least_bits) {
2819 and_(dst, src, Operand((1 << num_least_bits) - 1));
2820}
2821
2822
Steve Block44f0eee2011-05-26 01:26:41 +01002823void MacroAssembler::CallRuntime(const Runtime::Function* f,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002824 int num_arguments,
2825 SaveFPRegsMode save_doubles) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002826 // All parameters are on the stack. r0 has the return value after call.
2827
2828 // If the expected number of arguments of the runtime function is
2829 // constant, we check that the actual number of arguments matches the
2830 // expectation.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002831 CHECK(f->nargs < 0 || f->nargs == num_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +00002832
Leon Clarke4515c472010-02-03 11:58:03 +00002833 // TODO(1236192): Most runtime routines don't need the number of
2834 // arguments passed in because it is constant. At some point we
2835 // should remove this need and make the runtime routine entry code
2836 // smarter.
2837 mov(r0, Operand(num_arguments));
Steve Block44f0eee2011-05-26 01:26:41 +01002838 mov(r1, Operand(ExternalReference(f, isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002839 CEntryStub stub(isolate(), 1, save_doubles);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002840 CallStub(&stub);
2841}
2842
2843
Andrei Popescu402d9372010-02-26 13:31:12 +00002844void MacroAssembler::CallExternalReference(const ExternalReference& ext,
2845 int num_arguments) {
2846 mov(r0, Operand(num_arguments));
2847 mov(r1, Operand(ext));
2848
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002849 CEntryStub stub(isolate(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00002850 CallStub(&stub);
2851}
2852
2853
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002854void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
2855 const Runtime::Function* function = Runtime::FunctionForId(fid);
2856 DCHECK_EQ(1, function->result_size);
2857 if (function->nargs >= 0) {
2858 // TODO(1236192): Most runtime routines don't need the number of
2859 // arguments passed in because it is constant. At some point we
2860 // should remove this need and make the runtime routine entry code
2861 // smarter.
2862 mov(r0, Operand(function->nargs));
2863 }
2864 JumpToExternalReference(ExternalReference(fid, isolate()));
Steve Block6ded16b2010-05-10 14:33:55 +01002865}
2866
2867
2868void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002869#if defined(__thumb__)
2870 // Thumb mode builtin.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002871 DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002872#endif
2873 mov(r1, Operand(builtin));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002874 CEntryStub stub(isolate(), 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002875 Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
2876}
2877
2878
Steve Blocka7e24c12009-10-30 11:49:00 +00002879void MacroAssembler::SetCounter(StatsCounter* counter, int value,
2880 Register scratch1, Register scratch2) {
2881 if (FLAG_native_code_counters && counter->Enabled()) {
2882 mov(scratch1, Operand(value));
2883 mov(scratch2, Operand(ExternalReference(counter)));
2884 str(scratch1, MemOperand(scratch2));
2885 }
2886}
2887
2888
2889void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
2890 Register scratch1, Register scratch2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002891 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002892 if (FLAG_native_code_counters && counter->Enabled()) {
2893 mov(scratch2, Operand(ExternalReference(counter)));
2894 ldr(scratch1, MemOperand(scratch2));
2895 add(scratch1, scratch1, Operand(value));
2896 str(scratch1, MemOperand(scratch2));
2897 }
2898}
2899
2900
2901void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
2902 Register scratch1, Register scratch2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002903 DCHECK(value > 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002904 if (FLAG_native_code_counters && counter->Enabled()) {
2905 mov(scratch2, Operand(ExternalReference(counter)));
2906 ldr(scratch1, MemOperand(scratch2));
2907 sub(scratch1, scratch1, Operand(value));
2908 str(scratch1, MemOperand(scratch2));
2909 }
2910}
2911
2912
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002913void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
Steve Block44f0eee2011-05-26 01:26:41 +01002914 if (emit_debug_code())
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002915 Check(cond, reason);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002916}
2917
2918
Iain Merrick75681382010-08-19 15:07:18 +01002919void MacroAssembler::AssertFastElements(Register elements) {
Steve Block44f0eee2011-05-26 01:26:41 +01002920 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002921 DCHECK(!elements.is(ip));
Iain Merrick75681382010-08-19 15:07:18 +01002922 Label ok;
2923 push(elements);
2924 ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
2925 LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
2926 cmp(elements, ip);
2927 b(eq, &ok);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002928 LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
2929 cmp(elements, ip);
2930 b(eq, &ok);
Iain Merrick75681382010-08-19 15:07:18 +01002931 LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
2932 cmp(elements, ip);
2933 b(eq, &ok);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002934 Abort(kJSObjectWithFastElementsMapHasSlowElements);
Iain Merrick75681382010-08-19 15:07:18 +01002935 bind(&ok);
2936 pop(elements);
2937 }
2938}
2939
2940
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002941void MacroAssembler::Check(Condition cond, BailoutReason reason) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002942 Label L;
Steve Block1e0659c2011-05-24 12:43:12 +01002943 b(cond, &L);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002944 Abort(reason);
Steve Blocka7e24c12009-10-30 11:49:00 +00002945 // will not return here
2946 bind(&L);
2947}
2948
2949
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002950void MacroAssembler::Abort(BailoutReason reason) {
Steve Block8defd9f2010-07-08 12:39:36 +01002951 Label abort_start;
2952 bind(&abort_start);
Steve Blocka7e24c12009-10-30 11:49:00 +00002953#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002954 const char* msg = GetBailoutReason(reason);
Steve Blocka7e24c12009-10-30 11:49:00 +00002955 if (msg != NULL) {
2956 RecordComment("Abort message: ");
2957 RecordComment(msg);
2958 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002959
2960 if (FLAG_trap_on_abort) {
2961 stop(msg);
2962 return;
2963 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002964#endif
Steve Blockd0582a62009-12-15 09:54:21 +00002965
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002966 mov(r0, Operand(Smi::FromInt(reason)));
Steve Blocka7e24c12009-10-30 11:49:00 +00002967 push(r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002968
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002969 // Disable stub call restrictions to always allow calls to abort.
2970 if (!has_frame_) {
2971 // We don't actually want to generate a pile of code for this, so just
2972 // claim there is a stack frame, without generating one.
2973 FrameScope scope(this, StackFrame::NONE);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002974 CallRuntime(Runtime::kAbort);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002975 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002976 CallRuntime(Runtime::kAbort);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002977 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002978 // will not return here
Steve Block8defd9f2010-07-08 12:39:36 +01002979 if (is_const_pool_blocked()) {
2980 // If the calling code cares about the exact number of
2981 // instructions generated, we insert padding here to keep the size
2982 // of the Abort macro constant.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002983 static const int kExpectedAbortInstructions = 7;
Steve Block8defd9f2010-07-08 12:39:36 +01002984 int abort_instructions = InstructionsGeneratedSince(&abort_start);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002985 DCHECK(abort_instructions <= kExpectedAbortInstructions);
Steve Block8defd9f2010-07-08 12:39:36 +01002986 while (abort_instructions++ < kExpectedAbortInstructions) {
2987 nop();
2988 }
2989 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002990}
2991
2992
Steve Blockd0582a62009-12-15 09:54:21 +00002993void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2994 if (context_chain_length > 0) {
2995 // Move up the chain of contexts to the context containing the slot.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002996 ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00002997 for (int i = 1; i < context_chain_length; i++) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002998 ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Blockd0582a62009-12-15 09:54:21 +00002999 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003000 } else {
3001 // Slot is in the current function context. Move it into the
3002 // destination register in case we store into it (the write barrier
3003 // cannot be allowed to destroy the context register cp).
3004 mov(dst, cp);
3005 }
Steve Blockd0582a62009-12-15 09:54:21 +00003006}
3007
3008
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003009void MacroAssembler::LoadTransitionedArrayMapConditional(
3010 ElementsKind expected_kind,
3011 ElementsKind transitioned_kind,
3012 Register map_in_out,
3013 Register scratch,
3014 Label* no_map_match) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003015 DCHECK(IsFastElementsKind(expected_kind));
3016 DCHECK(IsFastElementsKind(transitioned_kind));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003017
3018 // Check that the function's map is the same as the expected cached map.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003019 ldr(scratch, NativeContextMemOperand());
3020 ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003021 cmp(map_in_out, ip);
3022 b(ne, no_map_match);
3023
3024 // Use the transitioned cached map.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003025 ldr(map_in_out,
3026 ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003027}
3028
3029
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003030void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
3031 ldr(dst, NativeContextMemOperand());
3032 ldr(dst, ContextMemOperand(dst, index));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003033}
3034
3035
3036void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
3037 Register map,
3038 Register scratch) {
3039 // Load the initial map. The global functions all have initial maps.
3040 ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01003041 if (emit_debug_code()) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003042 Label ok, fail;
Ben Murdoch257744e2011-11-30 15:57:28 +00003043 CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003044 b(&ok);
3045 bind(&fail);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003046 Abort(kGlobalFunctionsMustHaveInitialMap);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003047 bind(&ok);
3048 }
3049}
3050
3051
Steve Block1e0659c2011-05-24 12:43:12 +01003052void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
3053 Register reg,
3054 Register scratch,
3055 Label* not_power_of_two_or_zero) {
3056 sub(scratch, reg, Operand(1), SetCC);
3057 b(mi, not_power_of_two_or_zero);
3058 tst(scratch, reg);
3059 b(ne, not_power_of_two_or_zero);
3060}
3061
3062
Steve Block44f0eee2011-05-26 01:26:41 +01003063void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
3064 Register reg,
3065 Register scratch,
3066 Label* zero_and_neg,
3067 Label* not_power_of_two) {
3068 sub(scratch, reg, Operand(1), SetCC);
3069 b(mi, zero_and_neg);
3070 tst(scratch, reg);
3071 b(ne, not_power_of_two);
3072}
3073
3074
Andrei Popescu31002712010-02-23 13:46:05 +00003075void MacroAssembler::JumpIfNotBothSmi(Register reg1,
3076 Register reg2,
3077 Label* on_not_both_smi) {
Steve Block1e0659c2011-05-24 12:43:12 +01003078 STATIC_ASSERT(kSmiTag == 0);
Andrei Popescu31002712010-02-23 13:46:05 +00003079 tst(reg1, Operand(kSmiTagMask));
3080 tst(reg2, Operand(kSmiTagMask), eq);
3081 b(ne, on_not_both_smi);
3082}
3083
3084
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003085void MacroAssembler::UntagAndJumpIfSmi(
3086 Register dst, Register src, Label* smi_case) {
3087 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003088 SmiUntag(dst, src, SetCC);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003089 b(cc, smi_case); // Shifter carry is not set for a smi.
3090}
3091
3092
3093void MacroAssembler::UntagAndJumpIfNotSmi(
3094 Register dst, Register src, Label* non_smi_case) {
3095 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003096 SmiUntag(dst, src, SetCC);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003097 b(cs, non_smi_case); // Shifter carry is set for a non-smi.
3098}
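
// Hedged sketch of the carry trick used by both helpers: SmiUntag with
// SetCC is an arithmetic shift right by one that deposits the shifted-out
// tag bit in the carry flag, so a single instruction both untags the value
// and classifies it as smi (carry clear) or non-smi (carry set).
#include <stdint.h>
struct UntagSketchResult { int32_t value; bool was_smi; };
static UntagSketchResult UntagAndClassifySketch(int32_t tagged) {
  UntagSketchResult r;
  r.value = tagged >> 1;          // SmiUntag
  r.was_smi = (tagged & 1) == 0;  // the bit that lands in the carry flag
  return r;
}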
3099
3100
Andrei Popescu31002712010-02-23 13:46:05 +00003101void MacroAssembler::JumpIfEitherSmi(Register reg1,
3102 Register reg2,
3103 Label* on_either_smi) {
Steve Block1e0659c2011-05-24 12:43:12 +01003104 STATIC_ASSERT(kSmiTag == 0);
Andrei Popescu31002712010-02-23 13:46:05 +00003105 tst(reg1, Operand(kSmiTagMask));
3106 tst(reg2, Operand(kSmiTagMask), ne);
3107 b(eq, on_either_smi);
3108}
3109
Ben Murdochda12d292016-06-02 14:46:10 +01003110void MacroAssembler::AssertNotNumber(Register object) {
3111 if (emit_debug_code()) {
3112 STATIC_ASSERT(kSmiTag == 0);
3113 tst(object, Operand(kSmiTagMask));
3114 Check(ne, kOperandIsANumber);
3115 push(object);
3116 CompareObjectType(object, object, object, HEAP_NUMBER_TYPE);
3117 pop(object);
3118 Check(ne, kOperandIsANumber);
3119 }
3120}
Andrei Popescu31002712010-02-23 13:46:05 +00003121
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003122void MacroAssembler::AssertNotSmi(Register object) {
3123 if (emit_debug_code()) {
3124 STATIC_ASSERT(kSmiTag == 0);
3125 tst(object, Operand(kSmiTagMask));
3126 Check(ne, kOperandIsASmi);
3127 }
Iain Merrick75681382010-08-19 15:07:18 +01003128}
3129
3130
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003131void MacroAssembler::AssertSmi(Register object) {
3132 if (emit_debug_code()) {
3133 STATIC_ASSERT(kSmiTag == 0);
3134 tst(object, Operand(kSmiTagMask));
3135 Check(eq, kOperandIsNotSmi);
3136 }
Steve Block1e0659c2011-05-24 12:43:12 +01003137}
3138
3139
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003140void MacroAssembler::AssertString(Register object) {
3141 if (emit_debug_code()) {
3142 STATIC_ASSERT(kSmiTag == 0);
3143 tst(object, Operand(kSmiTagMask));
3144 Check(ne, kOperandIsASmiAndNotAString);
3145 push(object);
3146 ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
3147 CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
3148 pop(object);
3149 Check(lo, kOperandIsNotAString);
3150 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003151}
3152
3153
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003154void MacroAssembler::AssertName(Register object) {
3155 if (emit_debug_code()) {
3156 STATIC_ASSERT(kSmiTag == 0);
3157 tst(object, Operand(kSmiTagMask));
3158 Check(ne, kOperandIsASmiAndNotAName);
3159 push(object);
3160 ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
3161 CompareInstanceType(object, object, LAST_NAME_TYPE);
3162 pop(object);
3163 Check(le, kOperandIsNotAName);
3164 }
3165}
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003166
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003167
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003168void MacroAssembler::AssertFunction(Register object) {
3169 if (emit_debug_code()) {
3170 STATIC_ASSERT(kSmiTag == 0);
3171 tst(object, Operand(kSmiTagMask));
3172 Check(ne, kOperandIsASmiAndNotAFunction);
3173 push(object);
3174 CompareObjectType(object, object, object, JS_FUNCTION_TYPE);
3175 pop(object);
3176 Check(eq, kOperandIsNotAFunction);
3177 }
3178}
3179
3180
3181void MacroAssembler::AssertBoundFunction(Register object) {
3182 if (emit_debug_code()) {
3183 STATIC_ASSERT(kSmiTag == 0);
3184 tst(object, Operand(kSmiTagMask));
3185 Check(ne, kOperandIsASmiAndNotABoundFunction);
3186 push(object);
3187 CompareObjectType(object, object, object, JS_BOUND_FUNCTION_TYPE);
3188 pop(object);
3189 Check(eq, kOperandIsNotABoundFunction);
3190 }
3191}
3192
Ben Murdochc5610432016-08-08 18:44:38 +01003193void MacroAssembler::AssertGeneratorObject(Register object) {
3194 if (emit_debug_code()) {
3195 STATIC_ASSERT(kSmiTag == 0);
3196 tst(object, Operand(kSmiTagMask));
3197 Check(ne, kOperandIsASmiAndNotAGeneratorObject);
3198 push(object);
3199 CompareObjectType(object, object, object, JS_GENERATOR_OBJECT_TYPE);
3200 pop(object);
3201 Check(eq, kOperandIsNotAGeneratorObject);
3202 }
3203}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003204
Ben Murdoch097c5b22016-05-18 11:27:45 +01003205void MacroAssembler::AssertReceiver(Register object) {
3206 if (emit_debug_code()) {
3207 STATIC_ASSERT(kSmiTag == 0);
3208 tst(object, Operand(kSmiTagMask));
3209 Check(ne, kOperandIsASmiAndNotAReceiver);
3210 push(object);
3211 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
3212 CompareObjectType(object, object, object, FIRST_JS_RECEIVER_TYPE);
3213 pop(object);
3214 Check(hs, kOperandIsNotAReceiver);
3215 }
3216}
3217
3218
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003219void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
3220 Register scratch) {
3221 if (emit_debug_code()) {
3222 Label done_checking;
3223 AssertNotSmi(object);
3224 CompareRoot(object, Heap::kUndefinedValueRootIndex);
3225 b(eq, &done_checking);
3226 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
3227 CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
3228 Assert(eq, kExpectedUndefinedOrCell);
3229 bind(&done_checking);
3230 }
3231}
3232
3233
3234void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
3235 if (emit_debug_code()) {
3236 CompareRoot(reg, index);
3237 Check(eq, kHeapNumberMapRegisterClobbered);
3238 }
Steve Block1e0659c2011-05-24 12:43:12 +01003239}
3240
3241
3242void MacroAssembler::JumpIfNotHeapNumber(Register object,
3243 Register heap_number_map,
3244 Register scratch,
3245 Label* on_not_heap_number) {
3246 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003247 AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
Steve Block1e0659c2011-05-24 12:43:12 +01003248 cmp(scratch, heap_number_map);
3249 b(ne, on_not_heap_number);
3250}
3251
3252
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003253void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
3254 Register first, Register second, Register scratch1, Register scratch2,
Leon Clarked91b9f72010-01-27 17:25:45 +00003255 Label* failure) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003256 // Test that both first and second are sequential one-byte strings.
Leon Clarked91b9f72010-01-27 17:25:45 +00003257 // Assume that they are non-smis.
3258 ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
3259 ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
3260 ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3261 ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003262
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003263 JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
3264 scratch2, failure);
Leon Clarked91b9f72010-01-27 17:25:45 +00003265}
3266
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003267void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
3268 Register second,
3269 Register scratch1,
3270 Register scratch2,
3271 Label* failure) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003272 // Check that neither is a smi.
Leon Clarked91b9f72010-01-27 17:25:45 +00003273 and_(scratch1, first, Operand(second));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003274 JumpIfSmi(scratch1, failure);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003275 JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
3276 scratch2, failure);
3277}
3278
3279
3280void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
3281 Label* not_unique_name) {
3282 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3283 Label succeed;
3284 tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
3285 b(eq, &succeed);
3286 cmp(reg, Operand(SYMBOL_TYPE));
3287 b(ne, not_unique_name);
3288
3289 bind(&succeed);
Leon Clarked91b9f72010-01-27 17:25:45 +00003290}
3291
Steve Blockd0582a62009-12-15 09:54:21 +00003292
Steve Block6ded16b2010-05-10 14:33:55 +01003293// Allocates a heap number or jumps to the gc_required label if the young space
3294// is full and a scavenge is needed.
3295void MacroAssembler::AllocateHeapNumber(Register result,
3296 Register scratch1,
3297 Register scratch2,
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003298 Register heap_number_map,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003299 Label* gc_required,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003300 MutableMode mode) {
Steve Block6ded16b2010-05-10 14:33:55 +01003301 // Allocate an object in the heap for the heap number and tag it as a heap
3302 // object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003303 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
Ben Murdochc5610432016-08-08 18:44:38 +01003304 NO_ALLOCATION_FLAGS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003305
3306 Heap::RootListIndex map_index = mode == MUTABLE
3307 ? Heap::kMutableHeapNumberMapRootIndex
3308 : Heap::kHeapNumberMapRootIndex;
3309 AssertIsRoot(heap_number_map, map_index);
Steve Block6ded16b2010-05-10 14:33:55 +01003310
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003311 // Store heap number map in the allocated object.
Ben Murdochc5610432016-08-08 18:44:38 +01003312 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003313}
3314
3315
Steve Block8defd9f2010-07-08 12:39:36 +01003316void MacroAssembler::AllocateHeapNumberWithValue(Register result,
3317 DwVfpRegister value,
3318 Register scratch1,
3319 Register scratch2,
3320 Register heap_number_map,
3321 Label* gc_required) {
3322 AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
3323 sub(scratch1, result, Operand(kHeapObjectTag));
3324 vstr(value, scratch1, HeapNumber::kValueOffset);
3325}
3326
3327
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003328void MacroAssembler::AllocateJSValue(Register result, Register constructor,
3329 Register value, Register scratch1,
3330 Register scratch2, Label* gc_required) {
3331 DCHECK(!result.is(constructor));
3332 DCHECK(!result.is(scratch1));
3333 DCHECK(!result.is(scratch2));
3334 DCHECK(!result.is(value));
Ben Murdochbb769b22010-08-11 14:56:33 +01003335
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003336 // Allocate JSValue in new space.
Ben Murdochc5610432016-08-08 18:44:38 +01003337 Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required,
3338 NO_ALLOCATION_FLAGS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003339
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003340 // Initialize the JSValue.
3341 LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
3342 str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
3343 LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
3344 str(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
3345 str(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
3346 str(value, FieldMemOperand(result, JSValue::kValueOffset));
3347 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
Ben Murdochbb769b22010-08-11 14:56:33 +01003348}
3349
3350
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003351void MacroAssembler::CopyBytes(Register src,
3352 Register dst,
3353 Register length,
3354 Register scratch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003355 Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003356
3357 // Align src before copying in word size chunks.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003358 cmp(length, Operand(kPointerSize));
3359 b(le, &byte_loop);
3360
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003361 bind(&align_loop_1);
3362 tst(src, Operand(kPointerSize - 1));
3363 b(eq, &word_loop);
3364 ldrb(scratch, MemOperand(src, 1, PostIndex));
3365 strb(scratch, MemOperand(dst, 1, PostIndex));
3366 sub(length, length, Operand(1), SetCC);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003367 b(&align_loop_1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003368 // Copy bytes in word-size chunks.
3369 bind(&word_loop);
Steve Block44f0eee2011-05-26 01:26:41 +01003370 if (emit_debug_code()) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003371 tst(src, Operand(kPointerSize - 1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003372 Assert(eq, kExpectingAlignmentForCopyBytes);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003373 }
3374 cmp(length, Operand(kPointerSize));
3375 b(lt, &byte_loop);
3376 ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003377 if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) {
3378 str(scratch, MemOperand(dst, kPointerSize, PostIndex));
3379 } else {
3380 strb(scratch, MemOperand(dst, 1, PostIndex));
3381 mov(scratch, Operand(scratch, LSR, 8));
3382 strb(scratch, MemOperand(dst, 1, PostIndex));
3383 mov(scratch, Operand(scratch, LSR, 8));
3384 strb(scratch, MemOperand(dst, 1, PostIndex));
3385 mov(scratch, Operand(scratch, LSR, 8));
3386 strb(scratch, MemOperand(dst, 1, PostIndex));
3387 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003388 sub(length, length, Operand(kPointerSize));
3389 b(&word_loop);
3390
3391 // Copy the last bytes if any left.
3392 bind(&byte_loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003393 cmp(length, Operand::Zero());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003394 b(eq, &done);
3395 bind(&byte_loop_1);
3396 ldrb(scratch, MemOperand(src, 1, PostIndex));
3397 strb(scratch, MemOperand(dst, 1, PostIndex));
3398 sub(length, length, Operand(1), SetCC);
3399 b(ne, &byte_loop_1);
3400 bind(&done);
3401}
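
// A hedged C++ sketch of the same strategy: byte-copy until src is
// word-aligned, move word-sized chunks (assuming unaligned stores to dst
// are permitted, as on ARM with UNALIGNED_ACCESSES), then copy the tail.
#include <stddef.h>
#include <stdint.h>
#include <string.h>
static void CopyBytesSketch(const uint8_t* src, uint8_t* dst, size_t n) {
  while (n > sizeof(uintptr_t) &&
         (reinterpret_cast<uintptr_t>(src) & (sizeof(uintptr_t) - 1))) {
    *dst++ = *src++;
    --n;
  }
  while (n >= sizeof(uintptr_t)) {
    uintptr_t chunk;
    memcpy(&chunk, src, sizeof(chunk));  // aligned load from src
    memcpy(dst, &chunk, sizeof(chunk));  // dst may be unaligned
    src += sizeof(chunk);
    dst += sizeof(chunk);
    n -= sizeof(chunk);
  }
  while (n-- > 0) *dst++ = *src++;
}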
3402
3403
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003404void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
3405 Register end_address,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003406 Register filler) {
3407 Label loop, entry;
3408 b(&entry);
3409 bind(&loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003410 str(filler, MemOperand(current_address, kPointerSize, PostIndex));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003411 bind(&entry);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003412 cmp(current_address, end_address);
3413 b(lo, &loop);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003414}
3415
3416
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003417void MacroAssembler::CheckFor32DRegs(Register scratch) {
3418 mov(scratch, Operand(ExternalReference::cpu_features()));
3419 ldr(scratch, MemOperand(scratch));
3420 tst(scratch, Operand(1u << VFP32DREGS));
Steve Block6ded16b2010-05-10 14:33:55 +01003421}
3422
3423
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003424void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
3425 CheckFor32DRegs(scratch);
3426 vstm(db_w, location, d16, d31, ne);
3427 sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
3428 vstm(db_w, location, d0, d15);
3429}
3430
3431
3432void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
3433 CheckFor32DRegs(scratch);
3434 vldm(ia_w, location, d0, d15);
3435 vldm(ia_w, location, d16, d31, ne);
3436 add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
3437}
3438
3439
3440void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
3441 Register first, Register second, Register scratch1, Register scratch2,
Steve Block6ded16b2010-05-10 14:33:55 +01003442 Label* failure) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003443 const int kFlatOneByteStringMask =
Steve Block6ded16b2010-05-10 14:33:55 +01003444 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003445 const int kFlatOneByteStringTag =
3446 kStringTag | kOneByteStringTag | kSeqStringTag;
3447 and_(scratch1, first, Operand(kFlatOneByteStringMask));
3448 and_(scratch2, second, Operand(kFlatOneByteStringMask));
3449 cmp(scratch1, Operand(kFlatOneByteStringTag));
Steve Block6ded16b2010-05-10 14:33:55 +01003450 // Ignore second test if first test failed.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003451 cmp(scratch2, Operand(kFlatOneByteStringTag), eq);
Steve Block6ded16b2010-05-10 14:33:55 +01003452 b(ne, failure);
3453}
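
// Hedged scalar model of the test above; in the assembly, the second cmp
// is predicated on eq, so it only executes when the first compare already
// matched, letting a single branch reject either operand.
#include <stdint.h>
static bool BothSequentialOneByteSketch(uint32_t type1, uint32_t type2,
                                        uint32_t mask, uint32_t tag) {
  return ((type1 & mask) == tag) && ((type2 & mask) == tag);
}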
3454
3455
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003456void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
3457 Register scratch,
3458 Label* failure) {
3459 const int kFlatOneByteStringMask =
Steve Block6ded16b2010-05-10 14:33:55 +01003460 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003461 const int kFlatOneByteStringTag =
3462 kStringTag | kOneByteStringTag | kSeqStringTag;
3463 and_(scratch, type, Operand(kFlatOneByteStringMask));
3464 cmp(scratch, Operand(kFlatOneByteStringTag));
Steve Block6ded16b2010-05-10 14:33:55 +01003465 b(ne, failure);
3466}
3467
Steve Block44f0eee2011-05-26 01:26:41 +01003468static const int kRegisterPassedArguments = 4;
Steve Block6ded16b2010-05-10 14:33:55 +01003469
Steve Block44f0eee2011-05-26 01:26:41 +01003470
Ben Murdoch257744e2011-11-30 15:57:28 +00003471int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
3472 int num_double_arguments) {
3473 int stack_passed_words = 0;
3474 if (use_eabi_hardfloat()) {
3475 // In the hard floating point calling convention, we can use
3476 // all double registers to pass doubles.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003477 if (num_double_arguments > DoubleRegister::NumRegisters()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003478 stack_passed_words +=
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003479 2 * (num_double_arguments - DoubleRegister::NumRegisters());
Ben Murdoch257744e2011-11-30 15:57:28 +00003480 }
3481 } else {
3482 // In the soft floating point calling convention, every double
3483 // argument is passed using two registers.
3484 num_reg_arguments += 2 * num_double_arguments;
3485 }
Steve Block6ded16b2010-05-10 14:33:55 +01003486 // Up to four simple arguments are passed in registers r0..r3.
Ben Murdoch257744e2011-11-30 15:57:28 +00003487 if (num_reg_arguments > kRegisterPassedArguments) {
3488 stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
3489 }
3490 return stack_passed_words;
3491}
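
// A hedged worked example of the counting above, assuming 16 argument
// double registers in the hard-float convention: a call with 5 integer
// arguments and 18 doubles needs (5 - 4) + 2 * (18 - 16) = 5 stack words,
// whereas the soft-float convention needs (5 + 2 * 18) - 4 = 37.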
3492
3493
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003494void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
3495 Register index,
3496 Register value,
3497 uint32_t encoding_mask) {
3498 Label is_object;
3499 SmiTst(string);
3500 Check(ne, kNonObject);
3501
3502 ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
3503 ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
3504
3505 and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
3506 cmp(ip, Operand(encoding_mask));
3507 Check(eq, kUnexpectedStringType);
3508
3509 // The index is assumed to come in untagged; tag it to compare with the
3510 // string length without using a temp register. It is restored at the end
3511 // of this function.
3512 Label index_tag_ok, index_tag_bad;
3513 TrySmiTag(index, index, &index_tag_bad);
3514 b(&index_tag_ok);
3515 bind(&index_tag_bad);
3516 Abort(kIndexIsTooLarge);
3517 bind(&index_tag_ok);
3518
3519 ldr(ip, FieldMemOperand(string, String::kLengthOffset));
3520 cmp(index, ip);
3521 Check(lt, kIndexIsTooLarge);
3522
3523 cmp(index, Operand(Smi::FromInt(0)));
3524 Check(ge, kIndexIsNegative);
3525
3526 SmiUntag(index, index);
3527}
3528
3529
Ben Murdoch257744e2011-11-30 15:57:28 +00003530void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
3531 int num_double_arguments,
3532 Register scratch) {
3533 int frame_alignment = ActivationFrameAlignment();
3534 int stack_passed_arguments = CalculateStackPassedWords(
3535 num_reg_arguments, num_double_arguments);
Steve Block6ded16b2010-05-10 14:33:55 +01003536 if (frame_alignment > kPointerSize) {
3537 // Make stack end at alignment and make room for num_arguments - 4 words
3538 // and the original value of sp.
3539 mov(scratch, sp);
3540 sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003541 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Steve Block6ded16b2010-05-10 14:33:55 +01003542 and_(sp, sp, Operand(-frame_alignment));
3543 str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
3544 } else {
3545 sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
3546 }
3547}
3548
3549
Ben Murdoch257744e2011-11-30 15:57:28 +00003550void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
3551 Register scratch) {
3552 PrepareCallCFunction(num_reg_arguments, 0, scratch);
3553}
3554
3555
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003556void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
3557 DCHECK(src.is(d0));
3558 if (!use_eabi_hardfloat()) {
3559 vmov(r0, r1, src);
Ben Murdoch257744e2011-11-30 15:57:28 +00003560 }
3561}
3562
3563
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003564// On ARM this is just a synonym for MovToFloatParameter, kept to make the purpose clear.
3565void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
3566 MovToFloatParameter(src);
Ben Murdoch257744e2011-11-30 15:57:28 +00003567}
3568
3569
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003570void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
3571 DwVfpRegister src2) {
3572 DCHECK(src1.is(d0));
3573 DCHECK(src2.is(d1));
3574 if (!use_eabi_hardfloat()) {
3575 vmov(r0, r1, src1);
3576 vmov(r2, r3, src2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003577 }
3578}
3579
3580
3581void MacroAssembler::CallCFunction(ExternalReference function,
3582 int num_reg_arguments,
3583 int num_double_arguments) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003584 mov(ip, Operand(function));
3585 CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
Ben Murdoch257744e2011-11-30 15:57:28 +00003586}
3587
3588
3589void MacroAssembler::CallCFunction(Register function,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003590 int num_reg_arguments,
3591 int num_double_arguments) {
3592 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
Ben Murdoch257744e2011-11-30 15:57:28 +00003593}
3594
3595
Steve Block6ded16b2010-05-10 14:33:55 +01003596void MacroAssembler::CallCFunction(ExternalReference function,
3597 int num_arguments) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003598 CallCFunction(function, num_arguments, 0);
Steve Block44f0eee2011-05-26 01:26:41 +01003599}
3600
Ben Murdoch257744e2011-11-30 15:57:28 +00003601
Steve Block44f0eee2011-05-26 01:26:41 +01003602void MacroAssembler::CallCFunction(Register function,
Steve Block44f0eee2011-05-26 01:26:41 +01003603 int num_arguments) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003604 CallCFunction(function, num_arguments, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01003605}
3606
3607
Steve Block44f0eee2011-05-26 01:26:41 +01003608void MacroAssembler::CallCFunctionHelper(Register function,
Ben Murdoch257744e2011-11-30 15:57:28 +00003609 int num_reg_arguments,
3610 int num_double_arguments) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003611 DCHECK(has_frame());
Steve Block6ded16b2010-05-10 14:33:55 +01003612 // Make sure that the stack is aligned before calling a C function unless
3613 // running in the simulator. The simulator has its own alignment check which
3614 // provides more information.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003615#if V8_HOST_ARCH_ARM
Steve Block44f0eee2011-05-26 01:26:41 +01003616 if (emit_debug_code()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003617 int frame_alignment = base::OS::ActivationFrameAlignment();
Steve Block6ded16b2010-05-10 14:33:55 +01003618 int frame_alignment_mask = frame_alignment - 1;
3619 if (frame_alignment > kPointerSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003620 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Steve Block6ded16b2010-05-10 14:33:55 +01003621 Label alignment_as_expected;
3622 tst(sp, Operand(frame_alignment_mask));
3623 b(eq, &alignment_as_expected);
3624 // Don't use Check here, as it will call Runtime_Abort, possibly
3625 // re-entering here.
3626 stop("Unexpected alignment");
3627 bind(&alignment_as_expected);
3628 }
3629 }
3630#endif
3631
3632 // Just call directly. The function called cannot cause a GC, or
3633 // allow preemption, so the return address in the link register
3634 // stays correct.
3635 Call(function);
Ben Murdoch257744e2011-11-30 15:57:28 +00003636 int stack_passed_arguments = CalculateStackPassedWords(
3637 num_reg_arguments, num_double_arguments);
3638 if (ActivationFrameAlignment() > kPointerSize) {
Steve Block6ded16b2010-05-10 14:33:55 +01003639 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
3640 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003641 add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003642 }
Steve Block1e0659c2011-05-24 12:43:12 +01003643}
3644
3645
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003646void MacroAssembler::CheckPageFlag(
3647 Register object,
3648 Register scratch,
3649 int mask,
3650 Condition cc,
3651 Label* condition_met) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003652 DCHECK(cc == eq || cc == ne);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003653 Bfc(scratch, object, 0, kPageSizeBits);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003654 ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
3655 tst(scratch, Operand(mask));
3656 b(cc, condition_met);
3657}
3658
3659
3660void MacroAssembler::JumpIfBlack(Register object,
3661 Register scratch0,
3662 Register scratch1,
3663 Label* on_black) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003664 HasColor(object, scratch0, scratch1, on_black, 1, 1); // kBlackBitPattern.
3665 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003666}
3667
3668
3669void MacroAssembler::HasColor(Register object,
3670 Register bitmap_scratch,
3671 Register mask_scratch,
3672 Label* has_color,
3673 int first_bit,
3674 int second_bit) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003675 DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003676
3677 GetMarkBits(object, bitmap_scratch, mask_scratch);
3678
3679 Label other_color, word_boundary;
3680 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
3681 tst(ip, Operand(mask_scratch));
3682 b(first_bit == 1 ? eq : ne, &other_color);
3683 // Shift left by 1 by adding the mask to itself.
3684 add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
3685 b(eq, &word_boundary);
3686 tst(ip, Operand(mask_scratch));
3687 b(second_bit == 1 ? ne : eq, has_color);
3688 jmp(&other_color);
3689
3690 bind(&word_boundary);
3691 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
3692 tst(ip, Operand(1));
3693 b(second_bit == 1 ? ne : eq, has_color);
3694 bind(&other_color);
3695}
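
// Hedged model of the two-bit probe, folding the word-boundary case into
// 64-bit arithmetic: the second mark bit is the bitmap position directly
// above the first, which may spill into the next 32-bit cell.
#include <stdint.h>
static bool HasColorSketch(uint64_t bitmap_cells, uint32_t mask,
                           int first_bit, int second_bit) {
  bool b1 = (bitmap_cells & mask) != 0;
  if (b1 != (first_bit == 1)) return false;
  uint64_t mask2 = static_cast<uint64_t>(mask) << 1;  // next bit position
  bool b2 = (bitmap_cells & mask2) != 0;
  return b2 == (second_bit == 1);
}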
3696
3697
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003698void MacroAssembler::GetMarkBits(Register addr_reg,
3699 Register bitmap_reg,
3700 Register mask_reg) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003701 DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003702 and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
3703 Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
3704 const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
3705 Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
3706 add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
3707 mov(ip, Operand(1));
3708 mov(mask_reg, Operand(ip, LSL, mask_reg));
3709}
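
// Hedged sketch of the addressing computed above, with an assumed 1 MB
// page and 32-bit bitmap cells: the page base comes from masking the
// address, the low payload bits select a bit within a cell, and the
// remaining bits select the cell itself (the bitmap header offset that
// MemoryChunk::kHeaderSize supplies is elided here).
#include <stdint.h>
static void GetMarkBitsSketch(uintptr_t addr, uintptr_t* cell_addr,
                              uint32_t* mask) {
  const int kPtrLog2 = 2;                       // 32-bit pointers
  const int kBitsPerCellLog2 = 5;               // 32 bits per cell
  const int kPageBits = 20;                     // assumed page size
  const uintptr_t kPageMask = (1u << kPageBits) - 1u;
  uintptr_t page = addr & ~kPageMask;
  uint32_t bit = (addr >> kPtrLog2) & ((1u << kBitsPerCellLog2) - 1u);
  uintptr_t cell = (addr >> (kPtrLog2 + kBitsPerCellLog2)) &
                   ((1u << (kPageBits - kPtrLog2 - kBitsPerCellLog2)) - 1u);
  *cell_addr = page + cell * sizeof(uint32_t);  // + bitmap header offset
  *mask = 1u << bit;
}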
3710
3711
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003712void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
3713 Register mask_scratch, Register load_scratch,
3714 Label* value_is_white) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003715 DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003716 GetMarkBits(value, bitmap_scratch, mask_scratch);
3717
3718 // If the value is black or grey, we don't need to do anything.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003719 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003720 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
3721 DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003722 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003723
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003724 // Since both black and grey have a 1 in the first position and white does
3725 // not have a 1 there, we only need to check one bit.
3726 ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
3727 tst(mask_scratch, load_scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003728 b(eq, value_is_white);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003729}
3730
3731
Ben Murdoch257744e2011-11-30 15:57:28 +00003732void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
Ben Murdochc5610432016-08-08 18:44:38 +01003733 usat(output_reg, 8, Operand(input_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00003734}


void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;

  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);

  // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest
  // rounding mode will provide the correct result.
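  // NaN compares unordered, so it does not take the >= branch above; with
  // the default FPSCR behaviour vcvt_u32_f64 then converts NaN to 0, which
  // is the desired clamped value.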
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());

  bind(&done);
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  ldr(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  ldr(dst,
      FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  ldr(dst, FieldMemOperand(dst, offset));
}

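// Walks the prototype chain of the object in r0, jumping to call_runtime if
// any map lacks an initialized enum cache, any non-receiver cache is
// non-empty, or any object has elements; clobbers r1-r3, r5 and r6 (a
// summary of the code below, not an authoritative contract).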
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Register null_value = r5;
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);

  // Check that there are no elements. Register r2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance: the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}

void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top_adr =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
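  // Two addresses are on the same page exactly when their XOR has no bits
  // set above the page-alignment mask, which is what the eor/tst pairs
  // below check; e.g. assuming 1MB pages, 0x100010 ^ 0x1FFFF0 == 0x0FFFE0
  // stays within the mask, so the two addresses share a page.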
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  mov(ip, Operand(new_space_allocation_top_adr));
  ldr(ip, MemOperand(ip));
  eor(scratch_reg, scratch_reg, Operand(ip));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(eq, &top_check);
  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  eor(scratch_reg, scratch_reg, Operand(receiver_reg));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(ne, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  mov(ip, Operand(new_space_allocation_top_adr));
  ldr(ip, MemOperand(ip));
  cmp(scratch_reg, ip);
  b(gt, no_memento_found);
  // Memento map check.
  bind(&map_check);
  ldr(scratch_reg, MemOperand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Operand(isolate()->factory()->allocation_memento_map()));
}

Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config = RegisterConfiguration::Crankshaft();
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}
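
// A usage sketch: GetRegisterThatIsNotOneOf(r0, r1) scans the allocatable
// general registers in allocation order and returns the first one that is
// neither r0 nor r1 (the trailing parameters presumably default to no_reg
// in the header declaration).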


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // Start the walk at the object's prototype, reached via its map.
  mov(current, object);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(eq, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));

  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
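  // Instance types ordered below JS_OBJECT_TYPE (JSProxy and JSValue, per
  // the asserts above) cannot be inspected here and are presumably treated
  // conservatively as a hit.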
  ldrb(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  cmp(scratch1, Operand(JS_OBJECT_TYPE));
  b(lo, found);

  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(ne, &loop_again);

  bind(&end);
}


#ifdef DEBUG
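// Two of the arguments alias iff the count of valid registers exceeds the
// number of distinct bits in their combined RegList (a popcount check).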
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that we don't have any pending constant pools.
  DCHECK(masm_.pending_32_bit_constants_.empty());
  DCHECK(masm_.pending_64_bit_constants_.empty());

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
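
// An illustrative usage sketch (not taken from a caller in this file): the
// patcher emits exactly `instructions` instructions over existing code and,
// when constructed with FLUSH, flushes the instruction cache on destruction
// (this sketch assumes the flush_cache parameter defaults to FLUSH in the
// header).
//
//   CodePatcher patcher(isolate, pc, 1);
//   patcher.masm()->mov(r0, Operand(0));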


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  bool neg = (mag.multiplier & (1U << 31)) != 0;
  if (divisor > 0 && neg) {
    smmla(result, dividend, ip, dividend);
  } else {
    smmul(result, dividend, ip);
    if (divisor < 0 && !neg && mag.multiplier > 0) {
      sub(result, result, Operand(dividend));
    }
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}
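
// A worked sketch of the magic-number division above, assuming divisor 7
// (multiplier 0x92492493, shift 2, per Hacker's Delight): the multiplier
// has its sign bit set, so smmla computes dividend + high32(dividend *
// multiplier). For dividend 7 that is 7 + (-3) == 4; 4 >> 2 == 1 and the
// final add of (7 >> 31) == 0 yields 1 == 7 / 7. For dividend -7 it is
// -7 + 2 == -5; -5 >> 2 == -2 plus the sign fix-up 1 yields -1, the
// truncating quotient.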

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM