// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>  // For LONG_MIN, LONG_MAX.

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"

#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}


void MacroAssembler::Jump(Register target, Condition cond) {
  bx(target, cond);
}


void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);
}


void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}


void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}


int MacroAssembler::CallSize(Register target, Condition cond) {
  return kInstrSize;
}


void MacroAssembler::Call(Register target, Condition cond) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);
  blx(target, cond);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
}


int MacroAssembler::CallSize(
    Address target, RelocInfo::Mode rmode, Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;
}


int MacroAssembler::CallStubSize(
    CodeStub* stub, TypeFeedbackId ast_id, Condition cond) {
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate,
                                                   Address target,
                                                   RelocInfo::Mode rmode,
                                                   Condition cond) {
  Instr mov_instr = cond | MOV | LeaveCC;
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
  return kInstrSize +
         mov_operand.instructions_required(NULL, mov_instr) * kInstrSize;
}


void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  // Block constant pool for the call instruction sequence.
  BlockConstPoolScope block_const_pool(this);
  Label start;
  bind(&start);

  bool old_predictable_code_size = predictable_code_size();
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(true);
  }

#ifdef DEBUG
  // Check the expected size before generating code to ensure we assume the
  // same constant pool availability (e.g., whether the constant pool is full
  // or not).
  int expected_size = CallSize(target, rmode, cond);
#endif

  // Call sequence on V7 or later may be:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                 @ return address
  // Or for pre-V7 or values that may be back-patched
  // to avoid ICache flushes:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                 @ return address

  // Statement positions are expected to be recorded when the target
  // address is loaded. The mov method will automatically record positions
  // when pc is the target; since that is not the case here, we have to do it
  // explicitly.
  positions_recorder()->WriteRecordedPositions();

  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  blx(ip, cond);

  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
  if (mode == NEVER_INLINE_TARGET_ADDRESS) {
    set_predictable_code_size(old_predictable_code_size);
  }
}


int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
                             Condition cond) {
  AllowDeferredHandleDereference using_raw_address;
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);
}


void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
                          Condition cond,
                          TargetAddressStorageMode mode) {
  Label start;
  bind(&start);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  // 'code' is always generated ARM code, never THUMB code
  AllowDeferredHandleDereference embedding_raw_address;
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
}


void MacroAssembler::Ret(Condition cond) {
  bx(lr, cond);
}


void MacroAssembler::Drop(int count, Condition cond) {
  if (count > 0) {
    add(sp, sp, Operand(count * kPointerSize), LeaveCC, cond);
  }
}

void MacroAssembler::Drop(Register count, Condition cond) {
  add(sp, sp, Operand(count, LSL, kPointerSizeLog2), LeaveCC, cond);
}

void MacroAssembler::Ret(int drop, Condition cond) {
  Drop(drop, cond);
  Ret(cond);
}


void MacroAssembler::Swap(Register reg1,
                          Register reg2,
                          Register scratch,
                          Condition cond) {
  if (scratch.is(no_reg)) {
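    // No scratch register available: swap in place with the classic
    // three-EOR trick (a ^= b; b ^= a; a ^= b), each step conditional.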
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
  } else {
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
  }
}


void MacroAssembler::Call(Label* target) {
  bl(target);
}


void MacroAssembler::Push(Handle<Object> handle) {
  mov(ip, Operand(handle));
  push(ip);
}


void MacroAssembler::Move(Register dst, Handle<Object> value) {
  AllowDeferredHandleDereference smi_check;
  if (value->IsSmi()) {
    mov(dst, Operand(value));
  } else {
    DCHECK(value->IsHeapObject());
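    // New-space objects may move during a scavenge, so the pointer is not
    // embedded directly; instead an old-space Cell holding the value is
    // embedded and the value is loaded through it.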
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
    } else {
      mov(dst, Operand(value));
    }
  }
}


void MacroAssembler::Move(Register dst, Register src, Condition cond) {
  if (!dst.is(src)) {
    mov(dst, src, LeaveCC, cond);
  }
}


void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
  if (!dst.is(src)) {
    vmov(dst, src);
  }
}


void MacroAssembler::Mls(Register dst, Register src1, Register src2,
                         Register srcA, Condition cond) {
  if (CpuFeatures::IsSupported(MLS)) {
    CpuFeatureScope scope(this, MLS);
    mls(dst, src1, src2, srcA, cond);
  } else {
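    // No MLS instruction available: emulate dst = srcA - src1 * src2 with a
    // mul into ip followed by a sub.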
    DCHECK(!srcA.is(ip));
    mul(ip, src1, src2, LeaveCC, cond);
    sub(dst, srcA, ip, LeaveCC, cond);
  }
}


void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
                         Condition cond) {
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
             base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
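    // src2 + 1 is a power of two, so src2 is a contiguous low-bit mask of the
    // form 2^k - 1; extracting the low k bits with ubfx is cheaper than
    // materializing an immediate that needs several instructions.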
    ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
  } else {
    and_(dst, src1, src2, LeaveCC, cond);
  }
}


void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
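    // Pre-ARMv7 (or predictable-code-size) fallback: build a mask covering
    // bits [lsb, lsb + width) and shift the field down. For example, lsb = 4
    // and width = 8 give mask = 0xff0.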
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    if (lsb != 0) {
      mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    }
  } else {
    ubfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
                          Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
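    // Pre-ARMv7 fallback: mask out the field, shift it up so its top bit
    // lands in bit 31, then arithmetic-shift back down so the field is
    // sign-extended into the upper bits.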
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    if (shift_up != 0) {
      mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    }
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    }
  } else {
    sbfx(dst, src1, lsb, width, cond);
  }
}


void MacroAssembler::Bfi(Register dst,
                         Register src,
                         Register scratch,
                         int lsb,
                         int width,
                         Condition cond) {
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  DCHECK(lsb + width < 32);
  DCHECK(!scratch.is(dst));
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
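    // Pre-ARMv7 fallback for bit-field insert: clear bits [lsb, lsb + width)
    // in dst, mask the low width bits of src into scratch, shift them into
    // place and OR them in.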
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
  } else {
    bfi(dst, src, lsb, width, cond);
  }
}


void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
                         Condition cond) {
  DCHECK(lsb < 32);
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
  } else {
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);
  }
}


void MacroAssembler::Load(Register dst,
                          const MemOperand& src,
                          Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    ldrsb(dst, src);
  } else if (r.IsUInteger8()) {
    ldrb(dst, src);
  } else if (r.IsInteger16()) {
    ldrsh(dst, src);
  } else if (r.IsUInteger16()) {
    ldrh(dst, src);
  } else {
    ldr(dst, src);
  }
}


void MacroAssembler::Store(Register src,
                           const MemOperand& dst,
                           Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    strb(src, dst);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    strh(src, dst);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    str(src, dst);
  }
}


void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
                              Condition cond) {
  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    // The CPU supports fast immediate values, and this root will never
    // change. We will load it as a relocatable immediate value.
    Handle<Object> root = isolate()->heap()->root_handle(index);
    mov(destination, Operand(root), LeaveCC, cond);
    return;
  }
  ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,
                               Condition cond) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cond,
                                Label* branch) {
  DCHECK(cond == eq || cond == ne);
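  // New space is the union of the two semispaces, so a page is in new space
  // iff either semispace flag is set; testing both bits with one mask needs
  // only a single page-flag check.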
  const int mask =
      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
  CheckPageFlag(object, scratch, mask, cond, branch);
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  add(dst, object, Operand(offset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object,
              dst,
              value,
              lr_status,
              save_fp,
              remembered_set_action,
              OMIT_SMI_CHECK,
              pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));
  }
}


// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       LinkRegisterStatus lr_status,
                                       SaveFPRegsMode fp_mode) {
  if (emit_debug_code()) {
    ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    ldr(ip, FieldMemOperand(object, HeapObject::kMapOffset));
    cmp(ip, map);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  Label done;

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then the from object's page's
  // interesting flag is guaranteed to be set as well. This optimization
  // relies on the fact that maps can never be in new space.
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                eq,
                &done);

  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    Label ok;
    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
    b(eq, &ok);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}


// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    LinkRegisterStatus lr_status,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    ldr(ip, MemOperand(address));
    cmp(ip, value);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  eq,
                  &done);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                eq,
                &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    push(lr);
  }
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(lr);
  }

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));
  }
}

void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
                                               Register code_entry,
                                               Register scratch) {
  const int offset = JSFunction::kCodeEntryOffset;

  // Since a code entry (value) is always in old space, we don't need to
  // update the remembered set. If incremental marking is off, there is
  // nothing for us to do.
  if (!FLAG_incremental_marking) return;

  DCHECK(js_function.is(r1));
  DCHECK(code_entry.is(r4));
  DCHECK(scratch.is(r5));
  AssertNotSmi(js_function);

  if (emit_debug_code()) {
    add(scratch, js_function, Operand(offset - kHeapObjectTag));
    ldr(ip, MemOperand(scratch));
    cmp(ip, code_entry);
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  CheckPageFlag(code_entry, scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(js_function, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  const Register dst = scratch;
  add(dst, js_function, Operand(offset - kHeapObjectTag));

  push(code_entry);

  // Save caller-saved registers, which includes js_function.
  DCHECK((kCallerSaved & js_function.bit()) != 0);
  DCHECK_EQ(kCallerSaved & code_entry.bit(), 0);
  stm(db_w, sp, (kCallerSaved | lr.bit()));

  int argument_count = 3;
  PrepareCallCFunction(argument_count, code_entry);

  mov(r0, js_function);
  mov(r1, dst);
  mov(r2, Operand(ExternalReference::isolate_address(isolate())));

  {
    AllowExternalCallThatCantCauseGC scope(this);
    CallCFunction(
        ExternalReference::incremental_marking_record_write_code_entry_function(
            isolate()),
        argument_count);
  }

  // Restore caller-saved registers (including js_function and code_entry).
  ldm(ia_w, sp, (kCallerSaved | lr.bit()));

  pop(code_entry);

  bind(&done);
}

void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register address,
                                         Register scratch,
                                         SaveFPRegsMode fp_mode,
                                         RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  ldr(scratch, MemOperand(ip));
  // Store pointer to buffer and increment buffer top.
  str(address, MemOperand(scratch, kPointerSize, PostIndex));
  // Write back new top of buffer.
  str(scratch, MemOperand(ip));
  // Call stub on end of buffer.
  // Check for end of buffer.
  tst(scratch, Operand(StoreBuffer::kStoreBufferMask));
  if (and_then == kFallThroughAtEnd) {
    b(ne, &done);
  } else {
    DCHECK(and_then == kReturnAtEnd);
    Ret(ne);
  }
  push(lr);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  pop(lr);
  bind(&done);
  if (and_then == kReturnAtEnd) {
    Ret();
  }
}

void MacroAssembler::PushCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
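    // stm stores registers in ascending register-code order, with the lowest
    // code at the lowest address, so the marker can only be folded into the
    // stm (landing on top of the stack, below fp) when its register code is
    // smaller than fp's/pp's; otherwise it is pushed separately afterwards.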
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        stm(db_w, sp, pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(2 * kPointerSize));
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        stm(db_w, sp, fp.bit() | lr.bit());
        mov(fp, Operand(sp));
        Push(marker_reg);
      } else {
        stm(db_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
        add(fp, sp, Operand(kPointerSize));
      }
    }
  } else {
    stm(db_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
    add(fp, sp, Operand(FLAG_enable_embedded_constant_pool ? kPointerSize : 0));
  }
}

void MacroAssembler::PopCommonFrame(Register marker_reg) {
  if (marker_reg.is_valid()) {
    if (FLAG_enable_embedded_constant_pool) {
      if (marker_reg.code() > pp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | pp.bit() | fp.bit() | lr.bit());
      }
    } else {
      if (marker_reg.code() > fp.code()) {
        pop(marker_reg);
        ldm(ia_w, sp, fp.bit() | lr.bit());
      } else {
        ldm(ia_w, sp, marker_reg.bit() | fp.bit() | lr.bit());
      }
    }
  } else {
    ldm(ia_w, sp, (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                      fp.bit() | lr.bit());
  }
}

void MacroAssembler::PushStandardFrame(Register function_reg) {
  DCHECK(!function_reg.is_valid() || function_reg.code() < cp.code());
  stm(db_w, sp, (function_reg.is_valid() ? function_reg.bit() : 0) | cp.bit() |
                    (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                    fp.bit() | lr.bit());
  int offset = -StandardFrameConstants::kContextOffset;
  offset += function_reg.is_valid() ? kPointerSize : 0;
  add(fp, sp, Operand(offset));
}


// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of contiguous register values starting with r0,
  // except when FLAG_enable_embedded_constant_pool, which omits pp.
  DCHECK(kSafepointSavedRegisters ==
         (FLAG_enable_embedded_constant_pool
              ? ((1 << (kNumSafepointSavedRegisters + 1)) - 1) & ~pp.bit()
              : (1 << kNumSafepointSavedRegisters) - 1));
  // Safepoints expect a block of kNumSafepointRegisters values on the
  // stack, so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK(num_unsaved >= 0);
  sub(sp, sp, Operand(num_unsaved * kPointerSize));
  stm(db_w, sp, kSafepointSavedRegisters);
}


void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  ldm(ia_w, sp, kSafepointSavedRegisters);
  add(sp, sp, Operand(num_unsaved * kPointerSize));
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding,
  // which means that lowest encodings are closest to the stack pointer.
  if (FLAG_enable_embedded_constant_pool && reg_code > pp.code()) {
    // RegList omits pp.
    reg_code -= 1;
  }
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return reg_code;
}


MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  // Number of d-regs not known at snapshot time.
  DCHECK(!serializer_enabled());
  // General purpose registers are pushed last on the stack.
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  int doubles_size = config->num_allocatable_double_registers() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
  return MemOperand(sp, doubles_size + register_offset);
}


void MacroAssembler::Ldrd(Register dst1, Register dst2,
                          const MemOperand& src, Condition cond) {
  DCHECK(src.rm().is(no_reg));
  DCHECK(!dst1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((src.am() != PreIndex) && (src.am() != NegPreIndex));

  // Generate two ldr instructions if ldrd is not applicable.
  if ((dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    ldrd(dst1, dst2, src, cond);
  } else {
    if ((src.am() == Offset) || (src.am() == NegOffset)) {
      MemOperand src2(src);
      src2.set_offset(src2.offset() + 4);
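      // If dst1 aliases the base register, load the second word first so the
      // base address is still intact when the first word is loaded.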
      if (dst1.is(src.rn())) {
        ldr(dst2, src2, cond);
        ldr(dst1, src, cond);
      } else {
        ldr(dst1, src, cond);
        ldr(dst2, src2, cond);
      }
    } else {  // PostIndex or NegPostIndex.
      DCHECK((src.am() == PostIndex) || (src.am() == NegPostIndex));
      if (dst1.is(src.rn())) {
        ldr(dst2, MemOperand(src.rn(), 4, Offset), cond);
        ldr(dst1, src, cond);
      } else {
        MemOperand src2(src);
        src2.set_offset(src2.offset() - 4);
        ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond);
        ldr(dst2, src2, cond);
      }
    }
  }
}


void MacroAssembler::Strd(Register src1, Register src2,
                          const MemOperand& dst, Condition cond) {
  DCHECK(dst.rm().is(no_reg));
  DCHECK(!src1.is(lr));  // r14.

  // V8 does not use this addressing mode, so the fallback code
  // below doesn't support it yet.
  DCHECK((dst.am() != PreIndex) && (dst.am() != NegPreIndex));

  // Generate two str instructions if strd is not applicable.
  if ((src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    strd(src1, src2, dst, cond);
  } else {
    MemOperand dst2(dst);
    if ((dst.am() == Offset) || (dst.am() == NegOffset)) {
      dst2.set_offset(dst2.offset() + 4);
      str(src1, dst, cond);
      str(src2, dst2, cond);
    } else {  // PostIndex or NegPostIndex.
      DCHECK((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}
917
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000918void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
919 const DwVfpRegister src,
920 const Condition cond) {
Ben Murdochc5610432016-08-08 18:44:38 +0100921 // Subtracting 0.0 preserves all inputs except for signalling NaNs, which
922 // become quiet NaNs. We use vsub rather than vadd because vsub preserves -0.0
923 // inputs: -0.0 + 0.0 = 0.0, but -0.0 - 0.0 = -0.0.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000924 vsub(dst, src, kDoubleRegZero, cond);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100925}
926
927
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000928void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
929 const SwVfpRegister src2,
930 const Condition cond) {
931 // Compare and move FPSCR flags to the normal condition flags.
932 VFPCompareAndLoadFlags(src1, src2, pc, cond);
933}
934
935void MacroAssembler::VFPCompareAndSetFlags(const SwVfpRegister src1,
936 const float src2,
937 const Condition cond) {
938 // Compare and move FPSCR flags to the normal condition flags.
939 VFPCompareAndLoadFlags(src1, src2, pc, cond);
940}
941
942
Ben Murdochb8e0da22011-05-16 14:20:40 +0100943void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
944 const DwVfpRegister src2,
945 const Condition cond) {
946 // Compare and move FPSCR flags to the normal condition flags.
947 VFPCompareAndLoadFlags(src1, src2, pc, cond);
948}
949
950void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
951 const double src2,
952 const Condition cond) {
953 // Compare and move FPSCR flags to the normal condition flags.
954 VFPCompareAndLoadFlags(src1, src2, pc, cond);
955}
956
957
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000958void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
959 const SwVfpRegister src2,
960 const Register fpscr_flags,
961 const Condition cond) {
962 // Compare and load FPSCR.
963 vcmp(src1, src2, cond);
964 vmrs(fpscr_flags, cond);
965}
966
967void MacroAssembler::VFPCompareAndLoadFlags(const SwVfpRegister src1,
968 const float src2,
969 const Register fpscr_flags,
970 const Condition cond) {
971 // Compare and load FPSCR.
972 vcmp(src1, src2, cond);
973 vmrs(fpscr_flags, cond);
974}
975
976
Ben Murdochb8e0da22011-05-16 14:20:40 +0100977void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
978 const DwVfpRegister src2,
979 const Register fpscr_flags,
980 const Condition cond) {
981 // Compare and load FPSCR.
982 vcmp(src1, src2, cond);
983 vmrs(fpscr_flags, cond);
984}
985
986void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
987 const double src2,
988 const Register fpscr_flags,
989 const Condition cond) {
990 // Compare and load FPSCR.
991 vcmp(src1, src2, cond);
992 vmrs(fpscr_flags, cond);
Ben Murdoch086aeea2011-05-13 15:57:08 +0100993}
994
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000995
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000996void MacroAssembler::Vmov(const DwVfpRegister dst,
997 const double imm,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000998 const Register scratch) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000999 static const DoubleRepresentation minus_zero(-0.0);
1000 static const DoubleRepresentation zero(0.0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001001 DoubleRepresentation value_rep(imm);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001002 // Handle special values first.
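  // Neither +0.0 nor -0.0 is encodable as a VFP immediate, so both are derived
  // from kDoubleRegZero: copied for +0.0 and negated for -0.0, avoiding a
  // constant load.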
  if (value_rep == zero) {
    vmov(dst, kDoubleRegZero);
  } else if (value_rep == minus_zero) {
    vneg(dst, kDoubleRegZero);
  } else {
    vmov(dst, imm, scratch);
  }
}


void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
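  // d0-d15 alias pairs of s-registers, so the high half can be read directly
  // as an s-register; d16-d31 (VFPv3-D32) have no s-register aliases and need
  // a vmov with an explicit lane index instead.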
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);
  } else {
    vmov(dst, VmovIndexHi, src);
  }
}


void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}


void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
  } else {
    vmov(dst, VmovIndexLo, src);
  }
}
void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  DCHECK(!AreAliased(dst_high, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  lsl(dst_high, src_low, Operand(scratch));
  mov(dst_low, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsl(dst_high, src_high, Operand(shift));
  orr(dst_high, dst_high, Operand(src_low, LSR, scratch));
  lsl(dst_low, src_low, Operand(shift));
  bind(&done);
}

void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_high, src_low));
  if (shift == 0) {
    Move(dst_high, src_high);
    Move(dst_low, src_low);
  } else if (shift == 32) {
    Move(dst_high, src_low);
    Move(dst_low, Operand(0));
  } else if (shift >= 32) {
    shift &= 0x1f;
    lsl(dst_high, src_low, Operand(shift));
    mov(dst_low, Operand(0));
  } else {
    lsl(dst_high, src_high, Operand(shift));
    orr(dst_high, dst_high, Operand(src_low, LSR, 32 - shift));
    lsl(dst_low, src_low, Operand(shift));
  }
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  lsr(dst_low, src_high, Operand(scratch));
  mov(dst_high, Operand(0));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  lsr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::LsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  if (shift == 32) {
    mov(dst_low, src_high);
    mov(dst_high, Operand(0));
  } else if (shift > 32) {
    shift &= 0x1f;
    lsr(dst_low, src_high, Operand(shift));
    mov(dst_high, Operand(0));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    lsr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_low, shift));

  Label less_than_32;
  Label done;
  rsb(scratch, shift, Operand(32), SetCC);
  b(gt, &less_than_32);
  // If shift >= 32
  and_(scratch, shift, Operand(0x1f));
  asr(dst_low, src_high, Operand(scratch));
  asr(dst_high, src_high, Operand(31));
  jmp(&done);
  bind(&less_than_32);
  // If shift < 32
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  asr(dst_high, src_high, Operand(shift));
  bind(&done);
}

void MacroAssembler::AsrPair(Register dst_low, Register dst_high,
                             Register src_low, Register src_high,
                             uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  if (shift == 32) {
    mov(dst_low, src_high);
    asr(dst_high, src_high, Operand(31));
  } else if (shift > 32) {
    shift &= 0x1f;
    asr(dst_low, src_high, Operand(shift));
    asr(dst_high, src_high, Operand(31));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    asr(dst_high, src_high, Operand(shift));
  }
}

void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
    Register code_target_address) {
  DCHECK(FLAG_enable_embedded_constant_pool);
  ldr(pp, MemOperand(code_target_address,
                     Code::kConstantPoolOffset - Code::kHeaderSize));
  add(pp, pp, code_target_address);
}


void MacroAssembler::LoadConstantPoolPointerRegister() {
  DCHECK(FLAG_enable_embedded_constant_pool);
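  // Reading pc on ARM yields the address of the current instruction plus 8
  // (Instruction::kPCReadOffset), so pc - (pc_offset() + kPCReadOffset) is the
  // start of this code object's instruction stream, i.e. exactly the code
  // target address that the helper above expects.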
  int entry_offset = pc_offset() + Instruction::kPCReadOffset;
  sub(ip, pc, Operand(entry_offset));
  LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
}

void MacroAssembler::StubPrologue(StackFrame::Type type) {
  mov(ip, Operand(Smi::FromInt(type)));
  PushCommonFrame(ip);
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}

void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictable_code_size_scope(
        this, kNoCodeAgeSequenceLength);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    if (code_pre_aging) {
      // Pre-age the code.
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
      ldr(pc, MemOperand(pc, -4));
      emit_code_stub_address(stub);
    } else {
      PushStandardFrame(r1);
      nop(ip.code());
    }
  }
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}


void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  ldr(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  ldr(vector,
      FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}


void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // r0-r3: preserved
  mov(ip, Operand(Smi::FromInt(type)));
  PushCommonFrame(ip);
  if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
    LoadConstantPoolPointerRegister();
  }
  if (type == StackFrame::INTERNAL) {
    mov(ip, Operand(CodeObject()));
    push(ip);
  }
}


int MacroAssembler::LeaveFrame(StackFrame::Type type) {
  // r0: preserved
  // r1: preserved
  // r2: preserved

  // Drop the execution stack down to the frame pointer and restore
  // the caller frame pointer, return address and constant pool pointer
  // (if FLAG_enable_embedded_constant_pool).
  int frame_ends;
  if (FLAG_enable_embedded_constant_pool) {
    add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    frame_ends = pc_offset();
    ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
  } else {
    mov(sp, fp);
    frame_ends = pc_offset();
    ldm(ia_w, sp, fp.bit() | lr.bit());
  }
  return frame_ends;
}


void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  mov(ip, Operand(Smi::FromInt(StackFrame::EXIT)));
  PushCommonFrame(ip);
  // Reserve room for saved entry sp and code object.
  sub(sp, fp, Operand(ExitFrameConstants::kFixedFrameSizeFromFp));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
    str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_embedded_constant_pool) {
    str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(ip, Operand(CodeObject()));
  str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(fp, MemOperand(ip));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(cp, MemOperand(ip));

  // Optionally save all double registers.
  if (save_doubles) {
    SaveFPRegs(sp, ip);
    // Note that d0 will be accessible at
    //   fp - ExitFrameConstants::kFrameSize -
    //   DwVfpRegister::kMaxNumRegisters * kDoubleSize,
    // since the sp slot, code slot and constant pool slot (if
    // FLAG_enable_embedded_constant_pool) were pushed after the fp.
  }

  // Reserve place for the return address and stack space and align the frame
  // preparing for calling the runtime function.
  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
  if (frame_alignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
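    // frame_alignment is a power of two, so -frame_alignment is a mask with
    // the low log2(frame_alignment) bits clear; the and_ below rounds sp down
    // to an aligned address.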
Steve Block1e0659c2011-05-24 12:43:12 +01001324 and_(sp, sp, Operand(-frame_alignment));
1325 }
1326
1327 // Set the exit frame sp value to point just before the return address
1328 // location.
1329 add(ip, sp, Operand(kPointerSize));
1330 str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001331}


void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  SmiTag(scratch1, length);
  LoadRoot(scratch2, map_index);
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}


int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_ARM
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  // Note: This will break if we ever start generating snapshots on one ARM
  // platform for another ARM platform with a different alignment.
  return base::OS::ActivationFrameAlignment();
#else  // V8_HOST_ARCH_ARM
  // If we are using the simulator then we should always align to the expected
  // alignment. As the simulator is used to generate snapshots we do not know
  // if the target platform will need alignment, so this is controlled from a
  // flag.
  return FLAG_sim_stack_alignment;
#endif  // V8_HOST_ARCH_ARM
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool restore_context,
                                    bool argument_count_is_length) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);

  // Optionally restore all double registers.
  if (save_doubles) {
    // Calculate the stack location of the saved doubles and restore them.
    const int offset = ExitFrameConstants::kFixedFrameSizeFromFp;
    sub(r3, fp,
        Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
    RestoreFPRegs(r3, ip);
  }

  // Clear top frame.
  mov(r3, Operand::Zero());
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  str(r3, MemOperand(ip));

  // Restore current context from top and clear it in debug mode.
  if (restore_context) {
    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
    ldr(cp, MemOperand(ip));
  }
#ifdef DEBUG
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  str(r3, MemOperand(ip));
#endif

  // Tear down the exit frame, pop the arguments, and return.
  if (FLAG_enable_embedded_constant_pool) {
    ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  mov(sp, Operand(fp));
  ldm(ia_w, sp, fp.bit() | lr.bit());
  if (argument_count.is_valid()) {
    if (argument_count_is_length) {
      add(sp, sp, argument_count);
    } else {
      add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
    }
  }
}


void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
  if (use_eabi_hardfloat()) {
    Move(dst, d0);
  } else {
    vmov(dst, r0, r1);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
  MovFromFloatResult(dst);
}

void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the end of the destination area where we will put the arguments
  // after we drop the current frame. We add kPointerSize to count the
  // receiver argument, which is not included in the formal parameter count.
  Register dst_reg = scratch0;
  add(dst_reg, fp, Operand(caller_args_count_reg, LSL, kPointerSizeLog2));
  add(dst_reg, dst_reg,
      Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = caller_args_count_reg;
  // Calculate the end of the source area. +kPointerSize is for the receiver.
  if (callee_args_count.is_reg()) {
    add(src_reg, sp, Operand(callee_args_count.reg(), LSL, kPointerSizeLog2));
    add(src_reg, src_reg, Operand(kPointerSize));
  } else {
    add(src_reg, sp,
        Operand((callee_args_count.immediate() + 1) * kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(src_reg, dst_reg);
    Check(lo, kStackAccessBelowStackPointer);
  }

  // Restore the caller's frame pointer and return address now, as they will
  // be overwritten by the copying loop.
  ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Now copy the callee arguments to the caller frame going backwards to
  // avoid corrupting the callee arguments (the source and destination areas
  // could overlap).

  // Both src_reg and dst_reg point to the word after the one to copy,
  // so they must be pre-decremented in the loop.
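  // C-like sketch of the copy loop below (illustrative only):
  //   while (sp != src_reg) { *--dst_reg = *--src_reg; }
  //   sp = dst_reg;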
  Register tmp_reg = scratch1;
  Label loop, entry;
  b(&entry);
  bind(&loop);
  ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
  str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
  bind(&entry);
  cmp(sp, src_reg);
  b(ne, &loop);

  // Leave current frame.
  mov(sp, dst_reg);
}

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  // Check whether the expected and actual arguments count match. If not,
  // set up registers according to the contract with
  // ArgumentsAdaptorTrampoline:
  //  r0: actual arguments count
  //  r1: function (passed through to callee)
  //  r2: expected arguments count

  // The code below is made a lot easier because the calling code already sets
  // up actual and expected registers according to the contract if values are
  // passed in registers.
  DCHECK(actual.is_immediate() || actual.reg().is(r0));
  DCHECK(expected.is_immediate() || expected.reg().is(r2));

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(r0, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it look
        // like we have a match between the expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(r2, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      mov(r0, Operand(actual.immediate()));
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
    } else {
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        b(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}


void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  mov(r4, Operand(step_in_enabled));
  ldrb(r4, MemOperand(r4));
  cmp(r4, Operand(0));
  b(eq, &skip_flooding);
  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
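    // Illustrative note: fun is pushed twice because the runtime call below
    // consumes one copy as its argument; the Pop(fun) afterwards restores
    // the other.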
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}


void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(r1));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(r3));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 call_wrapper);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
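    // I.e. (illustrative) the call target is re-loaded from the JSFunction
    // on every invocation rather than being baked into the call site.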
    Register code = r4;
    ldr(code, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      Call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      Jump(code);
    }

    // Continue here if InvokePrologue handled the invocation itself due to
    // mismatched parameter counts.
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(fun.is(r1));

  Register expected_reg = r2;
  Register temp_reg = r4;

  ldr(temp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  ldr(expected_reg,
      FieldMemOperand(temp_reg,
                      SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(expected_reg);

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(fun, new_target, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Contract with called JS functions requires that function is passed in r1.
  DCHECK(function.is(r1));

  // Get the function and set up the context.
  ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  InvokeFunctionCode(r1, no_reg, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(r1, function);
  InvokeFunction(r1, expected, actual, flag, call_wrapper);
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register scratch,
                                          Label* fail) {
  DCHECK(kNotStringTag != 0);

  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  tst(scratch, Operand(kIsNotStringMask));
  b(ne, fail);
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register scratch,
                                      Label* fail) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  cmp(scratch, Operand(LAST_NAME_TYPE));
  b(hi, fail);
}


void MacroAssembler::DebugBreak() {
  mov(r0, Operand::Zero());
  mov(r1,
      Operand(ExternalReference(Runtime::kHandleDebuggerStatement, isolate())));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}


void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

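  // In effect (an illustrative sketch):
  //   *--sp = *isolate->handler_address();  // old head becomes "next"
  //   *isolate->handler_address() = sp;     // this handler is the new head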
  // Link the current handler as the next handler.
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  ldr(r5, MemOperand(r6));
  push(r5);

  // Set this new handler as the current one.
  str(sp, MemOperand(r6));
}


void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  pop(r1);
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
  str(r1, MemOperand(ip));
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!holder_reg.is(ip));
  DCHECK(!scratch.is(ip));

  // Load current lexical context from the active StandardFrame, which
  // may require crawling past STUB frames.
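  // Illustrative equivalent of the walk below:
  //   frame = fp;
  //   while (is_smi(frame->context_or_frame_type))  // a marker, not a context
  //     frame = frame->caller_fp;
  //   scratch = frame->context;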
  Label load_context;
  Label has_context;
  DCHECK(!ip.is(scratch));
  mov(ip, fp);
  bind(&load_context);
  ldr(scratch, MemOperand(ip, CommonFrameConstants::kContextOrFrameTypeOffset));
  JumpIfNotSmi(scratch, &has_context);
  ldr(ip, MemOperand(ip, CommonFrameConstants::kCallerFPOffset));
  b(&load_context);
  bind(&has_context);

  // In debug mode, make sure the lexical context is set.
#ifdef DEBUG
  cmp(scratch, Operand::Zero());
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
#endif

  // Load the native context of the current context.
  ldr(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    // Read the first word and compare to the native_context_map.
    ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    pop(holder_reg);  // Restore holder.
  }

  // Check if both contexts are the same.
  ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Cannot use ip as a temporary in this verification code, because ip is
    // clobbered as part of cmp with an object Operand.
    push(holder_reg);  // Temporarily save holder on the stack.
    mov(holder_reg, ip);  // Move ip to its holding place.
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull);

    ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
    // Restoring ip is not needed. ip is reloaded below.
    pop(holder_reg);  // Restore holder.
    // Restore ip to holder's context.
    ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  }

  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;

  ldr(scratch, FieldMemOperand(scratch, token_offset));
  ldr(ip, FieldMemOperand(ip, token_offset));
  cmp(scratch, Operand(ip));
  b(ne, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiUntag(scratch);

  // Xor original key with a seed.
  eor(t0, t0, Operand(scratch));

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  // hash = hash ^ (hash >> 16);
  eor(t0, t0, Operand(t0, LSR, 16));
  bic(t0, t0, Operand(0xc0000000u));
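  // For reference, the sequence above corresponds to this C sketch
  // (32-bit unsigned arithmetic; the function name is illustrative):
  //   uint32_t Hash(uint32_t key, uint32_t seed) {
  //     uint32_t hash = key ^ seed;
  //     hash = ~hash + (hash << 15);
  //     hash = hash ^ (hash >> 12);
  //     hash = hash + (hash << 2);
  //     hash = hash ^ (hash >> 4);
  //     hash = hash * 2057;  // == hash + (hash << 3) + (hash << 11)
  //     hash = hash ^ (hash >> 16);
  //     return hash & 0x3fffffff;  // the bic above clears the top two bits
  //   }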
}


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register result,
                                              Register t0,
                                              Register t1,
                                              Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'elements'.
  //            Unchanged on bailout so 'key' or 'elements' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //
  // t1 - used to hold the capacity mask of the dictionary
  //
  // t2 - used for the index into the dictionary.
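  //
  // Illustrative pseudo-C of the lookup below:
  //   hash = GetNumberHash(key);
  //   for (i = 0; i < kNumberDictionaryProbes; i++) {
  //     index = (hash + probe_offset(i)) & (capacity - 1);
  //     if (dict->KeyAt(index) == key) goto found;
  //   }
  //   goto miss;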
  Label done;

  GetNumberHash(t0, t1);

  // Compute the capacity mask.
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  SmiUntag(t1);
  sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    mov(t2, t0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
    cmp(key, Operand(ip));
    if (i != kNumberDictionaryProbes - 1) {
      b(eq, &done);
    } else {
      b(ne, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  DCHECK_EQ(DATA, 0);
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  ldr(result, FieldMemOperand(t2, kValueOffset));
}


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register scratch1,
                              Register scratch2,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address register.
  Register top_address = scratch1;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
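    // Illustrative: if result is only word-aligned, one filler word is
    // written and result is bumped by kPointerSize, so the object itself
    // starts on a double-aligned boundary.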
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. We must preserve the ip register at this
  // point, so we cannot just use add().
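  // E.g. (illustrative) object_size == 0x3f0004 is added in two chunks,
  // 0x04 and then 0x3f0000, each of which fits an ARM 8-bit rotated
  // immediate, so no literal-pool load through ip is needed.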
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, LeaveCC, cond);
      source = result_end;
      cond = cc;
    }
  }

  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
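    // (Illustrative note: a folding dominator only checks the limit for the
    // combined allocation; the individual folded allocations bump the top
    // pointer later via FastAllocate, which performs no limit check.)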
    str(result_end, MemOperand(top_address));
  }

  // Tag object.
  add(result, result, Operand(kHeapObjectTag));
}


void MacroAssembler::Allocate(Register object_size, Register result,
                              Register result_end, Register scratch,
                              Label* gc_required, AllocationFlags flags) {
  DCHECK((flags & ALLOCATION_FOLDED) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Operand(0x7091));
      mov(scratch, Operand(0x7191));
      mov(result_end, Operand(0x7291));
    }
    jmp(gc_required);
    return;
  }

  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  // Check relative positions of allocation top and limit addresses.
  // The values must be adjacent in memory to allow the use of LDM.
  // Also, assert that the registers are numbered such that the values
  // are loaded in the correct order.
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  intptr_t top = reinterpret_cast<intptr_t>(allocation_top.address());
  intptr_t limit = reinterpret_cast<intptr_t>(allocation_limit.address());
  DCHECK((limit - top) == kPointerSize);
  DCHECK(result.code() < ip.code());

  // Set up allocation top address and allocation limit registers.
  Register top_address = scratch;
  // This code stores a temporary value in ip. This is OK, as the code below
  // does not need ip for implicit literal generation.
  Register alloc_limit = ip;
  mov(top_address, Operand(allocation_top));

  if ((flags & RESULT_CONTAINS_TOP) == 0) {
    // Load allocation top into result and allocation limit into alloc_limit.
    ldm(ia, top_address, result.bit() | alloc_limit.bit());
  } else {
    if (emit_debug_code()) {
      // Assert that result actually contains top on entry.
      ldr(alloc_limit, MemOperand(top_address));
      cmp(result, alloc_limit);
      Check(eq, kUnexpectedAllocationTop);
    }
    // Load allocation limit. Result already contains allocation top.
    ldr(alloc_limit, MemOperand(top_address, limit - top));
  }

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    if ((flags & PRETENURE) != 0) {
      cmp(result, Operand(alloc_limit));
      b(hs, gc_required);
    }
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top and bail out if new space is exhausted. Use result
  // to calculate the new top. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }

  cmp(result_end, Operand(alloc_limit));
  b(hi, gc_required);

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
    // The top pointer is not updated for allocation folding dominators.
    str(result_end, MemOperand(top_address));
  }

  // Tag object.
  add(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::FastAllocate(Register object_size, Register result,
                                  Register result_end, Register scratch,
                                  AllocationFlags flags) {
  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
  // is not specified. Other registers must not overlap.
  DCHECK(!AreAliased(object_size, result, scratch, ip));
  DCHECK(!AreAliased(result_end, result, scratch, ip));
  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  Register top_address = scratch;
  mov(top_address, Operand(allocation_top));
  ldr(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top using result. Object size may be in words so a shift is
  // required to get the number of bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    add(result_end, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
  } else {
    add(result_end, result, Operand(object_size), SetCC);
  }

  // Update allocation top. result_end temporarily holds the new top.
  if (emit_debug_code()) {
    tst(result_end, Operand(kObjectAlignmentMask));
    Check(eq, kUnalignedAllocationInNewSpace);
  }
  // The top pointer is not updated for allocation folding dominators.
  str(result_end, MemOperand(top_address));

  add(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::FastAllocate(int object_size, Register result,
                                  Register scratch1, Register scratch2,
                                  AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  DCHECK(!AreAliased(result, scratch1, scratch2, ip));

  // Make object size into bytes.
  if ((flags & SIZE_IN_WORDS) != 0) {
    object_size *= kPointerSize;
  }
  DCHECK_EQ(0, object_size & kObjectAlignmentMask);

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Set up allocation top address register.
  Register top_address = scratch1;
  Register result_end = scratch2;
  mov(top_address, Operand(allocation_top));
  ldr(result, MemOperand(top_address));

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    // Align the next allocation. Storing the filler map without checking top
    // is safe in new-space because the limit of the heap is aligned there.
    STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
    Label aligned;
    b(eq, &aligned);
    mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
    str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
    bind(&aligned);
  }

  // Calculate new top using result. Object size may be in words so a shift is
  // required to get the number of bytes. We must preserve the ip register at
  // this point, so we cannot just use add().
  DCHECK(object_size > 0);
  Register source = result;
  Condition cond = al;
  int shift = 0;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      shift += 2;
    } else {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      shift += 8;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(result_end, source, bits_operand, LeaveCC, cond);
      source = result_end;
      cond = cc;
    }
  }

  // The top pointer is not updated for allocation folding dominators.
  str(result_end, MemOperand(top_address));

  add(result, result, Operand(kHeapObjectTag));
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
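  // E.g. (illustrative) length == 3: scratch1 = 3 * 2 + kHeaderSize, then
  // rounded up to the next kObjectAlignment boundary by the and_() below.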
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
  add(scratch1, scratch1,
      Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate two-byte string in new space.
  Allocate(scratch1, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  InitializeNewString(result,
                      length,
                      Heap::kStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kCharSize == 1);
  add(scratch1, length,
      Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
  and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(scratch1, result, scratch2, scratch3, gc_required,
           NO_ALLOCATION_FLAGS);

  // Set the map, length and hash field.
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result,
                      length,
                      Heap::kConsStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result,
                      length,
                      Heap::kSlicedStringMapRootIndex,
                      scratch1,
                      scratch2);
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register length,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
}


void MacroAssembler::CompareObjectType(Register object,
                                       Register map,
                                       Register type_reg,
                                       InstanceType type) {
  const Register temp = type_reg.is(no_reg) ? ip : type_reg;

  ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, temp, type);
}


void MacroAssembler::CompareInstanceType(Register map,
                                         Register type_reg,
                                         InstanceType type) {
  // Registers map and type_reg can be ip. These two lines assert
  // that ip can be used with the two instructions (the constants
  // will never need ip).
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE < 256);
  ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmp(type_reg, Operand(type));
}


void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  DCHECK(!obj.is(ip));
  LoadRoot(ip, index);
  cmp(obj, ip);
}


void MacroAssembler::CheckFastElements(Register map,
                                       Register scratch,
                                       Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(ls, fail);
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
  b(hi, fail);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  b(hi, fail);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register value_reg,
    Register key_reg,
    Register elements_reg,
    Register scratch1,
    LowDwVfpRegister double_scratch,
    Label* fail,
    int elements_offset) {
  DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1));
  Label smi_value, store;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number
  CheckMap(value_reg,
           scratch1,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
  VFPCanonicalizeNaN(double_scratch);
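  // Illustrative rationale: FixedDoubleArray reserves one NaN bit pattern
  // to mark holes, so arbitrary NaNs are canonicalized before being stored
  // to keep them distinct from the hole.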
2465 b(&store);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002466
2467 bind(&smi_value);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002468 SmiToDouble(double_scratch, value_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002469
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002470 bind(&store);
2471 add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
2472 vstr(double_scratch,
2473 FieldMemOperand(scratch1,
2474 FixedDoubleArray::kHeaderSize - elements_offset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002475}
2476
2477
void MacroAssembler::CompareMap(Register obj,
                                Register scratch,
                                Handle<Map> map,
                                Label* early_success) {
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMap(scratch, map, early_success);
}


void MacroAssembler::CompareMap(Register obj_map,
                                Handle<Map> map,
                                Label* early_success) {
  cmp(obj_map, Operand(map));
}

void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, scratch, map, &success);
  b(ne, fail);
  bind(&success);
}


void MacroAssembler::CheckMap(Register obj,
                              Register scratch,
                              Heap::RootListIndex index,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  LoadRoot(ip, index);
  cmp(scratch, ip);
  b(ne, fail);
}

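// Compares the map of |obj| against the map held in the weak cell |cell| and
// tail-jumps to the |success| code object on a match. On a smi, a map
// mismatch, or a cleared cell, execution falls through instead.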
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  ldr(scratch1, FieldMemOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  Jump(success, RelocInfo::CODE_TARGET, eq);
  bind(&fail);
}


void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, Operand(cell));
  ldr(scratch, FieldMemOperand(scratch, WeakCell::kValueOffset));
  cmp(value, scratch);
}


void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, Operand(cell));
  ldr(value, FieldMemOperand(value, WeakCell::kValueOffset));
}


void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}

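// Follows the constructor-or-back-pointer chain from |map| until it reaches
// a value that is no longer a map (typically the constructor function,
// possibly a smi), leaving that value in |result|. |temp| and |temp2| are
// clobbered by the instance type checks.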
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp, Register temp2) {
  Label done, loop;
  ldr(result, FieldMemOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  JumpIfSmi(result, &done);
  CompareObjectType(result, temp, temp2, MAP_TYPE);
  b(ne, &done);
  ldr(result, FieldMemOperand(result, Map::kConstructorOrBackPointerOffset));
  b(&loop);
  bind(&done);
}

void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  ldr(result,
      FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  cmp(result, ip);
  b(eq, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  b(ne, &done);

  // Get the prototype from the initial map.
  ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}

void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
                              Condition cond) {
  DCHECK(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
}


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}

void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // If the hash field contains an array index pick it out. The assert checks
  // that the constants for the maximum number of digits for an array index
  // cached in the hash field and the number of bits reserved for it do not
  // conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
}

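// Converts the smi in |smi| to a double in |value|. With VFP3, the tagged
// word is moved into the register as-is and converted with one fractional
// bit, which halves it and thereby undoes the smi tag shift in the same
// instruction; otherwise the smi is untagged into ip first.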
void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
  if (CpuFeatures::IsSupported(VFP3)) {
    vmov(value.low(), smi);
    vcvt_f64_s32(value, 1);
  } else {
    SmiUntag(ip, smi);
    vmov(value.low(), ip);
    vcvt_f64_s32(value, value.low());
  }
}

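// Sets the eq condition if |double_input| is exactly representable as a
// signed 32-bit integer: the value is converted to int32 and back, then
// compared against the original.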
void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
                                       LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}


void MacroAssembler::TryDoubleToInt32Exact(Register result,
                                           DwVfpRegister double_input,
                                           LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
}

void MacroAssembler::TryInt32Floor(Register result,
                                   DwVfpRegister double_input,
                                   Register input_high,
                                   LowDwVfpRegister double_scratch,
                                   Label* done,
                                   Label* exact) {
  DCHECK(!result.is(input_high));
  DCHECK(!double_input.is(double_scratch));
  Label negative, exception;

  VmovHigh(input_high, double_input);

  // Test for NaN and infinities.
  Sbfx(result, input_high,
       HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  cmp(result, Operand(-1));
  b(eq, &exception);
  // Test for values that can be exactly represented as a
  // signed 32-bit integer.
  TryDoubleToInt32Exact(result, double_input, double_scratch);
  // If exact, return (result already fetched).
  b(eq, exact);
  cmp(input_high, Operand::Zero());
  b(mi, &negative);

  // Input is in ]+0, +inf[.
  // If result equals 0x7fffffff, the input was out of range or in
  // ]0x7fffffff, 0x80000000[. We ignore this last case, which could fit
  // into an int32; that means we always treat such input as out of range
  // and go to the exception path.
  // If result < 0x7fffffff, go to done, result fetched.
  cmn(result, Operand(1));
  b(mi, &exception);
  b(done);

  // Input is in ]-inf, -0[.
  // If x is a non-integer negative number,
  // floor(x) <=> round_to_zero(x) - 1.
  bind(&negative);
  sub(result, result, Operand(1), SetCC);
  // If result is still negative, go to done, result fetched.
  // Else, we had an overflow and we fall through to the exception path.
  b(mi, done);
  bind(&exception);
}

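// Fast path for double-to-int32 truncation: vcvt_s32_f64 saturates
// out-of-range inputs to 0x7fffffff or 0x80000000, so any other result is
// known to be the correct truncation and branches to |done|. Saturated
// results fall through so the caller can run a slow path.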
void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DwVfpRegister double_input,
                                                Label* done) {
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());

  // If result is not saturated (0x7fffffff or 0x80000000), we are done.
  sub(ip, result, Operand(1));
  cmp(ip, Operand(0x7ffffffe));
  b(lt, done);
}


void MacroAssembler::TruncateDoubleToI(Register result,
                                       DwVfpRegister double_input) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through, the inline version didn't succeed - call the stub.
  push(lr);
  sub(sp, sp, Operand(kDoubleSize));  // Put input on stack.
  vstr(double_input, MemOperand(sp, 0));

  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
  CallStub(&stub);

  add(sp, sp, Operand(kDoubleSize));
  pop(lr);

  bind(&done);
}

void MacroAssembler::TruncateHeapNumberToI(Register result,
                                           Register object) {
  Label done;
  LowDwVfpRegister double_scratch = kScratchDoubleReg;
  DCHECK(!result.is(object));

  vldr(double_scratch,
       MemOperand(object, HeapNumber::kValueOffset - kHeapObjectTag));
  TryInlineTruncateDoubleToI(result, double_scratch, &done);

  // If we fell through, the inline version didn't succeed - call the stub.
  push(lr);
  DoubleToIStub stub(isolate(),
                     object,
                     result,
                     HeapNumber::kValueOffset - kHeapObjectTag,
                     true,
                     true);
  CallStub(&stub);
  pop(lr);

  bind(&done);
}


void MacroAssembler::TruncateNumberToI(Register object,
                                       Register result,
                                       Register heap_number_map,
                                       Register scratch1,
                                       Label* not_number) {
  Label done;
  DCHECK(!result.is(object));

  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
  TruncateHeapNumberToI(result, object);

  bind(&done);
}

void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         Register src,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
    ubfx(dst, src, kSmiTagSize, num_least_bits);
  } else {
    SmiUntag(dst, src);
    and_(dst, dst, Operand((1 << num_least_bits) - 1));
  }
}


void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           Register src,
                                           int num_least_bits) {
  and_(dst, src, Operand((1 << num_least_bits) - 1));
}

void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // All parameters are on the stack. r0 has the return value after call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f, isolate())));
  CEntryStub stub(isolate(), 1, save_doubles);
  CallStub(&stub);
}

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(r0, Operand(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  // Thumb mode builtin.
  DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
  mov(r1, Operand(builtin));
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}

void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    add(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    ldr(scratch1, MemOperand(scratch2));
    sub(scratch1, scratch1, Operand(value));
    str(scratch1, MemOperand(scratch2));
  }
}

void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
  if (emit_debug_code())
    Check(cond, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    DCHECK(!elements.is(ip));
    Label ok;
    push(elements);
    ldr(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    cmp(elements, ip);
    b(eq, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
    pop(elements);
  }
}

void MacroAssembler::Check(Condition cond, BailoutReason reason) {
  Label L;
  b(cond, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}

void MacroAssembler::Abort(BailoutReason reason) {
  Label abort_start;
  bind(&abort_start);
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    stop(msg);
    return;
  }
#endif

  mov(r0, Operand(Smi::FromInt(reason)));
  push(r0);

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort);
  } else {
    CallRuntime(Runtime::kAbort);
  }
  // will not return here
  if (is_const_pool_blocked()) {
    // If the calling code cares about the exact number of
    // instructions generated, we insert padding here to keep the size
    // of the Abort macro constant.
    static const int kExpectedAbortInstructions = 7;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
      nop();
    }
  }
}

void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in cp).
    mov(dst, cp);
  }
}

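// Replaces |map_in_out| with the cached array map for |transitioned_kind|
// from the native context, provided |map_in_out| currently equals the cached
// map for |expected_kind|; jumps to |no_map_match| otherwise.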
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  ldr(scratch, NativeContextMemOperand());
  ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  cmp(map_in_out, ip);
  b(ne, no_map_match);

  // Use the transitioned cached map.
  ldr(map_in_out,
      ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}

void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  ldr(dst, NativeContextMemOperand());
  ldr(dst, ContextMemOperand(dst, index));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map,
                                                  Register scratch) {
  // Load the initial map. The global functions all have initial maps.
  ldr(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
    b(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}

void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Register reg,
    Register scratch,
    Label* not_power_of_two_or_zero) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, not_power_of_two_or_zero);
  tst(scratch, reg);
  b(ne, not_power_of_two_or_zero);
}


void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
    Register reg,
    Register scratch,
    Label* zero_and_neg,
    Label* not_power_of_two) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, zero_and_neg);
  tst(scratch, reg);
  b(ne, not_power_of_two);
}

void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Register reg2,
                                      Label* on_not_both_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), eq);
  b(ne, on_not_both_smi);
}

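// SmiUntag with SetCC shifts the tag bit out into the shifter carry, so one
// conditional branch distinguishes smis (carry clear) from heap objects
// (carry set) while the untagged value lands in |dst|.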
void MacroAssembler::UntagAndJumpIfSmi(
    Register dst, Register src, Label* smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cc, smi_case);  // Shifter carry is not set for a smi.
}


void MacroAssembler::UntagAndJumpIfNotSmi(
    Register dst, Register src, Label* non_smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  SmiUntag(dst, src, SetCC);
  b(cs, non_smi_case);  // Shifter carry is set for a non-smi.
}

void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  tst(reg1, Operand(kSmiTagMask));
  tst(reg2, Operand(kSmiTagMask), ne);
  b(eq, on_either_smi);
}

void MacroAssembler::AssertNotNumber(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsANumber);
    push(object);
    CompareObjectType(object, object, object, HEAP_NUMBER_TYPE);
    pop(object);
    Check(ne, kOperandIsANumber);
  }
}

void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmi);
  }
}

void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(eq, kOperandIsNotSmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAString);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(lo, kOperandIsNotAString);
  }
}

void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAName);
    push(object);
    ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(object, object, LAST_NAME_TYPE);
    pop(object);
    Check(le, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAFunction);
    push(object);
    CompareObjectType(object, object, object, JS_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotAFunction);
  }
}

void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotABoundFunction);
    push(object);
    CompareObjectType(object, object, object, JS_BOUND_FUNCTION_TYPE);
    pop(object);
    Check(eq, kOperandIsNotABoundFunction);
  }
}

void MacroAssembler::AssertGeneratorObject(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAGeneratorObject);
    push(object);
    CompareObjectType(object, object, object, JS_GENERATOR_OBJECT_TYPE);
    pop(object);
    Check(eq, kOperandIsNotAGeneratorObject);
  }
}

void MacroAssembler::AssertReceiver(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
    Check(ne, kOperandIsASmiAndNotAReceiver);
    push(object);
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    CompareObjectType(object, object, object, FIRST_JS_RECEIVER_TYPE);
    pop(object);
    Check(hs, kOperandIsNotAReceiver);
  }
}

void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    CompareRoot(object, Heap::kUndefinedValueRootIndex);
    b(eq, &done_checking);
    ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    CompareRoot(reg, index);
    Check(eq, kHeapNumberMapRegisterClobbered);
  }
}

void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Register scratch,
                                         Label* on_not_heap_number) {
  ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  cmp(scratch, heap_number_map);
  b(ne, on_not_heap_number);
}

void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential one-byte strings.
  // Assume that they are non-smis.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));

  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
                                                 scratch2, failure);
}

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
                                                           Register second,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that neither is a smi.
  and_(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
                                               scratch2, failure);
}

void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  b(eq, &succeed);
  cmp(reg, Operand(SYMBOL_TYPE));
  b(ne, not_unique_name);

  bind(&succeed);
}

// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Register heap_number_map,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate an object in the heap for the heap number and tag it as a heap
  // object.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);

  // Store heap number map in the allocated object.
  str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
}

void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  sub(scratch1, result, Operand(kHeapObjectTag));
  vstr(value, scratch1, HeapNumber::kValueOffset);
}

void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch1,
                                     Register scratch2, Label* gc_required) {
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch1, scratch2, gc_required,
           NO_ALLOCATION_FLAGS);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch1, scratch2);
  str(scratch1, FieldMemOperand(result, HeapObject::kMapOffset));
  LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  str(scratch1, FieldMemOperand(result, JSObject::kPropertiesOffset));
  str(scratch1, FieldMemOperand(result, JSObject::kElementsOffset));
  str(value, FieldMemOperand(result, JSValue::kValueOffset));
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}

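// Copies |length| bytes from |src| to |dst|, post-incrementing both. Bytes
// are copied singly until |src| is word-aligned, then in word-sized chunks
// (stored byte-wise when unaligned accesses are unavailable), with a final
// byte loop for the tail. |length| and |scratch| are clobbered.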
void MacroAssembler::CopyBytes(Register src,
                               Register dst,
                               Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;

  // Align src before copying in word size chunks.
  cmp(length, Operand(kPointerSize));
  b(le, &byte_loop);

  bind(&align_loop_1);
  tst(src, Operand(kPointerSize - 1));
  b(eq, &word_loop);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(&align_loop_1);
  // Copy bytes in word size chunks.
  bind(&word_loop);
  if (emit_debug_code()) {
    tst(src, Operand(kPointerSize - 1));
    Assert(eq, kExpectingAlignmentForCopyBytes);
  }
  cmp(length, Operand(kPointerSize));
  b(lt, &byte_loop);
  ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
  if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) {
    str(scratch, MemOperand(dst, kPointerSize, PostIndex));
  } else {
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
    mov(scratch, Operand(scratch, LSR, 8));
    strb(scratch, MemOperand(dst, 1, PostIndex));
  }
  sub(length, length, Operand(kPointerSize));
  b(&word_loop);

  // Copy the last bytes if any left.
  bind(&byte_loop);
  cmp(length, Operand::Zero());
  b(eq, &done);
  bind(&byte_loop_1);
  ldrb(scratch, MemOperand(src, 1, PostIndex));
  strb(scratch, MemOperand(dst, 1, PostIndex));
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);
  bind(&done);
}

void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  b(&entry);
  bind(&loop);
  str(filler, MemOperand(current_address, kPointerSize, PostIndex));
  bind(&entry);
  cmp(current_address, end_address);
  b(lo, &loop);
}

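// Loads the cached CPU feature bits and tests the VFP32DREGS flag, leaving
// ne set when the upper sixteen double registers (d16-d31) are available.
// SaveFPRegs and RestoreFPRegs below predicate their d16-d31 transfers on
// these flags.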
void MacroAssembler::CheckFor32DRegs(Register scratch) {
  mov(scratch, Operand(ExternalReference::cpu_features()));
  ldr(scratch, MemOperand(scratch));
  tst(scratch, Operand(1u << VFP32DREGS));
}

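// Saves, and below restores, all double registers at |location|: d16-d31
// only where present (the eq-conditional pointer adjustment skips their
// slots otherwise, keeping the frame layout identical), followed by d0-d15.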
void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vstm(db_w, location, d16, d31, ne);
  sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
  vstm(db_w, location, d0, d15);
}


void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);
  vldm(ia_w, location, d0, d15);
  vldm(ia_w, location, d16, d31, ne);
  add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
}

void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
    Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch1, first, Operand(kFlatOneByteStringMask));
  and_(scratch2, second, Operand(kFlatOneByteStringMask));
  cmp(scratch1, Operand(kFlatOneByteStringTag));
  // Ignore second test if first test failed.
  cmp(scratch2, Operand(kFlatOneByteStringTag), eq);
  b(ne, failure);
}

void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
                                                              Register scratch,
                                                              Label* failure) {
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  and_(scratch, type, Operand(kFlatOneByteStringMask));
  cmp(scratch, Operand(kFlatOneByteStringTag));
  b(ne, failure);
}

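// Per the ARM calling convention (AAPCS), the first four integer arguments
// are passed in r0-r3; any further arguments go on the stack.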
static const int kRegisterPassedArguments = 4;


int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (use_eabi_hardfloat()) {
    // In the hard floating point calling convention, we can use
    // all double registers to pass doubles.
    if (num_double_arguments > DoubleRegister::NumRegisters()) {
      stack_passed_words +=
          2 * (num_double_arguments - DoubleRegister::NumRegisters());
    }
  } else {
    // In the soft floating point calling convention, every double
    // argument is passed using two registers.
    num_reg_arguments += 2 * num_double_arguments;
  }
  // Up to four simple arguments are passed in registers r0..r3.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  return stack_passed_words;
}

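// Verifies that |string| is a sequential string with the encoding given by
// |encoding_mask| and that the untagged |index| is a non-negative index
// within its length, aborting with a bailout reason otherwise.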
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  SmiTst(string);
  Check(ne, kNonObject);

  ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
  ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));

  and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
  cmp(ip, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType);

  // The index is assumed to be untagged coming in; tag it to compare with
  // the string length without using a temp register. It is restored at the
  // end of this function.
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, index, &index_tag_bad);
  b(&index_tag_ok);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);

  ldr(ip, FieldMemOperand(string, String::kLengthOffset));
  cmp(index, ip);
  Check(lt, kIndexIsTooLarge);

  cmp(index, Operand(Smi::FromInt(0)));
  Check(ge, kIndexIsNegative);

  SmiUntag(index, index);
}

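// Reserves and, if needed, aligns stack space for the argument words
// computed by CalculateStackPassedWords, stashing the original sp above the
// arguments when extra alignment is applied. A typical, purely illustrative
// call sequence (the argument setup here is hypothetical) would be:
//
//   PrepareCallCFunction(1, 0, scratch);
//   mov(r0, Operand(42));
//   CallCFunction(ExternalReference(...), 1);
//
// CallCFunctionHelper releases the same space after the call returns.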
void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (frame_alignment > kPointerSize) {
    // Make stack end at alignment and make room for num_arguments - 4 words
    // and the original value of sp.
    mov(scratch, sp);
    sub(sp, sp, Operand((stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(sp, sp, Operand(-frame_alignment));
    str(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    sub(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}


void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}

void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
  DCHECK(src.is(d0));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src);
  }
}


// On ARM this is just a synonym to make the purpose clear.
void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
  MovToFloatParameter(src);
}


void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
                                          DwVfpRegister src2) {
  DCHECK(src1.is(d0));
  DCHECK(src2.is(d1));
  if (!use_eabi_hardfloat()) {
    vmov(r0, r1, src1);
    vmov(r2, r3, src2);
  }
}

void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  mov(ip, Operand(function));
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}

void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK(has_frame());
  // Make sure that the stack is aligned before calling a C function unless
  // running in the simulator. The simulator has its own alignment check which
  // provides more information.
#if V8_HOST_ARCH_ARM
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    if (frame_alignment > kPointerSize) {
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      Label alignment_as_expected;
      tst(sp, Operand(frame_alignment_mask));
      b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort possibly
      // re-entering here.
      stop("Unexpected alignment");
      bind(&alignment_as_expected);
    }
  }
#endif

  // Just call directly. The function called cannot cause a GC, or
  // allow preemption, so the return address in the link register
  // stays correct.
  Call(function);
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
  if (ActivationFrameAlignment() > kPointerSize) {
    ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    add(sp, sp, Operand(stack_passed_arguments * kPointerSize));
  }
}

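// Clearing the low kPageSizeBits of |object| with Bfc yields the start of
// its MemoryChunk, whose flags word is then tested against |mask|; branches
// to |condition_met| when the requested eq/ne condition holds.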
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met) {
  DCHECK(cc == eq || cc == ne);
  Bfc(scratch, object, 0, kPageSizeBits);
  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  tst(scratch, Operand(mask));
  b(cc, condition_met);
}

void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black) {
  HasColor(object, scratch0, scratch1, on_black, 1, 1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}

void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(ip, Operand(mask_scratch));
  b(first_bit == 1 ? eq : ne, &other_color);
  // Shift the mask left by one by adding it to itself.
  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
  b(eq, &word_boundary);
  tst(ip, Operand(mask_scratch));
  b(second_bit == 1 ? ne : eq, has_color);
  jmp(&other_color);

  bind(&word_boundary);
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
  tst(ip, Operand(1));
  b(second_bit == 1 ? ne : eq, has_color);
  bind(&other_color);
}

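// For the object at |addr_reg|, computes the address of the marking bitmap
// cell that covers it (into |bitmap_reg|) and a one-bit mask selecting the
// object's first mark bit within that cell (into |mask_reg|). Clobbers ip.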
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
  Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
  add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
  mov(ip, Operand(1));
  mov(mask_reg, Operand(ip, LSL, mask_reg));
}

void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
                                 Register mask_scratch, Register load_scratch,
                                 Label* value_is_white) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(mask_scratch, load_scratch);
  b(eq, value_is_white);
}

void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  usat(output_reg, 8, Operand(input_reg));
}


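// Clamps |input_reg| to the byte range [0, 255] in |result_reg|. Values of
// 255 and above (including +infinity) take the early-out path; everything
// below relies on vcvt_u32_f64, which already saturates negative inputs
// (and NaN) to zero.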
void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;

  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);

  // For inputs < 255 (including negative) vcvt_u32_f64 with round-to-nearest
  // rounding mode will provide the correct result.
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());

  bind(&done);
}

3737void MacroAssembler::LoadInstanceDescriptors(Register map,
3738 Register descriptors) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003739 ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
3740}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
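
// In effect (a sketch, relying on EnumLengthBits occupying the low bits of
// the loaded word, as asserted above):
//
//   dst = Smi::FromInt(bit_field3 & Map::EnumLengthBits::kMask);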


void MacroAssembler::LoadAccessor(Register dst, Register holder,
                                  int accessor_index,
                                  AccessorComponent accessor) {
  ldr(dst, FieldMemOperand(holder, HeapObject::kMapOffset));
  LoadInstanceDescriptors(dst, dst);
  ldr(dst,
      FieldMemOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
  int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
                                           : AccessorPair::kSetterOffset;
  ldr(dst, FieldMemOperand(dst, offset));
}
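
// The loads above chase, roughly: holder -> map -> descriptor array ->
// value slot at accessor_index (an AccessorPair) -> its getter or setter,
// as selected by 'accessor'.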


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Register null_value = r5;
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);

  LoadRoot(null_value, Heap::kNullValueRootIndex);
  jmp(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);

  // Check that there are no elements. Register r2 contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance: the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}
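
// A pseudo-C sketch of the walk above (r0 holds the receiver on entry; names
// are illustrative only):
//
//   for (obj = receiver; obj != null; obj = obj->map->prototype) {
//     len = EnumLength(obj->map);
//     if (obj == receiver ? len == kInvalidEnumCacheSentinel : len != 0)
//       goto call_runtime;
//     if (obj->elements != empty_fixed_array &&
//         obj->elements != empty_slow_element_dictionary)
//       goto call_runtime;
//   }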

void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  Label map_check;
  Label top_check;
  ExternalReference new_space_allocation_top_adr =
      ExternalReference::new_space_allocation_top_address(isolate());
  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;

  // Bail out if the object is not in new space.
  JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
  // If the object is in new space, we need to check whether it is on the same
  // page as the current top.
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  mov(ip, Operand(new_space_allocation_top_adr));
  ldr(ip, MemOperand(ip));
  eor(scratch_reg, scratch_reg, Operand(ip));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(eq, &top_check);
  // The object is on a different page than the allocation top. Bail out if
  // the object sits on the page boundary, as no memento can follow and we
  // cannot touch the memory following it.
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  eor(scratch_reg, scratch_reg, Operand(receiver_reg));
  tst(scratch_reg, Operand(~Page::kPageAlignmentMask));
  b(ne, no_memento_found);
  // Continue with the actual map check.
  jmp(&map_check);
  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  bind(&top_check);
  add(scratch_reg, receiver_reg, Operand(kMementoEndOffset));
  mov(ip, Operand(new_space_allocation_top_adr));
  ldr(ip, MemOperand(ip));
  cmp(scratch_reg, ip);
  b(gt, no_memento_found);
  // Memento map check.
  bind(&map_check);
  ldr(scratch_reg, MemOperand(receiver_reg, kMementoMapOffset));
  cmp(scratch_reg, Operand(isolate()->factory()->allocation_memento_map()));
}
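
// Layout assumed by the offsets above: a memento, when present, sits
// immediately after the array,
//
//   receiver_reg --> [ JSArray           ]  JSArray::kSize bytes
//                    [ AllocationMemento ]  map word at kMementoMapOffset
//
// and the page/top checks only establish that reading the memento map slot
// cannot touch unallocated or out-of-page memory.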

Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}
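
// Typical use (a sketch; the trailing parameters are assumed to default to
// no_reg in the declaration):
//
//   Register tmp = GetRegisterThatIsNotOneOf(object, scratch0, scratch1);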


void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again, end;

  // Start at the prototype of the given object; bail out early if it is null.
  mov(current, object);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(eq, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));

  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  ldrb(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  cmp(scratch1, Operand(JS_OBJECT_TYPE));
  b(lo, found);

  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  b(ne, &loop_again);

  bind(&end);
}
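
// Roughly equivalent, in pseudo-C:
//
//   for (o = object->map->prototype; o != null; o = o->map->prototype) {
//     if (o->map->instance_type < JS_OBJECT_TYPE) goto found;  // proxy etc.
//     if (o->map->elements_kind == DICTIONARY_ELEMENTS) goto found;
//   }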


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif
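
// For example, AreAliased(r0, r1, r0) is true while AreAliased(r0, r1) is
// false (assuming the remaining parameters default to no_reg): the valid
// count and the bit count of the combined RegList differ exactly when two
// arguments name the same register.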


CodePatcher::CodePatcher(Isolate* isolate, byte* address, int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(isolate, address, size_ + Assembler::kGap, CodeObjectRequired::kNo),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    Assembler::FlushICache(masm_.isolate(), address_, size_);
  }

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
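
// A sketch of typical use (assuming flush_cache defaults to FLUSH in the
// declaration): patch one instruction in place and let the destructor verify
// the emitted size and flush the i-cache.
//
//   {
//     CodePatcher patcher(isolate, pc, 1);
//     patcher.masm()->mov(r0, Operand(0));
//   }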


void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}


void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}


void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}
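
// EmitCondition rewrites only the condition field (kCondMask) of the
// instruction already at pc_ and re-emits it in place, e.g. (a sketch):
//
//   patcher.EmitCondition(eq);  // turns 'b <target>' into 'beq <target>'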


void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  bool neg = (mag.multiplier & (1U << 31)) != 0;
  if (divisor > 0 && neg) {
    smmla(result, dividend, ip, dividend);
  } else {
    smmul(result, dividend, ip);
    if (divisor < 0 && !neg && mag.multiplier > 0) {
      sub(result, result, Operand(dividend));
    }
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}
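
// A worked example (values from the usual magic-number construction, cf.
// Hacker's Delight; treat them as illustrative): for divisor == 3 the
// multiplier is 0x55555556 with shift == 0, so for a dividend n
//
//   result = (int32_t)(((int64_t)n * 0x55555556) >> 32) + ((uint32_t)n >> 31)
//
// e.g. n == -7 gives -3 + 1 == -2, matching -7 / 3 truncated towards zero.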

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM