// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "debug.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(void* buffer, int size)
    : Assembler(buffer, size),
      unresolved_(0),
      generating_stub_(false),
      allow_stub_calls_(true),
      code_object_(Heap::undefined_value()) {
}


// TODO(x64): For now, the write barrier is disabled on x64 and we
// therefore generate no code. This should be fixed when the write
// barrier is enabled.
void MacroAssembler::RecordWrite(Register object, int offset,
                                 Register value, Register scratch) {
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


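// Jumps to then_label if result is zero and op is negative, i.e. when the
// operation that produced result would have produced a negative zero.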
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  movq(kScratchRegister, stub->GetCode(), RelocInfo::CODE_TARGET);
  call(kScratchRegister);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
                                     int num_arguments) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces;
  movq(kScratchRegister, ces.GetCode(), RelocInfo::CODE_TARGET);
  jmp(kScratchRegister);
}


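// Loads the entry address of the JS builtin id into target. If the builtin's
// code object has not been generated yet, records an Unresolved entry so the
// bootstrapper can patch the reference later.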
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);

  movq(target, code, RelocInfo::EMBEDDED_OBJECT);
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(true);
    Unresolved entry = { pc_offset() - sizeof(intptr_t), flags, name };
    unresolved_.Add(entry);
  }
  addq(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
}


Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                            bool* resolved) {
  // Move the builtin function into the temporary function slot by
  // reading it from the builtins object. NOTE: We should be able to
  // reduce this to two instructions by putting the function table in
  // the global object instead of the "builtins" object and by using a
  // real register for the function.
  movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  movq(rdx, FieldOperand(rdx, GlobalObject::kBuiltinsOffset));
  int builtins_offset =
      JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
  movq(rdi, FieldOperand(rdx, builtins_offset));

  return Builtins::GetCode(id, resolved);
}


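// Loads the 64-bit immediate x into dst (or into the memory operand in the
// overload below), using the shortest encoding available: xor for zero, a
// 32-bit move when the value fits in 32 bits, and a full 64-bit move
// otherwise.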
void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xor_(dst, dst);
  } else if (is_int32(x)) {
    movq(dst, Immediate(x));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(x));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}


void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (x == 0) {
    xor_(kScratchRegister, kScratchRegister);
    movq(dst, kScratchRegister);
  } else if (is_int32(x)) {
    movq(dst, Immediate(x));
  } else if (is_uint32(x)) {
    movl(dst, Immediate(x));
  } else {
    movq(kScratchRegister, x, RelocInfo::NONE);
    movq(dst, kScratchRegister);
  }
}


bool MacroAssembler::IsUnsafeSmi(Smi* value) {
  return false;
}

void MacroAssembler::LoadUnsafeSmi(Register dst, Smi* source) {
  UNIMPLEMENTED();
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    if (IsUnsafeSmi(source)) {
      LoadUnsafeSmi(dst, source);
    } else {
      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
      movq(dst, Immediate(smi));
    }
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
    movq(dst, Immediate(smi));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  Move(kScratchRegister, source);
  cmpq(dst, kScratchRegister);
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    if (IsUnsafeSmi(source)) {
      LoadUnsafeSmi(kScratchRegister, source);
      cmpl(dst, kScratchRegister);
    } else {
      // For smi-comparison, it suffices to compare the low 32 bits.
      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
      cmpl(dst, Immediate(smi));
    }
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    if (IsUnsafeSmi(source)) {
      LoadUnsafeSmi(kScratchRegister, source);
      push(kScratchRegister);
    } else {
      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
      push(Immediate(smi));
    }
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


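// Pushes a Smi: safe smis are pushed as a 32-bit immediate; unsafe ones are
// loaded through LoadUnsafeSmi first (currently unimplemented on x64, and
// IsUnsafeSmi always returns false).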
void MacroAssembler::Push(Smi* source) {
  if (IsUnsafeSmi(source)) {
    LoadUnsafeSmi(kScratchRegister, source);
    push(kScratchRegister);
  } else {
    int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(source));
    push(Immediate(smi));
  }
}


void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  WriteRecordedPositions();
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  movq(kScratchRegister, code_object, rmode);
#ifdef DEBUG
  Label target;
  bind(&target);
#endif
  jmp(kScratchRegister);
#ifdef DEBUG
  ASSERT_EQ(kTargetAddrToReturnAddrDist,
            SizeOfCodeGeneratedSince(&target) + kPointerSize);
#endif
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  WriteRecordedPositions();
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  movq(kScratchRegister, code_object, rmode);
#ifdef DEBUG
  // The patch target is kPointerSize bytes *before* the target label.
  Label target;
  bind(&target);
#endif
  call(kScratchRegister);
#ifdef DEBUG
  ASSERT_EQ(kTargetAddrToReturnAddrDist,
            SizeOfCodeGeneratedSince(&target) + kPointerSize);
#endif
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
    if (type == TRY_CATCH_HANDLER) {
      push(Immediate(StackHandler::TRY_CATCH));
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(rbp);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::Ret() {
  ret(0);
}


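// Compares the two values on top of the FPU register stack, pops them, and
// transfers the FPU condition flags to the CPU flags register (via sahf) so
// the usual jcc instructions can branch on the result.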
void MacroAssembler::FCmp() {
  fcompp();
  push(rax);
  fnstsw_ax();
  // TODO(X64): Check that sahf is safe to use, using CPUProbe.
  sahf();
  pop(rax);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss) {
  // Check that the receiver isn't a smi.
  testl(function, Immediate(kSmiTagMask));
  j(zero, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  // Make sure that the function has an instance prototype.
  Label non_instance;
  testb(FieldOperand(result, Map::kBitFieldOffset),
        Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  movq(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  Cmp(result, Factory::the_hole_value());
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  movq(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  movq(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT

void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      push(Operand(kScratchRegister, 0));
    }
  }
}

void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(reg, Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      pop(Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT(!scratch.is(kScratchRegister));
  ASSERT(!base.is(kScratchRegister));
  ASSERT(!base.is(scratch));
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      movq(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}

#endif  // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
  bool resolved;
  Handle<Code> code = ResolveBuiltin(id, &resolved);

  // Calls are not allowed in some stubs.
  ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  InvokeCode(Handle<Code>(code), expected, expected,
             RelocInfo::CODE_TARGET, flag);

  const char* name = Builtins::GetName(id);
  int argc = Builtins::GetArgumentsCount(id);
  // The target address for the jump is stored as an immediate at offset
  // kInvokeCodeAddressOffset.
  if (!resolved) {
    uint32_t flags =
        Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
        Bootstrapper::FixupFlagsUseCodeObject::encode(false);
    Unresolved entry =
        { pc_offset() - kTargetAddrToReturnAddrDist, flags, name };
    unresolved_.Add(entry);
  }
}


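// Shared prologue for the InvokeCode/InvokeFunction family: compares the
// expected and actual argument counts and, when they might differ, calls or
// jumps to the arguments adaptor trampoline before the actual code is reached.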
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      movq(rax, Immediate(actual.immediate()));
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        movq(rbx, Immediate(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      movq(rax, Immediate(actual.immediate()));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    movq(kScratchRegister, adaptor, RelocInfo::CODE_TARGET);
    if (flag == CALL_FUNCTION) {
      call(kScratchRegister);
      jmp(done);
    } else {
      jmp(kScratchRegister);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  if (flag == CALL_FUNCTION) {
    Call(code, rmode);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    Jump(code, rmode);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movsxlq(rbx,
          FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  lea(rdx, FieldOperand(rdx, Code::kHeaderSize));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


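// Builds a standard internal frame: saved frame pointer, context, a
// Smi-encoded frame-type marker and the code object of the generated code.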
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  push(Immediate(Smi::FromInt(type)));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    movq(kScratchRegister, Immediate(Smi::FromInt(type)));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Setup the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  movq(r14, rax);  // Backup rax before we use it.

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);

  // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! rsp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for two arguments: argc and argv.
  subq(rsp, Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Registers:
  // r15 : argv
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register rbx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(rbx, Operand(rbp, kOffset));
    CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(rcx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
                                   JSObject* holder, Register holder_reg,
                                   Register scratch,
                                   Label* miss) {
  // Make sure there's no overlap between scratch and the other
  // registers.
  ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));

  // Keep track of the current object in register reg. On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 1;

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  while (object != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(object->GetPrototype());
    if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      Cmp(scratch, Handle<Map>(object->map()));
      // Branch on the result of the map check.
      j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      Cmp(FieldOperand(reg, HeapObject::kMapOffset),
          Handle<Map>(object->map()));
      // Branch on the result of the map check.
      j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (object->IsJSGlobalProxy()) {
        CheckAccessGlobalProxy(reg, scratch, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      Move(reg, Handle<JSObject>(prototype));
    }

    // Go to the next object in the prototype chain.
    object = prototype;
  }

  // Check the holder map.
  Cmp(FieldOperand(reg, HeapObject::kMapOffset),
      Handle<Map>(holder->map()));
  j(not_equal, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(object == holder);
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
  if (object->IsJSGlobalProxy()) {
    CheckAccessGlobalProxy(reg, scratch, miss);
  }
  return reg;
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch));
  ASSERT(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (FLAG_debug_code) {
    cmpq(scratch, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the global context of the current context.
  int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, offset));
  movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

  // Check the context is a global context.
  if (FLAG_debug_code) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
  }

  // Check if both contexts are the same.
  cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.

  // Check the context is a global context.
  if (FLAG_debug_code) {
    // Preserve original value of holder_reg.
    push(holder_reg);
    movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
    Cmp(holder_reg, Factory::null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to global_context_map().
    movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    Cmp(holder_reg, Factory::global_context_map());
    Check(equal, "JSGlobalObject::global_context should be a global context.");
    pop(holder_reg);
  }

  movq(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
  int token_offset = Context::kHeaderSize +
      Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movq(scratch, FieldOperand(scratch, token_offset));
  cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


} }  // namespace v8::internal