// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "code-stubs.h"
#include "bootstrapper.h"
#include "jsregexp.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
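// Each "__ op(...)" below expands, via ACCESS_MASM, into a call on the
// MacroAssembler passed to the stub generator as |masm|.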
41void FastNewClosureStub::Generate(MacroAssembler* masm) {
42 // Create a new closure from the given function info in new
43 // space. Set the context to the current context in esi.
44 Label gc;
45 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
46
47 // Get the function info from the stack.
48 __ mov(edx, Operand(esp, 1 * kPointerSize));
49
50 // Compute the function map in the current global context and set that
51 // as the map of the allocated object.
52 __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
53 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
54 __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
55 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
56
57 // Initialize the rest of the function. We don't have to update the
58 // write barrier because the allocated object is in new space.
59 __ mov(ebx, Immediate(Factory::empty_fixed_array()));
60 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
61 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
62 __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
63 Immediate(Factory::the_hole_value()));
64 __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
65 __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
66 __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
kasperl@chromium.orga5551262010-12-07 12:49:48 +000067 __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
68 Immediate(Factory::undefined_value()));
ricow@chromium.org65fae842010-08-25 15:26:24 +000069
70 // Initialize the code pointer in the function to be the one
71 // found in the shared function info object.
72 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
73 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
74 __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);
75
76 // Return and remove the on-stack parameter.
77 __ ret(1 * kPointerSize);
78
79 // Create a new closure through the slower runtime call.
80 __ bind(&gc);
81 __ pop(ecx); // Temporarily remove return address.
82 __ pop(edx);
83 __ push(esi);
84 __ push(edx);
vegorov@chromium.org21b5e952010-11-23 10:24:40 +000085 __ push(Immediate(Factory::false_value()));
ricow@chromium.org65fae842010-08-25 15:26:24 +000086 __ push(ecx); // Restore return address.
vegorov@chromium.org21b5e952010-11-23 10:24:40 +000087 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
ricow@chromium.org65fae842010-08-25 15:26:24 +000088}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Set up the fixed slots.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the surrounding context. We go through the
  // context in the function (ecx) to match the allocation behavior we have
  // in the runtime system (see Heap::AllocateFunctionContext).
  __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
  __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, Factory::undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  STATIC_ASSERT(kPointerSize == 4);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                           FixedArray::kHeaderSize));
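  // The literal index in eax is a smi (the index shifted left by one), so
  // scaling it by times_half_pointer_size above yields index * kPointerSize,
  // the byte offset of the slot in the literals FixedArray.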
  __ cmp(ecx, Factory::undefined_value());
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Handle<Map> expected_map;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map = Factory::fixed_array_map();
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map = Factory::fixed_cow_array_map();
    }
    __ push(ecx);
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
    __ Assert(equal, message);
    __ pop(ecx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
  NearLabel false_result, true_result, not_string;
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // 'null' => false.
  __ cmp(eax, Factory::null_value());
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &not_string);
  STATIC_ASSERT(kSmiTag == 0);
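  // String lengths are stored as smis and the smi encoding of zero is the
  // machine word 0, so the length can be compared against 0 directly.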
  __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  // HeapNumber => false iff +0, -0, or NaN.
  __ cmp(edx, Factory::heap_number_map());
  __ j(not_equal, &true_result);
  __ fldz();
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ FCmp();
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in eax.
  __ bind(&true_result);
  __ mov(eax, 1);
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ mov(eax, 0);
  __ ret(1 * kPointerSize);
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ mov(right_arg, right);
      } else if (right.is(right_arg)) {
        __ mov(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ mov(left_arg, right);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying left argument.
          __ mov(left_arg, left);
          __ mov(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ mov(right_arg, left);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying right argument.
          __ mov(right_arg, right);
          __ mov(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ mov(left_arg, left);
        __ mov(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(Immediate(right));
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (left.is(left_arg)) {
      __ mov(right_arg, Immediate(right));
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ mov(left_arg, Immediate(right));
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ mov(left_arg, left);
      __ mov(right_arg, Immediate(right));
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(Immediate(left));
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (right.is(right_arg)) {
      __ mov(left_arg, Immediate(left));
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ mov(right_arg, Immediate(left));
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ mov(right_arg, right);
      __ mov(left_arg, Immediate(left));
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


class FloatingPointHelper : public AllStatic {
 public:

  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Checks that the two floating point numbers on top of the FPU stack
  // have int32 values.
  static void CheckFloatOperandsAreInt32(MacroAssembler* masm,
                                         Label* non_int32);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             TypeInfo type_info,
                             bool use_sse3,
                             Label* operand_conversion_failure);
  static void LoadNumbersAsIntegers(MacroAssembler* masm,
                                    TypeInfo type_info,
                                    bool use_sse3,
                                    Label* operand_conversion_failure);
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Must only be called after LoadUnknownsAsIntegers. Assumes that the
  // operands are pushed on the stack, and that their conversions to int32
  // are in eax and ecx. Checks that the original numbers were in the int32
  // range.
  static void CheckLoadedIntegersWereInt32(MacroAssembler* masm,
                                           bool use_sse3,
                                           Label* not_int32);

  // Assumes that operands are smis or heap numbers and loads them
  // into xmm0 and xmm1. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);

  // Checks that the two floating point numbers loaded into xmm0 and xmm1
  // have int32 values.
  static void CheckSSE2OperandsAreInt32(MacroAssembler* masm,
                                        Label* non_int32,
                                        Register scratch);
};


void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division. Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    if (HasArgsInRegisters()) {
      __ mov(ebx, eax);
      __ mov(eax, edx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ mov(right, Operand(esp, 1 * kPointerSize));
    __ mov(left, Operand(esp, 2 * kPointerSize));
  }

  if (static_operands_type_.IsSmi()) {
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      __ or_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_AND) {
      __ and_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_XOR) {
      __ xor_(right, Operand(left));
      GenerateReturn(masm);
      return;
    }
  }

  // 2. Prepare the smi check of both operands by oring them together.
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result. Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
  STATIC_ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
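  // Since the smi tag is the single low-order zero bit, the bitwise or of the
  // two operands has its tag bit set iff at least one operand is not a smi,
  // so testing |combined| once covers both operands.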
  __ test(combined, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smis, not_taken);

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
      __ cmp(left, 0xc0000000);
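      // The untagged result fits in a smi iff it lies in
      // [-0x40000000, 0x3fffffff]; the comparison above sets the sign flag
      // exactly for values outside that range, sending them to the
      // heap-number path below.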
      __ j(sign, &use_fp_on_smis, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to negative when
      //   Smi tagging. These two cases can only happen with shifts
      //   by 0 or 1 when handed a valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, slow, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis, not_taken);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi so
      // save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis, not_taken);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by idiv
      // instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
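      // The quotient can only be 0x40000000 for -0x40000000 / -1, which is
      // one more than the largest value representable as a smi.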
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis, not_taken);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax.
  GenerateReturn(masm);

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::SHL: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
        // Result we want is in left == edx, so we can put the allocated heap
        // number in eax.
        __ AllocateHeapNumber(eax, ecx, ebx, slow);
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(left));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          // It's OK to overwrite the right argument on the stack because we
          // are about to return.
          __ mov(Operand(esp, 1 * kPointerSize), left);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        GenerateReturn(masm);
      } else {
        ASSERT(runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI);
        __ jmp(slow);
      }
      break;
    }

    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Restore arguments to edx, eax.
      switch (op_) {
        case Token::ADD:
          // Revert right = right + left.
          __ sub(right, Operand(left));
          break;
        case Token::SUB:
          // Revert left = left - right.
          __ add(left, Operand(right));
          break;
        case Token::MUL:
          // Right was clobbered but a copy is in ebx.
          __ mov(right, ebx);
          break;
        case Token::DIV:
          // Left was clobbered but a copy is in edi. Right is in ebx for
          // division.
          __ mov(edx, edi);
          __ mov(eax, right);
          break;
        default: UNREACHABLE();
          break;
      }
      if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
        __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          FloatingPointHelper::LoadSSE2Smis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
        } else {  // SSE2 not available, use FPU.
          FloatingPointHelper::LoadFloatSmis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
        }
        __ mov(eax, ecx);
        GenerateReturn(masm);
      } else {
        ASSERT(runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI);
        __ jmp(slow);
      }
      break;
    }

    default:
      break;
  }

  // 7. Non-smi operands, fall out to the non-smi code with the operands in
  // edx and eax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  switch (op_) {
    case Token::BIT_OR:
    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Right operand is saved in ecx and eax was destroyed by the smi
      // check.
      __ mov(eax, ecx);
      break;

    case Token::DIV:
    case Token::MOD:
      // Operands are in eax, ebx at this point.
      __ mov(edx, eax);
      __ mov(eax, ebx);
      break;

    default:
      break;
  }
}


void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);

  if (runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI) {
    Label slow;
    if (ShouldGenerateSmiCode()) GenerateSmiCode(masm, &slow);
    __ bind(&slow);
    GenerateTypeTransition(masm);
  }

  // Generate fast case smi code if requested. This flag is set when the fast
  // case smi code is not generated by the caller. Generating it here will
  // speed up common operations.
  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {  // MOD goes straight to runtime.
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }

  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument
          // occurs (and only if smi code is generated). This is the right
          // moment to patch to HEAP_NUMBERS state. The transition is
          // attempted only for the four basic operations. The stub stays in
          // the DEFAULT state forever for all other operations (also if smi
          // code is skipped).
          GenerateTypeTransition(masm);
          break;
        }

        Label not_floats;
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
            if (static_operands_type_.IsSmi()) {
              if (FLAG_debug_code) {
                __ AbortIfNotSmi(edx);
                __ AbortIfNotSmi(eax);
              }
              FloatingPointHelper::LoadSSE2Smis(masm, ecx);
            } else {
              FloatingPointHelper::LoadSSE2Operands(masm);
            }
          } else {
            FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
          }

          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          GenerateHeapResultAllocation(masm, &call_runtime);
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        } else {  // SSE2 not available, use FPU.
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
          } else {
            FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
          }
          FloatingPointHelper::LoadFloatOperands(
              masm,
              ecx,
              FloatingPointHelper::ARGS_IN_REGISTERS);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          Label after_alloc_failure;
          GenerateHeapResultAllocation(masm, &after_alloc_failure);
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          GenerateReturn(masm);
          __ bind(&after_alloc_failure);
          __ ffree();
          __ jmp(&call_runtime);
        }
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // Try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label non_smi_result;
        FloatingPointHelper::LoadAsIntegers(masm,
                                            static_operands_type_,
                                            use_sse3_,
                                            &call_runtime);
        switch (op_) {
          case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
          case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
          case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
          case Token::SAR: __ sar_cl(eax); break;
          case Token::SHL: __ shl_cl(eax); break;
          case Token::SHR: __ shr_cl(eax); break;
          default: UNREACHABLE();
        }
        if (op_ == Token::SHR) {
          // Check if result is non-negative and fits in a smi.
          __ test(eax, Immediate(0xc0000000));
          __ j(not_zero, &call_runtime);
        } else {
          // Check if result fits in a smi.
          __ cmp(eax, 0xc0000000);
          __ j(negative, &non_smi_result);
        }
        // Tag smi result and return.
        __ SmiTag(eax);
        GenerateReturn(masm);

        // All ops except SHR return a signed int32 that we load in
        // a HeapNumber.
        if (op_ != Token::SHR) {
          __ bind(&non_smi_result);
          // Allocate a heap number if needed.
          __ mov(ebx, Operand(eax));  // ebx: result
          NearLabel skip_allocation;
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                  1 * kPointerSize : 2 * kPointerSize));
              __ test(eax, Immediate(kSmiTagMask));
              __ j(not_zero, &skip_allocation, not_taken);
              // Fall through!
            case NO_OVERWRITE:
              __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(ebx));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), ebx);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
          GenerateReturn(masm);
        }
        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If the arguments were passed in registers, place them on the
  // stack in the correct order below the return address.

  // Avoid hitting the string ADD code below when allocation fails in
  // the floating point code above.
  if (op_ != Token::ADD) {
    __ bind(&call_runtime);
  }

  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Test for string arguments before calling runtime.

      // If this stub has already generated FP-specific code then the
      // arguments are already in edx and eax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      // Registers containing left and right operands respectively.
      Register lhs, rhs;
      if (HasArgsReversed()) {
        lhs = eax;
        rhs = edx;
      } else {
        lhs = edx;
        rhs = eax;
      }

      // Test if left operand is a string.
      NearLabel lhs_not_string;
      __ test(lhs, Immediate(kSmiTagMask));
      __ j(zero, &lhs_not_string);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &lhs_not_string);

      StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
      __ TailCallStub(&string_add_left_stub);

      NearLabel call_runtime_with_args;
      // Left operand is not a string, test right.
      __ bind(&lhs_not_string);
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &call_runtime_with_args);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &call_runtime_with_args);

      StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
      __ TailCallStub(&string_add_right_stub);

      // Neither argument is a string.
      __ bind(&call_runtime);
      if (HasArgsInRegisters()) {
        GenerateRegisterArgsPush(masm);
      }
      __ bind(&call_runtime_with_args);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


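// Allocates the heap number that will hold a non-smi result. In the
// overwrite modes the stub may reuse the heap number of the operand that the
// expression is permitted to clobber instead of allocating a fresh one.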
void GenericBinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                       Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  if (HasArgsReversed()) {
    if (mode == OVERWRITE_RIGHT) {
      mode = OVERWRITE_LEFT;
    } else if (mode == OVERWRITE_LEFT) {
      mode = OVERWRITE_RIGHT;
    }
  }
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use object in edx as a result holder.
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  // If arguments are not passed in registers read them from the stack.
  ASSERT(!HasArgsInRegisters());
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers remove them from the stack
  // before returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(ecx);
  if (HasArgsReversed()) {
    __ push(eax);
    __ push(edx);
  } else {
    __ push(edx);
    __ push(eax);
  }
  __ push(ecx);
}


void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  // Ensure the operands are on the stack.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  __ pop(ecx);  // Save return address.

  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(runtime_operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      5,
      1);
}


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


Handle<Code> GetTypeRecordingBinaryOpStub(int key,
    TRBinaryOpIC::TypeInfo type_info,
    TRBinaryOpIC::TypeInfo result_type_info) {
  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
  return stub.GetCode();
}


void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  __ push(edx);
  __ push(eax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
      5,
      1);
}


// Prepare for a type transition runtime call when the args are already on
// the stack, under the return address.
void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs(
    MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  // Left and right arguments are already on top of the stack.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
      5,
      1);
}


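// The type-recording stub is specialized on the operand types recorded by
// the IC. Each case below emits code that assumes those types and falls back
// to GenerateTypeTransition when the assumption is violated, patching the
// call site to a more general variant of the stub.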
void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
  switch (operands_type_) {
    case TRBinaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case TRBinaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case TRBinaryOpIC::INT32:
      GenerateInt32Stub(masm);
      break;
    case TRBinaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case TRBinaryOpIC::STRING:
      GenerateStringStub(masm);
      break;
    case TRBinaryOpIC::GENERIC:
      GenerateGeneric(masm);
      break;
    default:
      UNREACHABLE();
  }
}


const char* TypeRecordingBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "TypeRecordingBinaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               TRBinaryOpIC::GetName(operands_type_));
  return name_;
}
1356
1357
1358void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
1359 Label* slow,
1360 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
1361 // 1. Move arguments into edx, eax except for DIV and MOD, which need the
1362 // dividend in eax and edx free for the division. Use eax, ebx for those.
1363 Comment load_comment(masm, "-- Load arguments");
1364 Register left = edx;
1365 Register right = eax;
1366 if (op_ == Token::DIV || op_ == Token::MOD) {
1367 left = eax;
1368 right = ebx;
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00001369 __ mov(ebx, eax);
1370 __ mov(eax, edx);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001371 }
1372
1373
1374 // 2. Prepare the smi check of both operands by oring them together.
1375 Comment smi_check_comment(masm, "-- Smi check arguments");
1376 Label not_smis;
1377 Register combined = ecx;
1378 ASSERT(!left.is(combined) && !right.is(combined));
1379 switch (op_) {
1380 case Token::BIT_OR:
1381 // Perform the operation into eax and smi check the result. Preserve
1382 // eax in case the result is not a smi.
1383 ASSERT(!left.is(ecx) && !right.is(ecx));
1384 __ mov(ecx, right);
1385 __ or_(right, Operand(left)); // Bitwise or is commutative.
1386 combined = right;
1387 break;
1388
1389 case Token::BIT_XOR:
1390 case Token::BIT_AND:
1391 case Token::ADD:
1392 case Token::SUB:
1393 case Token::MUL:
1394 case Token::DIV:
1395 case Token::MOD:
1396 __ mov(combined, right);
1397 __ or_(combined, Operand(left));
1398 break;
1399
1400 case Token::SHL:
1401 case Token::SAR:
1402 case Token::SHR:
1403 // Move the right operand into ecx for the shift operation, use eax
1404 // for the smi check register.
1405 ASSERT(!left.is(ecx) && !right.is(ecx));
1406 __ mov(ecx, right);
1407 __ or_(right, Operand(left));
1408 combined = right;
1409 break;
1410
1411 default:
1412 break;
1413 }
1414
1415 // 3. Perform the smi check of the operands.
1416 STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case.
1417 __ test(combined, Immediate(kSmiTagMask));
1418 __ j(not_zero, &not_smis, not_taken);
1419
1420 // 4. Operands are both smis, perform the operation leaving the result in
1421 // eax and check the result if necessary.
1422 Comment perform_smi(masm, "-- Perform smi operation");
1423 Label use_fp_on_smis;
1424 switch (op_) {
1425 case Token::BIT_OR:
1426 // Nothing to do.
1427 break;
1428
1429 case Token::BIT_XOR:
1430 ASSERT(right.is(eax));
1431 __ xor_(right, Operand(left)); // Bitwise xor is commutative.
1432 break;
1433
1434 case Token::BIT_AND:
1435 ASSERT(right.is(eax));
1436 __ and_(right, Operand(left)); // Bitwise and is commutative.
1437 break;
1438
1439 case Token::SHL:
1440 // Remove tags from operands (but keep sign).
1441 __ SmiUntag(left);
1442 __ SmiUntag(ecx);
1443 // Perform the operation.
1444 __ shl_cl(left);
1445 // Check that the *signed* result fits in a smi.
1446 __ cmp(left, 0xc0000000);
1447 __ j(sign, &use_fp_on_smis, not_taken);
1448 // Tag the result and store it in register eax.
1449 __ SmiTag(left);
1450 __ mov(eax, left);
1451 break;
1452
1453 case Token::SAR:
1454 // Remove tags from operands (but keep sign).
1455 __ SmiUntag(left);
1456 __ SmiUntag(ecx);
1457 // Perform the operation.
1458 __ sar_cl(left);
1459 // Tag the result and store it in register eax.
1460 __ SmiTag(left);
1461 __ mov(eax, left);
1462 break;
1463
1464 case Token::SHR:
1465 // Remove tags from operands (but keep sign).
1466 __ SmiUntag(left);
1467 __ SmiUntag(ecx);
1468 // Perform the operation.
1469 __ shr_cl(left);
1470 // Check that the *unsigned* result fits in a smi.
1471 // Neither of the two high-order bits can be set:
1472 // - 0x80000000: high bit would be lost when smi tagging.
1473 // - 0x40000000: this number would convert to negative when
1474 // Smi tagging these two cases can only happen with shifts
1475 // by 0 or 1 when handed a valid smi.
1476 __ test(left, Immediate(0xc0000000));
1477 __ j(not_zero, slow, not_taken);
1478 // Tag the result and store it in register eax.
1479 __ SmiTag(left);
1480 __ mov(eax, left);
1481 break;
1482
1483 case Token::ADD:
1484 ASSERT(right.is(eax));
1485 __ add(right, Operand(left)); // Addition is commutative.
1486 __ j(overflow, &use_fp_on_smis, not_taken);
1487 break;
1488
1489 case Token::SUB:
1490 __ sub(left, Operand(right));
1491 __ j(overflow, &use_fp_on_smis, not_taken);
1492 __ mov(eax, left);
1493 break;
1494
1495 case Token::MUL:
1496 // If the smi tag is 0 we can just leave the tag on one operand.
1497 STATIC_ASSERT(kSmiTag == 0); // Adjust code below if not the case.
1498 // We can't revert the multiplication if the result is not a smi
1499 // so save the right operand.
1500 __ mov(ebx, right);
1501 // Remove tag from one of the operands (but keep sign).
1502 __ SmiUntag(right);
1503 // Do multiplication.
1504 __ imul(right, Operand(left)); // Multiplication is commutative.
1505 __ j(overflow, &use_fp_on_smis, not_taken);
1506 // Check for negative zero result. Use combined = left | right.
1507 __ NegativeZeroTest(right, combined, &use_fp_on_smis);
1508 break;
1509
1510 case Token::DIV:
1511 // We can't revert the division if the result is not a smi so
1512 // save the left operand.
1513 __ mov(edi, left);
1514 // Check for 0 divisor.
1515 __ test(right, Operand(right));
1516 __ j(zero, &use_fp_on_smis, not_taken);
1517 // Sign extend left into edx:eax.
1518 ASSERT(left.is(eax));
1519 __ cdq();
1520 // Divide edx:eax by right.
1521 __ idiv(right);
1522 // Check for the corner case of dividing the most negative smi by
1523 // -1. We cannot use the overflow flag, since it is not set by idiv
1524 // instruction.
1525 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
1526 __ cmp(eax, 0x40000000);
1527 __ j(equal, &use_fp_on_smis);
1528 // Check for negative zero result. Use combined = left | right.
1529 __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
1530 // Check that the remainder is zero.
1531 __ test(edx, Operand(edx));
1532 __ j(not_zero, &use_fp_on_smis);
1533 // Tag the result and store it in register eax.
1534 __ SmiTag(eax);
1535 break;
1536
1537 case Token::MOD:
1538 // Check for 0 divisor.
1539 __ test(right, Operand(right));
1540 __ j(zero, &not_smis, not_taken);
1541
1542 // Sign extend left into edx:eax.
1543 ASSERT(left.is(eax));
1544 __ cdq();
1545 // Divide edx:eax by right.
1546 __ idiv(right);
1547 // Check for negative zero result. Use combined = left | right.
1548 __ NegativeZeroTest(edx, combined, slow);
1549 // Move remainder to register eax.
1550 __ mov(eax, edx);
1551 break;
1552
1553 default:
1554 UNREACHABLE();
1555 }
1556
1557 // 5. Emit return of result in eax. Some operations have registers pushed.
1558 switch (op_) {
1559 case Token::ADD:
1560 case Token::SUB:
1561 case Token::MUL:
1562 case Token::DIV:
1563 __ ret(0);
1564 break;
1565 case Token::MOD:
1566 case Token::BIT_OR:
1567 case Token::BIT_AND:
1568 case Token::BIT_XOR:
1569 case Token::SAR:
1570 case Token::SHL:
1571 case Token::SHR:
1572 __ ret(2 * kPointerSize);
1573 break;
1574 default:
1575 UNREACHABLE();
1576 }
1577
1578 // 6. For some operations emit inline code to perform floating point
1579 // operations on known smis (e.g., if the result of the operation
1580 // overflowed the smi range).
1581 if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) {
1582 __ bind(&use_fp_on_smis);
1583 switch (op_) {
1584 // Undo the effects of some operations, and some register moves.
1585 case Token::SHL:
1586 // The arguments are saved on the stack, and only used from there.
1587 break;
1588 case Token::ADD:
1589 // Revert right = right + left.
1590 __ sub(right, Operand(left));
1591 break;
1592 case Token::SUB:
1593 // Revert left = left - right.
1594 __ add(left, Operand(right));
1595 break;
1596 case Token::MUL:
1597 // Right was clobbered but a copy is in ebx.
1598 __ mov(right, ebx);
1599 break;
1600 case Token::DIV:
1601 // Left was clobbered but a copy is in edi. Right is in ebx for
1602        // division. They should be in eax, ebx for the jump to not_smis.
1603 __ mov(eax, edi);
1604 break;
1605 default:
1606 // No other operators jump to use_fp_on_smis.
1607 break;
1608 }
1609 __ jmp(&not_smis);
1610 } else {
1611 ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
1612 switch (op_) {
1613 case Token::SHL: {
1614 Comment perform_float(masm, "-- Perform float operation on smis");
1615 __ bind(&use_fp_on_smis);
1616 // Result we want is in left == edx, so we can put the allocated heap
1617 // number in eax.
1618 __ AllocateHeapNumber(eax, ecx, ebx, slow);
1619 // Store the result in the HeapNumber and return.
1620 if (CpuFeatures::IsSupported(SSE2)) {
1621 CpuFeatures::Scope use_sse2(SSE2);
1622 __ cvtsi2sd(xmm0, Operand(left));
1623 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1624 } else {
1625 // It's OK to overwrite the right argument on the stack because we
1626 // are about to return.
1627 __ mov(Operand(esp, 1 * kPointerSize), left);
1628 __ fild_s(Operand(esp, 1 * kPointerSize));
1629 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1630 }
1631 __ ret(2 * kPointerSize);
1632 break;
1633 }
1634
1635 case Token::ADD:
1636 case Token::SUB:
1637 case Token::MUL:
1638 case Token::DIV: {
1639 Comment perform_float(masm, "-- Perform float operation on smis");
1640 __ bind(&use_fp_on_smis);
1641 // Restore arguments to edx, eax.
1642 switch (op_) {
1643 case Token::ADD:
1644 // Revert right = right + left.
1645 __ sub(right, Operand(left));
1646 break;
1647 case Token::SUB:
1648 // Revert left = left - right.
1649 __ add(left, Operand(right));
1650 break;
1651 case Token::MUL:
1652 // Right was clobbered but a copy is in ebx.
1653 __ mov(right, ebx);
1654 break;
1655 case Token::DIV:
1656 // Left was clobbered but a copy is in edi. Right is in ebx for
1657 // division.
1658 __ mov(edx, edi);
1659 __ mov(eax, right);
1660 break;
1661 default: UNREACHABLE();
1662 break;
1663 }
1664 __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
1665 if (CpuFeatures::IsSupported(SSE2)) {
1666 CpuFeatures::Scope use_sse2(SSE2);
1667 FloatingPointHelper::LoadSSE2Smis(masm, ebx);
1668 switch (op_) {
1669 case Token::ADD: __ addsd(xmm0, xmm1); break;
1670 case Token::SUB: __ subsd(xmm0, xmm1); break;
1671 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1672 case Token::DIV: __ divsd(xmm0, xmm1); break;
1673 default: UNREACHABLE();
1674 }
1675 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
1676 } else { // SSE2 not available, use FPU.
1677 FloatingPointHelper::LoadFloatSmis(masm, ebx);
1678 switch (op_) {
1679 case Token::ADD: __ faddp(1); break;
1680 case Token::SUB: __ fsubp(1); break;
1681 case Token::MUL: __ fmulp(1); break;
1682 case Token::DIV: __ fdivp(1); break;
1683 default: UNREACHABLE();
1684 }
1685 __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
1686 }
1687 __ mov(eax, ecx);
1688 __ ret(0);
1689 break;
1690 }
1691
1692 default:
1693 break;
1694 }
1695 }
1696
1697 // 7. Non-smi operands, fall out to the non-smi code with the operands in
1698 // edx and eax.
1699 Comment done_comment(masm, "-- Enter non-smi code");
1700 __ bind(&not_smis);
1701 switch (op_) {
1702 case Token::BIT_OR:
1703 case Token::SHL:
1704 case Token::SAR:
1705 case Token::SHR:
1706 // Right operand is saved in ecx and eax was destroyed by the smi
1707 // check.
1708 __ mov(eax, ecx);
1709 break;
1710
1711 case Token::DIV:
1712 case Token::MOD:
1713 // Operands are in eax, ebx at this point.
1714 __ mov(edx, eax);
1715 __ mov(eax, ebx);
1716 break;
1717
1718 default:
1719 break;
1720 }
1721}
1722
1723
1724void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1725 Label call_runtime;
1726
1727 switch (op_) {
1728 case Token::ADD:
1729 case Token::SUB:
1730 case Token::MUL:
1731 case Token::DIV:
1732 break;
1733 case Token::MOD:
1734 case Token::BIT_OR:
1735 case Token::BIT_AND:
1736 case Token::BIT_XOR:
1737 case Token::SAR:
1738 case Token::SHL:
1739 case Token::SHR:
1740 GenerateRegisterArgsPush(masm);
1741 break;
1742 default:
1743 UNREACHABLE();
1744 }
1745
1746 if (result_type_ == TRBinaryOpIC::UNINITIALIZED ||
1747 result_type_ == TRBinaryOpIC::SMI) {
1748 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
1749 } else {
1750 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1751 }
1752 __ bind(&call_runtime);
1753 switch (op_) {
1754 case Token::ADD:
1755 case Token::SUB:
1756 case Token::MUL:
1757 case Token::DIV:
1758 GenerateTypeTransition(masm);
1759 break;
1760 case Token::MOD:
1761 case Token::BIT_OR:
1762 case Token::BIT_AND:
1763 case Token::BIT_XOR:
1764 case Token::SAR:
1765 case Token::SHL:
1766 case Token::SHR:
1767 GenerateTypeTransitionWithSavedArgs(masm);
1768 break;
1769 default:
1770 UNREACHABLE();
1771 }
1772}
1773
1774
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001775void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1776 Label call_runtime;
1777 ASSERT(operands_type_ == TRBinaryOpIC::STRING);
1778 ASSERT(op_ == Token::ADD);
1779 // If one of the arguments is a string, call the string add stub.
1780 // Otherwise, transition to the generic TRBinaryOpIC type.
1781
1782 // Registers containing left and right operands respectively.
1783 Register left = edx;
1784 Register right = eax;
1785
1786 // Test if left operand is a string.
1787 NearLabel left_not_string;
1788 __ test(left, Immediate(kSmiTagMask));
1789 __ j(zero, &left_not_string);
1790 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1791 __ j(above_equal, &left_not_string);
1792
1793 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
1794 GenerateRegisterArgsPush(masm);
1795 __ TailCallStub(&string_add_left_stub);
1796
1797 // Left operand is not a string, test right.
1798 __ bind(&left_not_string);
1799 __ test(right, Immediate(kSmiTagMask));
1800 __ j(zero, &call_runtime);
1801 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
1802 __ j(above_equal, &call_runtime);
1803
1804 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
1805 GenerateRegisterArgsPush(masm);
1806 __ TailCallStub(&string_add_right_stub);
1807
1808 // Neither argument is a string.
1809 __ bind(&call_runtime);
1810 GenerateTypeTransition(masm);
1811}
1812
1813
1814void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1815 Label call_runtime;
1816 ASSERT(operands_type_ == TRBinaryOpIC::INT32);
1817
1818 // Floating point case.
1819 switch (op_) {
1820 case Token::ADD:
1821 case Token::SUB:
1822 case Token::MUL:
1823 case Token::DIV: {
1824 Label not_floats;
1825 Label not_int32;
1826 if (CpuFeatures::IsSupported(SSE2)) {
1827 CpuFeatures::Scope use_sse2(SSE2);
1828 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1829 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1830 switch (op_) {
1831 case Token::ADD: __ addsd(xmm0, xmm1); break;
1832 case Token::SUB: __ subsd(xmm0, xmm1); break;
1833 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1834 case Token::DIV: __ divsd(xmm0, xmm1); break;
1835 default: UNREACHABLE();
1836 }
1837 // Check result type if it is currently Int32.
1838 if (result_type_ <= TRBinaryOpIC::INT32) {
1839 __ cvttsd2si(ecx, Operand(xmm0));
1840 __ cvtsi2sd(xmm2, Operand(ecx));
1841 __ ucomisd(xmm0, xmm2);
1842 __ j(not_zero, &not_int32);
1843 __ j(carry, &not_int32);
1844 }
1845 GenerateHeapResultAllocation(masm, &call_runtime);
1846 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1847 __ ret(0);
1848 } else { // SSE2 not available, use FPU.
1849 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
1850 FloatingPointHelper::LoadFloatOperands(
1851 masm,
1852 ecx,
1853 FloatingPointHelper::ARGS_IN_REGISTERS);
1854 FloatingPointHelper::CheckFloatOperandsAreInt32(masm, &not_int32);
1855 switch (op_) {
1856 case Token::ADD: __ faddp(1); break;
1857 case Token::SUB: __ fsubp(1); break;
1858 case Token::MUL: __ fmulp(1); break;
1859 case Token::DIV: __ fdivp(1); break;
1860 default: UNREACHABLE();
1861 }
1862 Label after_alloc_failure;
1863 GenerateHeapResultAllocation(masm, &after_alloc_failure);
1864 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1865 __ ret(0);
1866 __ bind(&after_alloc_failure);
1867 __ ffree();
1868 __ jmp(&call_runtime);
1869 }
1870
1871 __ bind(&not_floats);
1872 __ bind(&not_int32);
1873 GenerateTypeTransition(masm);
1874 break;
1875 }
1876
1877 case Token::MOD: {
1878 // For MOD we go directly to runtime in the non-smi case.
1879 break;
1880 }
1881 case Token::BIT_OR:
1882 case Token::BIT_AND:
1883 case Token::BIT_XOR:
1884 case Token::SAR:
1885 case Token::SHL:
1886 case Token::SHR: {
1887 GenerateRegisterArgsPush(masm);
1888 Label not_floats;
1889 Label not_int32;
1890 Label non_smi_result;
1891 /* {
1892 CpuFeatures::Scope use_sse2(SSE2);
1893 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1894 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1895 }*/
1896 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
1897 use_sse3_,
1898 &not_floats);
1899 FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_,
1900 &not_int32);
1901 switch (op_) {
1902 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
1903 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
1904 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
1905 case Token::SAR: __ sar_cl(eax); break;
1906 case Token::SHL: __ shl_cl(eax); break;
1907 case Token::SHR: __ shr_cl(eax); break;
1908 default: UNREACHABLE();
1909 }
1910 if (op_ == Token::SHR) {
1911 // Check if result is non-negative and fits in a smi.
1912 __ test(eax, Immediate(0xc0000000));
1913 __ j(not_zero, &call_runtime);
1914 } else {
1915 // Check if result fits in a smi.
1916 __ cmp(eax, 0xc0000000);
1917 __ j(negative, &non_smi_result);
1918 }
1919 // Tag smi result and return.
1920 __ SmiTag(eax);
1921 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
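      // How the two smi-range checks above work (illustrative): a smi on
      // ia32 holds a signed 31-bit value, i.e. anything in [-2^30, 2^30).
      // For SHR the result is unsigned, so it only fits if the top two bits
      // are clear, hence test(eax, 0xc0000000). For the signed ops,
      // cmp(eax, 0xc0000000) followed by j(negative) branches exactly when
      // eax lies in [0x40000000, 0xbfffffff], i.e. outside the smi range.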
1922
1923 // All ops except SHR return a signed int32 that we load in
1924 // a HeapNumber.
1925 if (op_ != Token::SHR) {
1926 __ bind(&non_smi_result);
1927 // Allocate a heap number if needed.
1928 __ mov(ebx, Operand(eax)); // ebx: result
1929 NearLabel skip_allocation;
1930 switch (mode_) {
1931 case OVERWRITE_LEFT:
1932 case OVERWRITE_RIGHT:
1933 // If the operand was an object, we skip the
1934 // allocation of a heap number.
1935 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1936 1 * kPointerSize : 2 * kPointerSize));
1937 __ test(eax, Immediate(kSmiTagMask));
1938 __ j(not_zero, &skip_allocation, not_taken);
1939 // Fall through!
1940 case NO_OVERWRITE:
1941 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1942 __ bind(&skip_allocation);
1943 break;
1944 default: UNREACHABLE();
1945 }
1946 // Store the result in the HeapNumber and return.
1947 if (CpuFeatures::IsSupported(SSE2)) {
1948 CpuFeatures::Scope use_sse2(SSE2);
1949 __ cvtsi2sd(xmm0, Operand(ebx));
1950 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1951 } else {
1952 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1953 __ fild_s(Operand(esp, 1 * kPointerSize));
1954 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1955 }
1956 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1957 }
1958
1959 __ bind(&not_floats);
1960 __ bind(&not_int32);
1961 GenerateTypeTransitionWithSavedArgs(masm);
1962 break;
1963 }
1964 default: UNREACHABLE(); break;
1965 }
1966
1967 // If an allocation fails, or SHR or MOD hit a hard case,
1968 // use the runtime system to get the correct result.
1969 __ bind(&call_runtime);
1970
1971 switch (op_) {
1972 case Token::ADD:
1973 GenerateRegisterArgsPush(masm);
1974 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
1975 break;
1976 case Token::SUB:
1977 GenerateRegisterArgsPush(masm);
1978 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
1979 break;
1980 case Token::MUL:
1981 GenerateRegisterArgsPush(masm);
1982 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
1983 break;
1984 case Token::DIV:
1985 GenerateRegisterArgsPush(masm);
1986 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
1987 break;
1988 case Token::MOD:
1989 GenerateRegisterArgsPush(masm);
1990 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
1991 break;
1992 case Token::BIT_OR:
1993 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
1994 break;
1995 case Token::BIT_AND:
1996 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
1997 break;
1998 case Token::BIT_XOR:
1999 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
2000 break;
2001 case Token::SAR:
2002 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
2003 break;
2004 case Token::SHL:
2005 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
2006 break;
2007 case Token::SHR:
2008 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2009 break;
2010 default:
2011 UNREACHABLE();
2012 }
2013}
2014
2015
2016void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
2017 Label call_runtime;
2018 ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER ||
2019 operands_type_ == TRBinaryOpIC::INT32);
2020
2021 // Floating point case.
2022 switch (op_) {
2023 case Token::ADD:
2024 case Token::SUB:
2025 case Token::MUL:
2026 case Token::DIV: {
2027 Label not_floats;
2028 if (CpuFeatures::IsSupported(SSE2)) {
2029 CpuFeatures::Scope use_sse2(SSE2);
2030 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
2031
2032 switch (op_) {
2033 case Token::ADD: __ addsd(xmm0, xmm1); break;
2034 case Token::SUB: __ subsd(xmm0, xmm1); break;
2035 case Token::MUL: __ mulsd(xmm0, xmm1); break;
2036 case Token::DIV: __ divsd(xmm0, xmm1); break;
2037 default: UNREACHABLE();
2038 }
2039 GenerateHeapResultAllocation(masm, &call_runtime);
2040 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2041 __ ret(0);
2042 } else { // SSE2 not available, use FPU.
2043 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
2044 FloatingPointHelper::LoadFloatOperands(
2045 masm,
2046 ecx,
2047 FloatingPointHelper::ARGS_IN_REGISTERS);
2048 switch (op_) {
2049 case Token::ADD: __ faddp(1); break;
2050 case Token::SUB: __ fsubp(1); break;
2051 case Token::MUL: __ fmulp(1); break;
2052 case Token::DIV: __ fdivp(1); break;
2053 default: UNREACHABLE();
2054 }
2055 Label after_alloc_failure;
2056 GenerateHeapResultAllocation(masm, &after_alloc_failure);
2057 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2058 __ ret(0);
2059 __ bind(&after_alloc_failure);
2060 __ ffree();
2061 __ jmp(&call_runtime);
2062 }
2063
2064 __ bind(&not_floats);
2065 GenerateTypeTransition(masm);
2066 break;
2067 }
2068
2069 case Token::MOD: {
2070 // For MOD we go directly to runtime in the non-smi case.
2071 break;
2072 }
2073 case Token::BIT_OR:
2074 case Token::BIT_AND:
2075 case Token::BIT_XOR:
2076 case Token::SAR:
2077 case Token::SHL:
2078 case Token::SHR: {
2079 GenerateRegisterArgsPush(masm);
2080 Label not_floats;
2081 Label non_smi_result;
2082 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
2083 use_sse3_,
2084 &not_floats);
2085 switch (op_) {
2086 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
2087 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
2088 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
2089 case Token::SAR: __ sar_cl(eax); break;
2090 case Token::SHL: __ shl_cl(eax); break;
2091 case Token::SHR: __ shr_cl(eax); break;
2092 default: UNREACHABLE();
2093 }
2094 if (op_ == Token::SHR) {
2095 // Check if result is non-negative and fits in a smi.
2096 __ test(eax, Immediate(0xc0000000));
2097 __ j(not_zero, &call_runtime);
2098 } else {
2099 // Check if result fits in a smi.
2100 __ cmp(eax, 0xc0000000);
2101 __ j(negative, &non_smi_result);
2102 }
2103 // Tag smi result and return.
2104 __ SmiTag(eax);
2105 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
2106
2107 // All ops except SHR return a signed int32 that we load in
2108 // a HeapNumber.
2109 if (op_ != Token::SHR) {
2110 __ bind(&non_smi_result);
2111 // Allocate a heap number if needed.
2112 __ mov(ebx, Operand(eax)); // ebx: result
2113 NearLabel skip_allocation;
2114 switch (mode_) {
2115 case OVERWRITE_LEFT:
2116 case OVERWRITE_RIGHT:
2117 // If the operand was an object, we skip the
2118 // allocation of a heap number.
2119 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
2120 1 * kPointerSize : 2 * kPointerSize));
2121 __ test(eax, Immediate(kSmiTagMask));
2122 __ j(not_zero, &skip_allocation, not_taken);
2123 // Fall through!
2124 case NO_OVERWRITE:
2125 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2126 __ bind(&skip_allocation);
2127 break;
2128 default: UNREACHABLE();
2129 }
2130 // Store the result in the HeapNumber and return.
2131 if (CpuFeatures::IsSupported(SSE2)) {
2132 CpuFeatures::Scope use_sse2(SSE2);
2133 __ cvtsi2sd(xmm0, Operand(ebx));
2134 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2135 } else {
2136 __ mov(Operand(esp, 1 * kPointerSize), ebx);
2137 __ fild_s(Operand(esp, 1 * kPointerSize));
2138 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2139 }
2140 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
2141 }
2142
2143 __ bind(&not_floats);
2144 GenerateTypeTransitionWithSavedArgs(masm);
2145 break;
2146 }
2147 default: UNREACHABLE(); break;
2148 }
2149
2150 // If an allocation fails, or SHR or MOD hit a hard case,
2151 // use the runtime system to get the correct result.
2152 __ bind(&call_runtime);
2153
2154 switch (op_) {
2155 case Token::ADD:
2156 GenerateRegisterArgsPush(masm);
2157 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
2158 break;
2159 case Token::SUB:
2160 GenerateRegisterArgsPush(masm);
2161 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
2162 break;
2163 case Token::MUL:
2164 GenerateRegisterArgsPush(masm);
2165 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
2166 break;
2167 case Token::DIV:
2168 GenerateRegisterArgsPush(masm);
2169 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
2170 break;
2171 case Token::MOD:
2172 GenerateRegisterArgsPush(masm);
2173 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
2174 break;
2175 case Token::BIT_OR:
2176 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
2177 break;
2178 case Token::BIT_AND:
2179 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
2180 break;
2181 case Token::BIT_XOR:
2182 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
2183 break;
2184 case Token::SAR:
2185 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
2186 break;
2187 case Token::SHL:
2188 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
2189 break;
2190 case Token::SHR:
2191 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2192 break;
2193 default:
2194 UNREACHABLE();
2195 }
2196}
2197
2198
2199void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
2200 Label call_runtime;
2201
2202 __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
2203
2204 switch (op_) {
2205 case Token::ADD:
2206 case Token::SUB:
2207 case Token::MUL:
2208 case Token::DIV:
2209 break;
2210 case Token::MOD:
2211 case Token::BIT_OR:
2212 case Token::BIT_AND:
2213 case Token::BIT_XOR:
2214 case Token::SAR:
2215 case Token::SHL:
2216 case Token::SHR:
2217 GenerateRegisterArgsPush(masm);
2218 break;
2219 default:
2220 UNREACHABLE();
2221 }
2222
2223 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
2224
2225 // Floating point case.
2226 switch (op_) {
2227 case Token::ADD:
2228 case Token::SUB:
2229 case Token::MUL:
2230 case Token::DIV: {
2231 Label not_floats;
2232 if (CpuFeatures::IsSupported(SSE2)) {
2233 CpuFeatures::Scope use_sse2(SSE2);
2234 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
2235
2236 switch (op_) {
2237 case Token::ADD: __ addsd(xmm0, xmm1); break;
2238 case Token::SUB: __ subsd(xmm0, xmm1); break;
2239 case Token::MUL: __ mulsd(xmm0, xmm1); break;
2240 case Token::DIV: __ divsd(xmm0, xmm1); break;
2241 default: UNREACHABLE();
2242 }
2243 GenerateHeapResultAllocation(masm, &call_runtime);
2244 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2245 __ ret(0);
2246 } else { // SSE2 not available, use FPU.
2247 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
2248 FloatingPointHelper::LoadFloatOperands(
2249 masm,
2250 ecx,
2251 FloatingPointHelper::ARGS_IN_REGISTERS);
2252 switch (op_) {
2253 case Token::ADD: __ faddp(1); break;
2254 case Token::SUB: __ fsubp(1); break;
2255 case Token::MUL: __ fmulp(1); break;
2256 case Token::DIV: __ fdivp(1); break;
2257 default: UNREACHABLE();
2258 }
2259 Label after_alloc_failure;
2260 GenerateHeapResultAllocation(masm, &after_alloc_failure);
2261 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2262 __ ret(0);
2263 __ bind(&after_alloc_failure);
2264 __ ffree();
2265 __ jmp(&call_runtime);
2266 }
2267 __ bind(&not_floats);
2268 break;
2269 }
2270 case Token::MOD: {
2271 // For MOD we go directly to runtime in the non-smi case.
2272 break;
2273 }
2274 case Token::BIT_OR:
2275 case Token::BIT_AND:
2276 case Token::BIT_XOR:
2277 case Token::SAR:
2278 case Token::SHL:
2279 case Token::SHR: {
2280 Label non_smi_result;
2281 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
2282 use_sse3_,
2283 &call_runtime);
2284 switch (op_) {
2285 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
2286 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
2287 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
2288 case Token::SAR: __ sar_cl(eax); break;
2289 case Token::SHL: __ shl_cl(eax); break;
2290 case Token::SHR: __ shr_cl(eax); break;
2291 default: UNREACHABLE();
2292 }
2293 if (op_ == Token::SHR) {
2294 // Check if result is non-negative and fits in a smi.
2295 __ test(eax, Immediate(0xc0000000));
2296 __ j(not_zero, &call_runtime);
2297 } else {
2298 // Check if result fits in a smi.
2299 __ cmp(eax, 0xc0000000);
2300 __ j(negative, &non_smi_result);
2301 }
2302 // Tag smi result and return.
2303 __ SmiTag(eax);
2304 __ ret(2 * kPointerSize); // Drop the arguments from the stack.
2305
2306 // All ops except SHR return a signed int32 that we load in
2307 // a HeapNumber.
2308 if (op_ != Token::SHR) {
2309 __ bind(&non_smi_result);
2310 // Allocate a heap number if needed.
2311 __ mov(ebx, Operand(eax)); // ebx: result
2312 NearLabel skip_allocation;
2313 switch (mode_) {
2314 case OVERWRITE_LEFT:
2315 case OVERWRITE_RIGHT:
2316 // If the operand was an object, we skip the
2317 // allocation of a heap number.
2318 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
2319 1 * kPointerSize : 2 * kPointerSize));
2320 __ test(eax, Immediate(kSmiTagMask));
2321 __ j(not_zero, &skip_allocation, not_taken);
2322 // Fall through!
2323 case NO_OVERWRITE:
2324 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2325 __ bind(&skip_allocation);
2326 break;
2327 default: UNREACHABLE();
2328 }
2329 // Store the result in the HeapNumber and return.
2330 if (CpuFeatures::IsSupported(SSE2)) {
2331 CpuFeatures::Scope use_sse2(SSE2);
2332 __ cvtsi2sd(xmm0, Operand(ebx));
2333 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2334 } else {
2335 __ mov(Operand(esp, 1 * kPointerSize), ebx);
2336 __ fild_s(Operand(esp, 1 * kPointerSize));
2337 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2338 }
2339 __ ret(2 * kPointerSize);
2340 }
2341 break;
2342 }
2343 default: UNREACHABLE(); break;
2344 }
2345
2346 // If all else fails, use the runtime system to get the correct
2347 // result.
2348 __ bind(&call_runtime);
2349 switch (op_) {
2350 case Token::ADD: {
2351 GenerateRegisterArgsPush(masm);
2352 // Test for string arguments before calling runtime.
2353 // Registers containing left and right operands respectively.
2354 Register lhs, rhs;
2355 lhs = edx;
2356 rhs = eax;
2357
2358 // Test if left operand is a string.
2359 NearLabel lhs_not_string;
2360 __ test(lhs, Immediate(kSmiTagMask));
2361 __ j(zero, &lhs_not_string);
2362 __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
2363 __ j(above_equal, &lhs_not_string);
2364
2365 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
2366 __ TailCallStub(&string_add_left_stub);
2367
2368 NearLabel call_add_runtime;
2369 // Left operand is not a string, test right.
2370 __ bind(&lhs_not_string);
2371 __ test(rhs, Immediate(kSmiTagMask));
2372 __ j(zero, &call_add_runtime);
2373 __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
2374 __ j(above_equal, &call_add_runtime);
2375
2376 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
2377 __ TailCallStub(&string_add_right_stub);
2378
2379 // Neither argument is a string.
2380 __ bind(&call_add_runtime);
2381 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
2382 break;
2383 }
2384 case Token::SUB:
2385 GenerateRegisterArgsPush(masm);
2386 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
2387 break;
2388 case Token::MUL:
2389 GenerateRegisterArgsPush(masm);
2390 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
2391 break;
2392 case Token::DIV:
2393 GenerateRegisterArgsPush(masm);
2394 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
2395 break;
2396 case Token::MOD:
2397 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
2398 break;
2399 case Token::BIT_OR:
2400 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
2401 break;
2402 case Token::BIT_AND:
2403 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
2404 break;
2405 case Token::BIT_XOR:
2406 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
2407 break;
2408 case Token::SAR:
2409 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
2410 break;
2411 case Token::SHL:
2412 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
2413 break;
2414 case Token::SHR:
2415 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2416 break;
2417 default:
2418 UNREACHABLE();
2419 }
2420}
2421
2422
2423void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
2424 MacroAssembler* masm,
2425 Label* alloc_failure) {
2426 Label skip_allocation;
2427 OverwriteMode mode = mode_;
2428 switch (mode) {
2429 case OVERWRITE_LEFT: {
2430 // If the argument in edx is already an object, we skip the
2431 // allocation of a heap number.
2432 __ test(edx, Immediate(kSmiTagMask));
2433 __ j(not_zero, &skip_allocation, not_taken);
2434 // Allocate a heap number for the result. Keep eax and edx intact
2435 // for the possible runtime call.
2436 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2437 // Now edx can be overwritten losing one of the arguments as we are
2438 // now done and will not need it any more.
2439 __ mov(edx, Operand(ebx));
2440 __ bind(&skip_allocation);
2441 // Use object in edx as a result holder
2442 __ mov(eax, Operand(edx));
2443 break;
2444 }
2445 case OVERWRITE_RIGHT:
2446 // If the argument in eax is already an object, we skip the
2447 // allocation of a heap number.
2448 __ test(eax, Immediate(kSmiTagMask));
2449 __ j(not_zero, &skip_allocation, not_taken);
2450 // Fall through!
2451 case NO_OVERWRITE:
2452 // Allocate a heap number for the result. Keep eax and edx intact
2453 // for the possible runtime call.
2454 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2455 // Now eax can be overwritten losing one of the arguments as we are
2456 // now done and will not need it any more.
2457 __ mov(eax, ebx);
2458 __ bind(&skip_allocation);
2459 break;
2460 default: UNREACHABLE();
2461 }
2462}
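// Note on the overwrite modes handled above: when the mode permits clobbering
// an argument and that argument is already a HeapNumber, the stub reuses it
// as the result object; a fresh AllocateHeapNumber is only needed when the
// overwritable argument is a smi, or in NO_OVERWRITE mode.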
2463
2464
2465void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
2466 __ pop(ecx);
2467 __ push(edx);
2468 __ push(eax);
2469 __ push(ecx);
2470}
2471
2472
ricow@chromium.org65fae842010-08-25 15:26:24 +00002473void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
whesse@chromium.org023421e2010-12-21 12:19:12 +00002474 // TAGGED case:
2475 // Input:
2476 // esp[4]: tagged number input argument (should be number).
2477 // esp[0]: return address.
2478 // Output:
2479 // eax: tagged double result.
2480 // UNTAGGED case:
2481  // Input:
2482 // esp[0]: return address.
2483 // xmm1: untagged double input argument
2484 // Output:
2485 // xmm1: untagged double result.
2486
ricow@chromium.org65fae842010-08-25 15:26:24 +00002487 Label runtime_call;
2488 Label runtime_call_clear_stack;
whesse@chromium.org023421e2010-12-21 12:19:12 +00002489 Label skip_cache;
2490 const bool tagged = (argument_type_ == TAGGED);
2491 if (tagged) {
2492 // Test that eax is a number.
2493 NearLabel input_not_smi;
2494 NearLabel loaded;
2495 __ mov(eax, Operand(esp, kPointerSize));
2496 __ test(eax, Immediate(kSmiTagMask));
2497 __ j(not_zero, &input_not_smi);
2498 // Input is a smi. Untag and load it onto the FPU stack.
2499 // Then load the low and high words of the double into ebx, edx.
2500 STATIC_ASSERT(kSmiTagSize == 1);
2501 __ sar(eax, 1);
2502 __ sub(Operand(esp), Immediate(2 * kPointerSize));
2503 __ mov(Operand(esp, 0), eax);
2504 __ fild_s(Operand(esp, 0));
2505 __ fst_d(Operand(esp, 0));
2506 __ pop(edx);
2507 __ pop(ebx);
2508 __ jmp(&loaded);
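    // The untagged smi is spilled to the stack so that fild_s can load it
    // onto the FPU stack as an integer; fst_d immediately writes it back as
    // a 64-bit double, and the two pops pick up the double's 32-bit halves
    // for the cache hash computed below.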
2509 __ bind(&input_not_smi);
2510 // Check if input is a HeapNumber.
2511 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2512 __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
2513 __ j(not_equal, &runtime_call);
2514 // Input is a HeapNumber. Push it on the FPU stack and load its
2515 // low and high words into ebx, edx.
2516 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
2517 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
2518 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));
ricow@chromium.org65fae842010-08-25 15:26:24 +00002519
whesse@chromium.org023421e2010-12-21 12:19:12 +00002520 __ bind(&loaded);
2521 } else { // UNTAGGED.
2522 if (CpuFeatures::IsSupported(SSE4_1)) {
2523 CpuFeatures::Scope sse4_scope(SSE4_1);
2524 __ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx.
2525 } else {
2526 __ pshufd(xmm0, xmm1, 0x1);
2527 __ movd(Operand(edx), xmm0);
2528 }
2529 __ movd(Operand(ebx), xmm1);
2530 }
2531
2532 // ST[0] or xmm1 == double value
ricow@chromium.org65fae842010-08-25 15:26:24 +00002533 // ebx = low 32 bits of double value
2534 // edx = high 32 bits of double value
2535 // Compute hash (the shifts are arithmetic):
2536 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
2537 __ mov(ecx, ebx);
2538 __ xor_(ecx, Operand(edx));
2539 __ mov(eax, ecx);
2540 __ sar(eax, 16);
2541 __ xor_(ecx, Operand(eax));
2542 __ mov(eax, ecx);
2543 __ sar(eax, 8);
2544 __ xor_(ecx, Operand(eax));
2545 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
2546 __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
2547
whesse@chromium.org023421e2010-12-21 12:19:12 +00002548 // ST[0] or xmm1 == double value.
ricow@chromium.org65fae842010-08-25 15:26:24 +00002549 // ebx = low 32 bits of double value.
2550 // edx = high 32 bits of double value.
2551 // ecx = TranscendentalCache::hash(double value).
2552 __ mov(eax,
2553 Immediate(ExternalReference::transcendental_cache_array_address()));
2554 // Eax points to cache array.
2555 __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
2556 // Eax points to the cache for the type type_.
2557 // If NULL, the cache hasn't been initialized yet, so go through runtime.
2558 __ test(eax, Operand(eax));
2559 __ j(zero, &runtime_call_clear_stack);
2560#ifdef DEBUG
2561 // Check that the layout of cache elements match expectations.
2562 { TranscendentalCache::Element test_elem[2];
2563 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
2564 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
2565 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
2566 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
2567 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
2568    CHECK_EQ(12, elem2_start - elem_start); // Two uint32_t's and a pointer.
2569 CHECK_EQ(0, elem_in0 - elem_start);
2570 CHECK_EQ(kIntSize, elem_in1 - elem_start);
2571 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
2572 }
2573#endif
2574 // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
2575 __ lea(ecx, Operand(ecx, ecx, times_2, 0));
2576 __ lea(ecx, Operand(eax, ecx, times_4, 0));
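  // The two lea instructions above multiply the index by the 12-byte element
  // size without an imul: the first computes ecx = ecx + ecx*2 (= index*3),
  // the second ecx = eax + ecx*4 (= cache_base + index*12).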
2577 // Check if cache matches: Double value is stored in uint32_t[2] array.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00002578 NearLabel cache_miss;
ricow@chromium.org65fae842010-08-25 15:26:24 +00002579 __ cmp(ebx, Operand(ecx, 0));
2580 __ j(not_equal, &cache_miss);
2581 __ cmp(edx, Operand(ecx, kIntSize));
2582 __ j(not_equal, &cache_miss);
2583 // Cache hit!
2584 __ mov(eax, Operand(ecx, 2 * kIntSize));
whesse@chromium.org023421e2010-12-21 12:19:12 +00002585 if (tagged) {
2586 __ fstp(0);
2587 __ ret(kPointerSize);
2588 } else { // UNTAGGED.
2589 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2590 __ Ret();
2591 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00002592
2593 __ bind(&cache_miss);
2594 // Update cache with new value.
2595 // We are short on registers, so use no_reg as scratch.
2596 // This gives slightly larger code.
whesse@chromium.org023421e2010-12-21 12:19:12 +00002597 if (tagged) {
2598 __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
2599 } else { // UNTAGGED.
2600 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
2601 __ sub(Operand(esp), Immediate(kDoubleSize));
2602 __ movdbl(Operand(esp, 0), xmm1);
2603 __ fld_d(Operand(esp, 0));
2604 __ add(Operand(esp), Immediate(kDoubleSize));
2605 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00002606 GenerateOperation(masm);
2607 __ mov(Operand(ecx, 0), ebx);
2608 __ mov(Operand(ecx, kIntSize), edx);
2609 __ mov(Operand(ecx, 2 * kIntSize), eax);
2610 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
whesse@chromium.org023421e2010-12-21 12:19:12 +00002611 if (tagged) {
2612 __ ret(kPointerSize);
2613 } else { // UNTAGGED.
2614 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2615 __ Ret();
ricow@chromium.org65fae842010-08-25 15:26:24 +00002616
whesse@chromium.org023421e2010-12-21 12:19:12 +00002617 // Skip cache and return answer directly, only in untagged case.
2618 __ bind(&skip_cache);
2619 __ sub(Operand(esp), Immediate(kDoubleSize));
2620 __ movdbl(Operand(esp, 0), xmm1);
2621 __ fld_d(Operand(esp, 0));
2622 GenerateOperation(masm);
2623 __ fstp_d(Operand(esp, 0));
2624 __ movdbl(xmm1, Operand(esp, 0));
2625 __ add(Operand(esp), Immediate(kDoubleSize));
2626 // We return the value in xmm1 without adding it to the cache, but
2627 // we cause a scavenging GC so that future allocations will succeed.
2628 __ EnterInternalFrame();
2629 // Allocate an unused object bigger than a HeapNumber.
2630 __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
2631 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
2632 __ LeaveInternalFrame();
2633 __ Ret();
2634 }
2635
2636 // Call runtime, doing whatever allocation and cleanup is necessary.
2637 if (tagged) {
2638 __ bind(&runtime_call_clear_stack);
2639 __ fstp(0);
2640 __ bind(&runtime_call);
2641 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
2642 } else { // UNTAGGED.
2643 __ bind(&runtime_call_clear_stack);
2644 __ bind(&runtime_call);
2645 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
2646 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
2647 __ EnterInternalFrame();
2648 __ push(eax);
2649 __ CallRuntime(RuntimeFunction(), 1);
2650 __ LeaveInternalFrame();
2651 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2652 __ Ret();
2653 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00002654}
2655
2656
2657Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
2658 switch (type_) {
ricow@chromium.org65fae842010-08-25 15:26:24 +00002659 case TranscendentalCache::SIN: return Runtime::kMath_sin;
2660 case TranscendentalCache::COS: return Runtime::kMath_cos;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002661 case TranscendentalCache::LOG: return Runtime::kMath_log;
ricow@chromium.org65fae842010-08-25 15:26:24 +00002662 default:
2663 UNIMPLEMENTED();
2664 return Runtime::kAbort;
2665 }
2666}
2667
2668
2669void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
2670 // Only free register is edi.
whesse@chromium.org023421e2010-12-21 12:19:12 +00002671 // Input value is on FP stack, and also in ebx/edx.
2672 // Input value is possibly in xmm1.
2673 // Address of result (a newly allocated HeapNumber) may be in eax.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002674 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
2675 // Both fsin and fcos require arguments in the range +/-2^63 and
2676 // return NaN for infinities and NaN. They can share all code except
2677 // the actual fsin/fcos operation.
whesse@chromium.org023421e2010-12-21 12:19:12 +00002678 NearLabel in_range, done;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002679 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
2680 // work. We must reduce it to the appropriate range.
2681 __ mov(edi, edx);
2682 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only.
2683 int supported_exponent_limit =
2684 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
2685 __ cmp(Operand(edi), Immediate(supported_exponent_limit));
2686 __ j(below, &in_range, taken);
2687 // Check for infinity and NaN. Both return NaN for sin.
2688 __ cmp(Operand(edi), Immediate(0x7ff00000));
2689 NearLabel non_nan_result;
2690 __ j(not_equal, &non_nan_result, taken);
2691 // Input is +/-Infinity or NaN. Result is NaN.
2692 __ fstp(0);
2693 // NaN is represented by 0x7ff8000000000000.
2694 __ push(Immediate(0x7ff80000));
2695 __ push(Immediate(0));
2696 __ fld_d(Operand(esp, 0));
2697 __ add(Operand(esp), Immediate(2 * kPointerSize));
2698 __ jmp(&done);
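    // The two pushes above lay out the little-endian image of the quiet NaN
    // 0x7ff8000000000000 on the stack: esp[0] holds the low word 0x00000000
    // and esp[4] the high word 0x7ff80000, which fld_d then loads as a
    // double.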
ricow@chromium.org65fae842010-08-25 15:26:24 +00002699
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002700 __ bind(&non_nan_result);
ricow@chromium.org65fae842010-08-25 15:26:24 +00002701
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002702 // Use fpmod to restrict argument to the range +/-2*PI.
2703 __ mov(edi, eax); // Save eax before using fnstsw_ax.
2704 __ fldpi();
2705 __ fadd(0);
2706 __ fld(1);
2707 // FPU Stack: input, 2*pi, input.
2708 {
2709 NearLabel no_exceptions;
2710 __ fwait();
2711 __ fnstsw_ax();
2712      // Clear if Invalid Operation or Zero Divide exceptions are set.
2713 __ test(Operand(eax), Immediate(5));
2714 __ j(zero, &no_exceptions);
2715 __ fnclex();
2716 __ bind(&no_exceptions);
2717 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00002718
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002719 // Compute st(0) % st(1)
2720 {
2721 NearLabel partial_remainder_loop;
2722 __ bind(&partial_remainder_loop);
2723 __ fprem1();
2724 __ fwait();
2725 __ fnstsw_ax();
2726 __ test(Operand(eax), Immediate(0x400 /* C2 */));
2727 // If C2 is set, computation only has partial result. Loop to
2728 // continue computation.
2729 __ j(not_zero, &partial_remainder_loop);
2730 }
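    // fprem1 only computes a partial remainder per iteration; the C2 bit
    // (0x400) of the x87 status word remains set while the reduction is
    // incomplete, so the loop above runs until st(0) holds the full
    // remainder of the input modulo 2*pi.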
2731 // FPU Stack: input, 2*pi, input % 2*pi
2732 __ fstp(2);
2733 __ fstp(0);
2734 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer).
2735
2736 // FPU Stack: input % 2*pi
2737 __ bind(&in_range);
2738 switch (type_) {
2739 case TranscendentalCache::SIN:
2740 __ fsin();
2741 break;
2742 case TranscendentalCache::COS:
2743 __ fcos();
2744 break;
2745 default:
2746 UNREACHABLE();
2747 }
2748 __ bind(&done);
2749 } else {
2750 ASSERT(type_ == TranscendentalCache::LOG);
2751 __ fldln2();
2752 __ fxch();
2753 __ fyl2x();
ricow@chromium.org65fae842010-08-25 15:26:24 +00002754 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00002755}
2756
2757
2758// Get the integer part of a heap number. Surprisingly, all this bit twiddling
2759// is faster than using the built-in instructions on floating point registers.
2760// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
2761// trashed registers.
2762void IntegerConvert(MacroAssembler* masm,
2763 Register source,
2764 TypeInfo type_info,
2765 bool use_sse3,
2766 Label* conversion_failure) {
2767 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
2768 Label done, right_exponent, normal_exponent;
2769 Register scratch = ebx;
2770 Register scratch2 = edi;
2771 if (type_info.IsInteger32() && CpuFeatures::IsEnabled(SSE2)) {
2772 CpuFeatures::Scope scope(SSE2);
2773 __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
2774 return;
2775 }
2776 if (!type_info.IsInteger32() || !use_sse3) {
2777 // Get exponent word.
2778 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
2779 // Get exponent alone in scratch2.
2780 __ mov(scratch2, scratch);
2781 __ and_(scratch2, HeapNumber::kExponentMask);
2782 }
2783 if (use_sse3) {
2784 CpuFeatures::Scope scope(SSE3);
2785 if (!type_info.IsInteger32()) {
2786 // Check whether the exponent is too big for a 64 bit signed integer.
2787 static const uint32_t kTooBigExponent =
2788 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
2789 __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
2790 __ j(greater_equal, conversion_failure);
2791 }
2792 // Load x87 register with heap number.
2793 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
2794 // Reserve space for 64 bit answer.
2795 __ sub(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
2796 // Do conversion, which cannot fail because we checked the exponent.
2797 __ fisttp_d(Operand(esp, 0));
2798 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx.
2799 __ add(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
2800 } else {
2801 // Load ecx with zero. We use this either for the final shift or
2802 // for the answer.
2803 __ xor_(ecx, Operand(ecx));
2804 // Check whether the exponent matches a 32 bit signed int that cannot be
2805 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
2806 // exponent is 30 (biased). This is the exponent that we are fastest at and
2807 // also the highest exponent we can handle here.
2808 const uint32_t non_smi_exponent =
2809 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
2810 __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
2811 // If we have a match of the int32-but-not-Smi exponent then skip some
2812 // logic.
2813 __ j(equal, &right_exponent);
2814 // If the exponent is higher than that then go to slow case. This catches
2815 // numbers that don't fit in a signed int32, infinities and NaNs.
2816 __ j(less, &normal_exponent);
2817
2818 {
2819 // Handle a big exponent. The only reason we have this code is that the
2820 // >>> operator has a tendency to generate numbers with an exponent of 31.
2821 const uint32_t big_non_smi_exponent =
2822 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
2823 __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
2824 __ j(not_equal, conversion_failure);
2825 // We have the big exponent, typically from >>>. This means the number is
2826 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
2827 __ mov(scratch2, scratch);
2828 __ and_(scratch2, HeapNumber::kMantissaMask);
2829 // Put back the implicit 1.
2830 __ or_(scratch2, 1 << HeapNumber::kExponentShift);
2831 // Shift up the mantissa bits to take up the space the exponent used to
2832      // take. We just or'ed in the implicit bit, which accounts for one of
2833      // them, and we want to use the full unsigned range, so we subtract 1
2834      // from the shift distance.
2835 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
2836 __ shl(scratch2, big_shift_distance);
2837 // Get the second half of the double.
2838 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
2839      // Shift down 21 bits to get the most significant 11 bits of the low
2840 // mantissa word.
2841 __ shr(ecx, 32 - big_shift_distance);
2842 __ or_(ecx, Operand(scratch2));
2843 // We have the answer in ecx, but we may need to negate it.
2844 __ test(scratch, Operand(scratch));
2845 __ j(positive, &done);
2846 __ neg(ecx);
2847 __ jmp(&done);
2848 }
2849
2850 __ bind(&normal_exponent);
2851 // Exponent word in scratch, exponent part of exponent word in scratch2.
2852 // Zero in ecx.
2853 // We know the exponent is smaller than 30 (biased). If it is less than
2854 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
2855 // it rounds to zero.
2856 const uint32_t zero_exponent =
2857 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
2858 __ sub(Operand(scratch2), Immediate(zero_exponent));
2859 // ecx already has a Smi zero.
2860 __ j(less, &done);
2861
2862 // We have a shifted exponent between 0 and 30 in scratch2.
2863 __ shr(scratch2, HeapNumber::kExponentShift);
2864 __ mov(ecx, Immediate(30));
2865 __ sub(ecx, Operand(scratch2));
2866
2867 __ bind(&right_exponent);
2868 // Here ecx is the shift, scratch is the exponent word.
2869 // Get the top bits of the mantissa.
2870 __ and_(scratch, HeapNumber::kMantissaMask);
2871 // Put back the implicit 1.
2872 __ or_(scratch, 1 << HeapNumber::kExponentShift);
2873 // Shift up the mantissa bits to take up the space the exponent used to
2874    // take. We have kExponentShift + 1 significant bits in the low end of the
2875 // word. Shift them to the top bits.
2876 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
2877 __ shl(scratch, shift_distance);
2878 // Get the second half of the double. For some exponents we don't
2879 // actually need this because the bits get shifted out again, but
2880 // it's probably slower to test than just to do it.
2881 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
2882    // Shift down 22 bits to get the most significant 10 bits of the low
2883 // mantissa word.
2884 __ shr(scratch2, 32 - shift_distance);
2885 __ or_(scratch2, Operand(scratch));
2886 // Move down according to the exponent.
2887 __ shr_cl(scratch2);
2888 // Now the unsigned answer is in scratch2. We need to move it to ecx and
2889 // we may need to fix the sign.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00002890 NearLabel negative;
ricow@chromium.org65fae842010-08-25 15:26:24 +00002891 __ xor_(ecx, Operand(ecx));
2892 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
2893 __ j(greater, &negative);
2894 __ mov(ecx, scratch2);
2895 __ jmp(&done);
2896 __ bind(&negative);
2897 __ sub(ecx, Operand(scratch2));
2898 __ bind(&done);
2899 }
2900}
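// A rough scalar sketch of the normal-exponent path above, for illustration
// only (the stubs do not call it). It assumes the IEEE-754 double layout with
// the high word in 'high' and the low word in 'low', and an unbiased exponent
// of at most 30; the emitted code above additionally handles the exponent-31
// case produced by '>>>' as well as the SSE2/SSE3 fast paths.
static inline int32_t IntegerConvertSketch(uint32_t high, uint32_t low) {
  int32_t unbiased =
      static_cast<int32_t>((high & HeapNumber::kExponentMask) >>
                           HeapNumber::kExponentShift) -
      HeapNumber::kExponentBias;
  if (unbiased < 0) return 0;  // Magnitude below 1 rounds to zero.
  ASSERT(unbiased <= 30);
  // The top 20 mantissa bits with the implicit 1 put back.
  uint32_t mantissa =
      (high & HeapNumber::kMantissaMask) | (1 << HeapNumber::kExponentShift);
  const int shift = HeapNumber::kNonMantissaBitsInTopWord - 2;
  uint32_t combined = (mantissa << shift) | (low >> (32 - shift));
  int32_t result = static_cast<int32_t>(combined >> (30 - unbiased));
  return (high & HeapNumber::kSignMask) ? -result : result;
}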
2901
2902
2903// Input: edx, eax are the left and right objects of a bit op.
2904// Output: eax, ecx are left and right integers for a bit op.
2905void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm,
2906 TypeInfo type_info,
2907 bool use_sse3,
2908 Label* conversion_failure) {
2909 // Check float operands.
2910 Label arg1_is_object, check_undefined_arg1;
2911 Label arg2_is_object, check_undefined_arg2;
2912 Label load_arg2, done;
2913
2914 if (!type_info.IsDouble()) {
2915 if (!type_info.IsSmi()) {
2916 __ test(edx, Immediate(kSmiTagMask));
2917 __ j(not_zero, &arg1_is_object);
2918 } else {
2919 if (FLAG_debug_code) __ AbortIfNotSmi(edx);
2920 }
2921 __ SmiUntag(edx);
2922 __ jmp(&load_arg2);
2923 }
2924
2925 __ bind(&arg1_is_object);
2926
2927 // Get the untagged integer version of the edx heap number in ecx.
2928 IntegerConvert(masm, edx, type_info, use_sse3, conversion_failure);
2929 __ mov(edx, ecx);
2930
2931 // Here edx has the untagged integer, eax has a Smi or a heap number.
2932 __ bind(&load_arg2);
2933 if (!type_info.IsDouble()) {
2934 // Test if arg2 is a Smi.
2935 if (!type_info.IsSmi()) {
2936 __ test(eax, Immediate(kSmiTagMask));
2937 __ j(not_zero, &arg2_is_object);
2938 } else {
2939 if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2940 }
2941 __ SmiUntag(eax);
2942 __ mov(ecx, eax);
2943 __ jmp(&done);
2944 }
2945
2946 __ bind(&arg2_is_object);
2947
2948 // Get the untagged integer version of the eax heap number in ecx.
2949 IntegerConvert(masm, eax, type_info, use_sse3, conversion_failure);
2950 __ bind(&done);
2951 __ mov(eax, edx);
2952}
2953
2954
2955// Input: edx, eax are the left and right objects of a bit op.
2956// Output: eax, ecx are left and right integers for a bit op.
2957void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
2958 bool use_sse3,
2959 Label* conversion_failure) {
2960 // Check float operands.
2961 Label arg1_is_object, check_undefined_arg1;
2962 Label arg2_is_object, check_undefined_arg2;
2963 Label load_arg2, done;
2964
2965 // Test if arg1 is a Smi.
2966 __ test(edx, Immediate(kSmiTagMask));
2967 __ j(not_zero, &arg1_is_object);
2968
2969 __ SmiUntag(edx);
2970 __ jmp(&load_arg2);
2971
2972 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2973 __ bind(&check_undefined_arg1);
2974 __ cmp(edx, Factory::undefined_value());
2975 __ j(not_equal, conversion_failure);
2976 __ mov(edx, Immediate(0));
2977 __ jmp(&load_arg2);
2978
2979 __ bind(&arg1_is_object);
2980 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
2981 __ cmp(ebx, Factory::heap_number_map());
2982 __ j(not_equal, &check_undefined_arg1);
2983
2984 // Get the untagged integer version of the edx heap number in ecx.
2985 IntegerConvert(masm,
2986 edx,
2987 TypeInfo::Unknown(),
2988 use_sse3,
2989 conversion_failure);
2990 __ mov(edx, ecx);
2991
2992 // Here edx has the untagged integer, eax has a Smi or a heap number.
2993 __ bind(&load_arg2);
2994
2995 // Test if arg2 is a Smi.
2996 __ test(eax, Immediate(kSmiTagMask));
2997 __ j(not_zero, &arg2_is_object);
2998
2999 __ SmiUntag(eax);
3000 __ mov(ecx, eax);
3001 __ jmp(&done);
3002
3003 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
3004 __ bind(&check_undefined_arg2);
3005 __ cmp(eax, Factory::undefined_value());
3006 __ j(not_equal, conversion_failure);
3007 __ mov(ecx, Immediate(0));
3008 __ jmp(&done);
3009
3010 __ bind(&arg2_is_object);
3011 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3012 __ cmp(ebx, Factory::heap_number_map());
3013 __ j(not_equal, &check_undefined_arg2);
3014
3015 // Get the untagged integer version of the eax heap number in ecx.
3016 IntegerConvert(masm,
3017 eax,
3018 TypeInfo::Unknown(),
3019 use_sse3,
3020 conversion_failure);
3021 __ bind(&done);
3022 __ mov(eax, edx);
3023}
3024
3025
3026void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
3027 TypeInfo type_info,
3028 bool use_sse3,
3029 Label* conversion_failure) {
3030 if (type_info.IsNumber()) {
3031 LoadNumbersAsIntegers(masm, type_info, use_sse3, conversion_failure);
3032 } else {
3033 LoadUnknownsAsIntegers(masm, use_sse3, conversion_failure);
3034 }
3035}
3036
3037
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003038void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm,
3039 bool use_sse3,
3040 Label* not_int32) {
3041 return;
3042}
3043
3044
ricow@chromium.org65fae842010-08-25 15:26:24 +00003045void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
3046 Register number) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003047 NearLabel load_smi, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003048
3049 __ test(number, Immediate(kSmiTagMask));
3050 __ j(zero, &load_smi, not_taken);
3051 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
3052 __ jmp(&done);
3053
3054 __ bind(&load_smi);
3055 __ SmiUntag(number);
3056 __ push(number);
3057 __ fild_s(Operand(esp, 0));
3058 __ pop(number);
3059
3060 __ bind(&done);
3061}
3062
3063
3064void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003065 NearLabel load_smi_edx, load_eax, load_smi_eax, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003066 // Load operand in edx into xmm0.
3067 __ test(edx, Immediate(kSmiTagMask));
3068 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
3069 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3070
3071 __ bind(&load_eax);
3072 // Load operand in eax into xmm1.
3073 __ test(eax, Immediate(kSmiTagMask));
3074 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
3075 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3076 __ jmp(&done);
3077
3078 __ bind(&load_smi_edx);
3079 __ SmiUntag(edx); // Untag smi before converting to float.
3080 __ cvtsi2sd(xmm0, Operand(edx));
3081 __ SmiTag(edx); // Retag smi for heap number overwriting test.
3082 __ jmp(&load_eax);
3083
3084 __ bind(&load_smi_eax);
3085 __ SmiUntag(eax); // Untag smi before converting to float.
3086 __ cvtsi2sd(xmm1, Operand(eax));
3087 __ SmiTag(eax); // Retag smi for heap number overwriting test.
3088
3089 __ bind(&done);
3090}
3091
3092
3093void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
3094 Label* not_numbers) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003095 NearLabel load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003096 // Load operand in edx into xmm0, or branch to not_numbers.
3097 __ test(edx, Immediate(kSmiTagMask));
3098 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
3099 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
3100 __ j(not_equal, not_numbers); // Argument in edx is not a number.
3101 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3102 __ bind(&load_eax);
3103 // Load operand in eax into xmm1, or branch to not_numbers.
3104 __ test(eax, Immediate(kSmiTagMask));
3105 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
3106 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
3107 __ j(equal, &load_float_eax);
3108 __ jmp(not_numbers); // Argument in eax is not a number.
3109 __ bind(&load_smi_edx);
3110 __ SmiUntag(edx); // Untag smi before converting to float.
3111 __ cvtsi2sd(xmm0, Operand(edx));
3112 __ SmiTag(edx); // Retag smi for heap number overwriting test.
3113 __ jmp(&load_eax);
3114 __ bind(&load_smi_eax);
3115 __ SmiUntag(eax); // Untag smi before converting to float.
3116 __ cvtsi2sd(xmm1, Operand(eax));
3117 __ SmiTag(eax); // Retag smi for heap number overwriting test.
3118 __ jmp(&done);
3119 __ bind(&load_float_eax);
3120 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3121 __ bind(&done);
3122}
3123
3124
3125void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
3126 Register scratch) {
3127 const Register left = edx;
3128 const Register right = eax;
3129 __ mov(scratch, left);
3130 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
3131 __ SmiUntag(scratch);
3132 __ cvtsi2sd(xmm0, Operand(scratch));
3133
3134 __ mov(scratch, right);
3135 __ SmiUntag(scratch);
3136 __ cvtsi2sd(xmm1, Operand(scratch));
3137}
3138
3139
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003140void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm,
3141 Label* non_int32,
3142 Register scratch) {
3143 __ cvttsd2si(scratch, Operand(xmm0));
3144 __ cvtsi2sd(xmm2, Operand(scratch));
3145 __ ucomisd(xmm0, xmm2);
3146 __ j(not_zero, non_int32);
3147 __ j(carry, non_int32);
3148 __ cvttsd2si(scratch, Operand(xmm1));
3149 __ cvtsi2sd(xmm2, Operand(scratch));
3150 __ ucomisd(xmm1, xmm2);
3151 __ j(not_zero, non_int32);
3152 __ j(carry, non_int32);
3153}
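// The round-trip above is the SSE2 equivalent of checking that
// static_cast<double>(static_cast<int32_t>(x)) == x for each operand:
// cvttsd2si truncates (producing 0x80000000 on overflow or NaN), cvtsi2sd
// converts back, and ucomisd compares. The not_zero branch rejects unequal
// values and the carry branch rejects NaN, for which the compare is
// unordered.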
3154
3155
ricow@chromium.org65fae842010-08-25 15:26:24 +00003156void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
3157 Register scratch,
3158 ArgLocation arg_location) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003159 NearLabel load_smi_1, load_smi_2, done_load_1, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003160 if (arg_location == ARGS_IN_REGISTERS) {
3161 __ mov(scratch, edx);
3162 } else {
3163 __ mov(scratch, Operand(esp, 2 * kPointerSize));
3164 }
3165 __ test(scratch, Immediate(kSmiTagMask));
3166 __ j(zero, &load_smi_1, not_taken);
3167 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
3168 __ bind(&done_load_1);
3169
3170 if (arg_location == ARGS_IN_REGISTERS) {
3171 __ mov(scratch, eax);
3172 } else {
3173 __ mov(scratch, Operand(esp, 1 * kPointerSize));
3174 }
3175 __ test(scratch, Immediate(kSmiTagMask));
3176 __ j(zero, &load_smi_2, not_taken);
3177 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
3178 __ jmp(&done);
3179
3180 __ bind(&load_smi_1);
3181 __ SmiUntag(scratch);
3182 __ push(scratch);
3183 __ fild_s(Operand(esp, 0));
3184 __ pop(scratch);
3185 __ jmp(&done_load_1);
3186
3187 __ bind(&load_smi_2);
3188 __ SmiUntag(scratch);
3189 __ push(scratch);
3190 __ fild_s(Operand(esp, 0));
3191 __ pop(scratch);
3192
3193 __ bind(&done);
3194}
3195
3196
3197void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
3198 Register scratch) {
3199 const Register left = edx;
3200 const Register right = eax;
3201 __ mov(scratch, left);
3202 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
3203 __ SmiUntag(scratch);
3204 __ push(scratch);
3205 __ fild_s(Operand(esp, 0));
3206
3207 __ mov(scratch, right);
3208 __ SmiUntag(scratch);
3209 __ mov(Operand(esp, 0), scratch);
3210 __ fild_s(Operand(esp, 0));
3211 __ pop(scratch);
3212}
3213
3214
3215void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
3216 Label* non_float,
3217 Register scratch) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003218 NearLabel test_other, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003219 // Test whether both operands are numbers (heap numbers or smis); jump to
 3220 // non_float if either of them is not.
3221 __ test(edx, Immediate(kSmiTagMask));
3222 __ j(zero, &test_other, not_taken); // argument in edx is OK
3223 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
3224 __ cmp(scratch, Factory::heap_number_map());
3225 __ j(not_equal, non_float); // argument in edx is not a number -> NaN
3226
3227 __ bind(&test_other);
3228 __ test(eax, Immediate(kSmiTagMask));
3229 __ j(zero, &done); // argument in eax is OK
3230 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
3231 __ cmp(scratch, Factory::heap_number_map());
3232 __ j(not_equal, non_float); // argument in eax is not a number -> NaN
3233
3234 // Fall-through: Both operands are numbers.
3235 __ bind(&done);
3236}
3237
3238
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003239void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
3240 Label* non_int32) {
3241 return;
3242}
3243
3244
ricow@chromium.org65fae842010-08-25 15:26:24 +00003245void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003246 Label slow, done, undo;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003247
3248 if (op_ == Token::SUB) {
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003249 if (include_smi_code_) {
3250 // Check whether the value is a smi.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003251 NearLabel try_float;
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003252 __ test(eax, Immediate(kSmiTagMask));
3253 __ j(not_zero, &try_float, not_taken);
ricow@chromium.org65fae842010-08-25 15:26:24 +00003254
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003255 if (negative_zero_ == kStrictNegativeZero) {
 3256 // Go to the slow case if the value of the expression is zero,
 3257 // to make sure that the switch between 0 and -0 is handled correctly.
3258 __ test(eax, Operand(eax));
3259 __ j(zero, &slow, not_taken);
3260 }
3261
3262 // The value of the expression is a smi that is not zero. Try
3263 // optimistic subtraction '0 - value'.
3264 __ mov(edx, Operand(eax));
3265 __ Set(eax, Immediate(0));
3266 __ sub(eax, Operand(edx));
3267 __ j(overflow, &undo, not_taken);
3268 __ StubReturn(1);
3269
3270 // Try floating point case.
3271 __ bind(&try_float);
3272 } else if (FLAG_debug_code) {
3273 __ AbortIfSmi(eax);
ricow@chromium.org65fae842010-08-25 15:26:24 +00003274 }
3275
ricow@chromium.org65fae842010-08-25 15:26:24 +00003276 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3277 __ cmp(edx, Factory::heap_number_map());
3278 __ j(not_equal, &slow);
3279 if (overwrite_ == UNARY_OVERWRITE) {
3280 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
3281 __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
3282 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
3283 } else {
3284 __ mov(edx, Operand(eax));
3285 // edx: operand
3286 __ AllocateHeapNumber(eax, ebx, ecx, &undo);
3287 // eax: allocated 'empty' number
3288 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
3289 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
3290 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
3291 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
3292 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
3293 }
3294 } else if (op_ == Token::BIT_NOT) {
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003295 if (include_smi_code_) {
3296 Label non_smi;
3297 __ test(eax, Immediate(kSmiTagMask));
3298 __ j(not_zero, &non_smi);
3299 __ not_(eax);
3300 __ and_(eax, ~kSmiTagMask); // Remove inverted smi-tag.
3301 __ ret(0);
3302 __ bind(&non_smi);
3303 } else if (FLAG_debug_code) {
3304 __ AbortIfSmi(eax);
3305 }
3306
ricow@chromium.org65fae842010-08-25 15:26:24 +00003307 // Check if the operand is a heap number.
3308 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3309 __ cmp(edx, Factory::heap_number_map());
3310 __ j(not_equal, &slow, not_taken);
3311
3312 // Convert the heap number in eax to an untagged integer in ecx.
3313 IntegerConvert(masm,
3314 eax,
3315 TypeInfo::Unknown(),
3316 CpuFeatures::IsSupported(SSE3),
3317 &slow);
3318
3319 // Do the bitwise operation and check if the result fits in a smi.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003320 NearLabel try_float;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003321 __ not_(ecx);
3322 __ cmp(ecx, 0xc0000000);
3323 __ j(sign, &try_float, not_taken);
3324
3325 // Tag the result as a smi and we're done.
3326 STATIC_ASSERT(kSmiTagSize == 1);
3327 __ lea(eax, Operand(ecx, times_2, kSmiTag));
3328 __ jmp(&done);
3329
3330 // Try to store the result in a heap number.
3331 __ bind(&try_float);
3332 if (overwrite_ == UNARY_NO_OVERWRITE) {
3333 // Allocate a fresh heap number, but don't overwrite eax until
3334 // we're sure we can do it without going through the slow case
3335 // that needs the value in eax.
3336 __ AllocateHeapNumber(ebx, edx, edi, &slow);
3337 __ mov(eax, Operand(ebx));
3338 }
3339 if (CpuFeatures::IsSupported(SSE2)) {
3340 CpuFeatures::Scope use_sse2(SSE2);
3341 __ cvtsi2sd(xmm0, Operand(ecx));
3342 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
3343 } else {
3344 __ push(ecx);
3345 __ fild_s(Operand(esp, 0));
3346 __ pop(ecx);
3347 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
3348 }
3349 } else {
3350 UNIMPLEMENTED();
3351 }
3352
3353 // Return from the stub.
3354 __ bind(&done);
3355 __ StubReturn(1);
3356
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003357 // Restore eax and go slow case.
3358 __ bind(&undo);
3359 __ mov(eax, Operand(edx));
3360
ricow@chromium.org65fae842010-08-25 15:26:24 +00003361 // Handle the slow case by jumping to the JavaScript builtin.
3362 __ bind(&slow);
3363 __ pop(ecx); // pop return address.
3364 __ push(eax);
3365 __ push(ecx); // push return address
3366 switch (op_) {
3367 case Token::SUB:
3368 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
3369 break;
3370 case Token::BIT_NOT:
3371 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
3372 break;
3373 default:
3374 UNREACHABLE();
3375 }
3376}
3377
3378
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003379void MathPowStub::Generate(MacroAssembler* masm) {
3380 // Registers are used as follows:
3381 // edx = base
3382 // eax = exponent
3383 // ecx = temporary, result
3384
3385 CpuFeatures::Scope use_sse2(SSE2);
3386 Label allocate_return, call_runtime;
3387
3388 // Load input parameters.
3389 __ mov(edx, Operand(esp, 2 * kPointerSize));
3390 __ mov(eax, Operand(esp, 1 * kPointerSize));
3391
3392 // Save 1 in xmm3 - we need this several times later on.
3393 __ mov(ecx, Immediate(1));
3394 __ cvtsi2sd(xmm3, Operand(ecx));
3395
3396 Label exponent_nonsmi;
3397 Label base_nonsmi;
3398 // If the exponent is a heap number go to that specific case.
3399 __ test(eax, Immediate(kSmiTagMask));
3400 __ j(not_zero, &exponent_nonsmi);
3401 __ test(edx, Immediate(kSmiTagMask));
3402 __ j(not_zero, &base_nonsmi);
3403
 3404 // Optimized version for the case where both exponent and base are smis.
3405 Label powi;
3406 __ SmiUntag(edx);
3407 __ cvtsi2sd(xmm0, Operand(edx));
3408 __ jmp(&powi);
 3409 // The exponent is a smi and the base is a heap number.
3410 __ bind(&base_nonsmi);
3411 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3412 Factory::heap_number_map());
3413 __ j(not_equal, &call_runtime);
3414
3415 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3416
3417 // Optimized version of pow if exponent is a smi.
3418 // xmm0 contains the base.
3419 __ bind(&powi);
3420 __ SmiUntag(eax);
3421
 3422 // Save the exponent in the base register (edx); we need to check later whether
 3423 // it is negative. The base value itself has already been loaded into xmm0.
3424 __ mov(edx, eax);
3425
3426 // Get absolute value of exponent.
3427 NearLabel no_neg;
3428 __ cmp(eax, 0);
3429 __ j(greater_equal, &no_neg);
3430 __ neg(eax);
3431 __ bind(&no_neg);
3432
3433 // Load xmm1 with 1.
3434 __ movsd(xmm1, xmm3);
3435 NearLabel while_true;
3436 NearLabel no_multiply;
3437
3438 __ bind(&while_true);
3439 __ shr(eax, 1);
3440 __ j(not_carry, &no_multiply);
3441 __ mulsd(xmm1, xmm0);
3442 __ bind(&no_multiply);
3443 __ test(eax, Operand(eax));
3444 __ mulsd(xmm0, xmm0);
3445 __ j(not_zero, &while_true);
3446
 3447 // The base register (edx) still holds the original exponent value; if the
 3448 // exponent was negative, return 1/result.
3449 __ test(edx, Operand(edx));
3450 __ j(positive, &allocate_return);
3451 // Special case if xmm1 has reached infinity.
3452 __ mov(ecx, Immediate(0x7FB00000));
3453 __ movd(xmm0, Operand(ecx));
3454 __ cvtss2sd(xmm0, xmm0);
3455 __ ucomisd(xmm0, xmm1);
3456 __ j(equal, &call_runtime);
3457 __ divsd(xmm3, xmm1);
3458 __ movsd(xmm1, xmm3);
3459 __ jmp(&allocate_return);
3460
 3461 // The exponent (or both operands) is a heap number; in any case we now work
 3462 // on doubles.
3463 __ bind(&exponent_nonsmi);
3464 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3465 Factory::heap_number_map());
3466 __ j(not_equal, &call_runtime);
3467 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3468 // Test if exponent is nan.
3469 __ ucomisd(xmm1, xmm1);
3470 __ j(parity_even, &call_runtime);
3471
3472 NearLabel base_not_smi;
3473 NearLabel handle_special_cases;
3474 __ test(edx, Immediate(kSmiTagMask));
3475 __ j(not_zero, &base_not_smi);
3476 __ SmiUntag(edx);
3477 __ cvtsi2sd(xmm0, Operand(edx));
3478 __ jmp(&handle_special_cases);
3479
3480 __ bind(&base_not_smi);
3481 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3482 Factory::heap_number_map());
3483 __ j(not_equal, &call_runtime);
3484 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
3485 __ and_(ecx, HeapNumber::kExponentMask);
3486 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask));
3487 // base is NaN or +/-Infinity
3488 __ j(greater_equal, &call_runtime);
3489 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3490
3491 // base is in xmm0 and exponent is in xmm1.
3492 __ bind(&handle_special_cases);
3493 NearLabel not_minus_half;
3494 // Test for -0.5.
3495 // Load xmm2 with -0.5.
3496 __ mov(ecx, Immediate(0xBF000000));
3497 __ movd(xmm2, Operand(ecx));
3498 __ cvtss2sd(xmm2, xmm2);
3499 // xmm2 now has -0.5.
3500 __ ucomisd(xmm2, xmm1);
3501 __ j(not_equal, &not_minus_half);
3502
 3503 // Calculate the reciprocal of the square root.
 3504 // Note that 1/sqrt(x) = sqrt(1/x).
3505 __ divsd(xmm3, xmm0);
3506 __ movsd(xmm1, xmm3);
3507 __ sqrtsd(xmm1, xmm1);
3508 __ jmp(&allocate_return);
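  // Worked example of the step above: pow(4.0, -0.5) computes 1/4 = 0.25 and
  // then sqrt(0.25) = 0.5, matching 1/sqrt(4.0) up to rounding.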
3509
3510 // Test for 0.5.
3511 __ bind(&not_minus_half);
3512 // Load xmm2 with 0.5.
3513 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
3514 __ addsd(xmm2, xmm3);
3515 // xmm2 now has 0.5.
3516 __ ucomisd(xmm2, xmm1);
3517 __ j(not_equal, &call_runtime);
3518 // Calculates square root.
3519 __ movsd(xmm1, xmm0);
3520 __ sqrtsd(xmm1, xmm1);
3521
3522 __ bind(&allocate_return);
3523 __ AllocateHeapNumber(ecx, eax, edx, &call_runtime);
3524 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1);
3525 __ mov(eax, ecx);
3526 __ ret(2);
3527
3528 __ bind(&call_runtime);
3529 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
3530}
3531
3532
ricow@chromium.org65fae842010-08-25 15:26:24 +00003533void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
3534 // The key is in edx and the parameter count is in eax.
3535
3536 // The displacement is used for skipping the frame pointer on the
3537 // stack. It is the offset of the last parameter (if any) relative
3538 // to the frame pointer.
3539 static const int kDisplacement = 1 * kPointerSize;
3540
3541 // Check that the key is a smi.
3542 Label slow;
3543 __ test(edx, Immediate(kSmiTagMask));
3544 __ j(not_zero, &slow, not_taken);
3545
3546 // Check if the calling frame is an arguments adaptor frame.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003547 NearLabel adaptor;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003548 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3549 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
3550 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3551 __ j(equal, &adaptor);
3552
3553 // Check index against formal parameters count limit passed in
3554 // through register eax. Use unsigned comparison to get negative
3555 // check for free.
3556 __ cmp(edx, Operand(eax));
3557 __ j(above_equal, &slow, not_taken);
3558
3559 // Read the argument from the stack and return it.
3560 STATIC_ASSERT(kSmiTagSize == 1);
3561 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
3562 __ lea(ebx, Operand(ebp, eax, times_2, 0));
3563 __ neg(edx);
3564 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
3565 __ ret(0);
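  // In C terms the load above is (count and index denote the untagged values;
  // the smis are already value * 2, which the times_2 scale turns into
  // value * kPointerSize):
  //
  //   arg = *(ebp + kDisplacement + (count - index) * kPointerSize);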
3566
3567 // Arguments adaptor case: Check index against actual arguments
3568 // limit found in the arguments adaptor frame. Use unsigned
3569 // comparison to get negative check for free.
3570 __ bind(&adaptor);
3571 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3572 __ cmp(edx, Operand(ecx));
3573 __ j(above_equal, &slow, not_taken);
3574
3575 // Read the argument from the stack and return it.
3576 STATIC_ASSERT(kSmiTagSize == 1);
3577 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
3578 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
3579 __ neg(edx);
3580 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
3581 __ ret(0);
3582
3583 // Slow-case: Handle non-smi or out-of-bounds access to arguments
3584 // by calling the runtime system.
3585 __ bind(&slow);
3586 __ pop(ebx); // Return address.
3587 __ push(edx);
3588 __ push(ebx);
3589 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
3590}
3591
3592
3593void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
3594 // esp[0] : return address
3595 // esp[4] : number of parameters
3596 // esp[8] : receiver displacement
 3597 // esp[12] : function
3598
3599 // The displacement is used for skipping the return address and the
3600 // frame pointer on the stack. It is the offset of the last
3601 // parameter (if any) relative to the frame pointer.
3602 static const int kDisplacement = 2 * kPointerSize;
3603
3604 // Check if the calling frame is an arguments adaptor frame.
3605 Label adaptor_frame, try_allocate, runtime;
3606 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3607 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
3608 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3609 __ j(equal, &adaptor_frame);
3610
3611 // Get the length from the frame.
3612 __ mov(ecx, Operand(esp, 1 * kPointerSize));
3613 __ jmp(&try_allocate);
3614
3615 // Patch the arguments.length and the parameters pointer.
3616 __ bind(&adaptor_frame);
3617 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3618 __ mov(Operand(esp, 1 * kPointerSize), ecx);
3619 __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
3620 __ mov(Operand(esp, 2 * kPointerSize), edx);
3621
3622 // Try the new space allocation. Start out with computing the size of
3623 // the arguments object and the elements array.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003624 NearLabel add_arguments_object;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003625 __ bind(&try_allocate);
3626 __ test(ecx, Operand(ecx));
3627 __ j(zero, &add_arguments_object);
3628 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
3629 __ bind(&add_arguments_object);
3630 __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));
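  // Size arithmetic sketch: ecx held the length as a smi (2 * n), so the
  // times_2 scale yields n * kPointerSize, giving a total allocation of
  //   n * kPointerSize + FixedArray::kHeaderSize + Heap::kArgumentsObjectSize
  // bytes; the FixedArray part is skipped entirely when n == 0.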
3631
3632 // Do the allocation of both objects in one go.
3633 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
3634
3635 // Get the arguments boilerplate from the current (global) context.
3636 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
3637 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
3638 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
3639 __ mov(edi, Operand(edi, offset));
3640
3641 // Copy the JS object part.
3642 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
3643 __ mov(ebx, FieldOperand(edi, i));
3644 __ mov(FieldOperand(eax, i), ebx);
3645 }
3646
3647 // Setup the callee in-object property.
3648 STATIC_ASSERT(Heap::arguments_callee_index == 0);
3649 __ mov(ebx, Operand(esp, 3 * kPointerSize));
3650 __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);
3651
3652 // Get the length (smi tagged) and set that as an in-object property too.
3653 STATIC_ASSERT(Heap::arguments_length_index == 1);
3654 __ mov(ecx, Operand(esp, 1 * kPointerSize));
3655 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);
3656
3657 // If there are no actual arguments, we're done.
3658 Label done;
3659 __ test(ecx, Operand(ecx));
3660 __ j(zero, &done);
3661
3662 // Get the parameters pointer from the stack.
3663 __ mov(edx, Operand(esp, 2 * kPointerSize));
3664
3665 // Setup the elements pointer in the allocated arguments object and
3666 // initialize the header in the elements fixed array.
3667 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
3668 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3669 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3670 Immediate(Factory::fixed_array_map()));
3671 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3672 // Untag the length for the loop below.
3673 __ SmiUntag(ecx);
3674
3675 // Copy the fixed array slots.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003676 NearLabel loop;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003677 __ bind(&loop);
3678 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
3679 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
3680 __ add(Operand(edi), Immediate(kPointerSize));
3681 __ sub(Operand(edx), Immediate(kPointerSize));
3682 __ dec(ecx);
3683 __ j(not_zero, &loop);
3684
3685 // Return and remove the on-stack parameters.
3686 __ bind(&done);
3687 __ ret(3 * kPointerSize);
3688
3689 // Do the runtime call to allocate the arguments object.
3690 __ bind(&runtime);
3691 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
3692}
3693
3694
3695void RegExpExecStub::Generate(MacroAssembler* masm) {
 3696 // Jump straight to the runtime if native RegExp support was not selected at
 3697 // compile time, or if entering generated regexp code has been disabled by the
 3698 // regexp_entry_native runtime flag.
3699#ifdef V8_INTERPRETED_REGEXP
3700 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3701#else // V8_INTERPRETED_REGEXP
3702 if (!FLAG_regexp_entry_native) {
3703 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3704 return;
3705 }
3706
3707 // Stack frame on entry.
3708 // esp[0]: return address
3709 // esp[4]: last_match_info (expected JSArray)
3710 // esp[8]: previous index
3711 // esp[12]: subject string
3712 // esp[16]: JSRegExp object
3713
3714 static const int kLastMatchInfoOffset = 1 * kPointerSize;
3715 static const int kPreviousIndexOffset = 2 * kPointerSize;
3716 static const int kSubjectOffset = 3 * kPointerSize;
3717 static const int kJSRegExpOffset = 4 * kPointerSize;
3718
3719 Label runtime, invoke_regexp;
3720
3721 // Ensure that a RegExp stack is allocated.
3722 ExternalReference address_of_regexp_stack_memory_address =
3723 ExternalReference::address_of_regexp_stack_memory_address();
3724 ExternalReference address_of_regexp_stack_memory_size =
3725 ExternalReference::address_of_regexp_stack_memory_size();
3726 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3727 __ test(ebx, Operand(ebx));
3728 __ j(zero, &runtime, not_taken);
3729
3730 // Check that the first argument is a JSRegExp object.
3731 __ mov(eax, Operand(esp, kJSRegExpOffset));
3732 STATIC_ASSERT(kSmiTag == 0);
3733 __ test(eax, Immediate(kSmiTagMask));
3734 __ j(zero, &runtime);
3735 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
3736 __ j(not_equal, &runtime);
3737 // Check that the RegExp has been compiled (data contains a fixed array).
3738 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3739 if (FLAG_debug_code) {
3740 __ test(ecx, Immediate(kSmiTagMask));
3741 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
3742 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
3743 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
3744 }
3745
3746 // ecx: RegExp data (FixedArray)
3747 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
3748 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
3749 __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
3750 __ j(not_equal, &runtime);
3751
3752 // ecx: RegExp data (FixedArray)
 3753 // Check that the number of captures fits in the static offsets vector buffer.
3754 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
 3755 // Calculate the number of capture registers, (number_of_captures + 1) * 2. This
 3756 // uses the assumption that smis are 2 * their untagged value.
3757 STATIC_ASSERT(kSmiTag == 0);
3758 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3759 __ add(Operand(edx), Immediate(2)); // edx was a smi.
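  // Worked example: for a regexp with 3 capture groups the smi in edx is 6
  // (3 * 2); adding 2 gives 8, i.e. (number_of_captures + 1) * 2 registers,
  // two (start, end) slots per capture plus two for the whole match.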
3760 // Check that the static offsets vector buffer is large enough.
3761 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
3762 __ j(above, &runtime);
3763
3764 // ecx: RegExp data (FixedArray)
3765 // edx: Number of capture registers
3766 // Check that the second argument is a string.
3767 __ mov(eax, Operand(esp, kSubjectOffset));
3768 __ test(eax, Immediate(kSmiTagMask));
3769 __ j(zero, &runtime);
3770 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
3771 __ j(NegateCondition(is_string), &runtime);
3772 // Get the length of the string to ebx.
3773 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
3774
3775 // ebx: Length of subject string as a smi
3776 // ecx: RegExp data (FixedArray)
3777 // edx: Number of capture registers
3778 // Check that the third argument is a positive smi less than the subject
3779 // string length. A negative value will be greater (unsigned comparison).
3780 __ mov(eax, Operand(esp, kPreviousIndexOffset));
3781 __ test(eax, Immediate(kSmiTagMask));
3782 __ j(not_zero, &runtime);
3783 __ cmp(eax, Operand(ebx));
3784 __ j(above_equal, &runtime);
3785
3786 // ecx: RegExp data (FixedArray)
3787 // edx: Number of capture registers
3788 // Check that the fourth object is a JSArray object.
3789 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3790 __ test(eax, Immediate(kSmiTagMask));
3791 __ j(zero, &runtime);
3792 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3793 __ j(not_equal, &runtime);
3794 // Check that the JSArray is in fast case.
3795 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3796 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
3797 __ cmp(eax, Factory::fixed_array_map());
3798 __ j(not_equal, &runtime);
3799 // Check that the last match info has space for the capture registers and the
3800 // additional information.
3801 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
3802 __ SmiUntag(eax);
3803 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
3804 __ cmp(edx, Operand(eax));
3805 __ j(greater, &runtime);
3806
3807 // ecx: RegExp data (FixedArray)
3808 // Check the representation and encoding of the subject string.
3809 Label seq_ascii_string, seq_two_byte_string, check_code;
3810 __ mov(eax, Operand(esp, kSubjectOffset));
3811 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3812 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3813 // First check for flat two byte string.
3814 __ and_(ebx,
3815 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
3816 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
3817 __ j(zero, &seq_two_byte_string);
3818 // Any other flat string must be a flat ascii string.
3819 __ test(Operand(ebx),
3820 Immediate(kIsNotStringMask | kStringRepresentationMask));
3821 __ j(zero, &seq_ascii_string);
3822
3823 // Check for flat cons string.
3824 // A flat cons string is a cons string where the second part is the empty
3825 // string. In that case the subject string is just the first part of the cons
3826 // string. Also in this case the first part of the cons string is known to be
3827 // a sequential string or an external string.
3828 STATIC_ASSERT(kExternalStringTag != 0);
3829 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
3830 __ test(Operand(ebx),
3831 Immediate(kIsNotStringMask | kExternalStringTag));
3832 __ j(not_zero, &runtime);
3833 // String is a cons string.
3834 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
3835 __ cmp(Operand(edx), Factory::empty_string());
3836 __ j(not_equal, &runtime);
3837 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
3838 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3839 // String is a cons string with empty second part.
3840 // eax: first part of cons string.
3841 // ebx: map of first part of cons string.
3842 // Is first part a flat two byte string?
3843 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3844 kStringRepresentationMask | kStringEncodingMask);
3845 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
3846 __ j(zero, &seq_two_byte_string);
3847 // Any other flat string must be ascii.
3848 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3849 kStringRepresentationMask);
3850 __ j(not_zero, &runtime);
3851
3852 __ bind(&seq_ascii_string);
3853 // eax: subject string (flat ascii)
3854 // ecx: RegExp data (FixedArray)
3855 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
3856 __ Set(edi, Immediate(1)); // Type is ascii.
3857 __ jmp(&check_code);
3858
3859 __ bind(&seq_two_byte_string);
3860 // eax: subject string (flat two byte)
3861 // ecx: RegExp data (FixedArray)
3862 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
3863 __ Set(edi, Immediate(0)); // Type is two byte.
3864
3865 __ bind(&check_code);
3866 // Check that the irregexp code has been generated for the actual string
 3867 // encoding. If it has, the field contains a code object; otherwise it contains
3868 // the hole.
3869 __ CmpObjectType(edx, CODE_TYPE, ebx);
3870 __ j(not_equal, &runtime);
3871
3872 // eax: subject string
3873 // edx: code
3874 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
3875 // Load used arguments before starting to push arguments for call to native
3876 // RegExp code to avoid handling changing stack height.
3877 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
3878 __ SmiUntag(ebx); // Previous index from smi.
3879
3880 // eax: subject string
3881 // ebx: previous index
3882 // edx: code
 3883 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
3884 // All checks done. Now push arguments for native regexp code.
3885 __ IncrementCounter(&Counters::regexp_entry_native, 1);
3886
3887 static const int kRegExpExecuteArguments = 7;
3888 __ PrepareCallCFunction(kRegExpExecuteArguments, ecx);
3889
3890 // Argument 7: Indicate that this is a direct call from JavaScript.
3891 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
3892
3893 // Argument 6: Start (high end) of backtracking stack memory area.
3894 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3895 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3896 __ mov(Operand(esp, 5 * kPointerSize), ecx);
3897
3898 // Argument 5: static offsets vector buffer.
3899 __ mov(Operand(esp, 4 * kPointerSize),
3900 Immediate(ExternalReference::address_of_static_offsets_vector()));
3901
3902 // Argument 4: End of string data
3903 // Argument 3: Start of string data
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003904 NearLabel setup_two_byte, setup_rest;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003905 __ test(edi, Operand(edi));
3906 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
3907 __ j(zero, &setup_two_byte);
3908 __ SmiUntag(edi);
3909 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
3910 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3911 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
3912 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3913 __ jmp(&setup_rest);
3914
3915 __ bind(&setup_two_byte);
3916 STATIC_ASSERT(kSmiTag == 0);
3917 STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2).
3918 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
3919 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3920 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
3921 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3922
3923 __ bind(&setup_rest);
3924
3925 // Argument 2: Previous index.
3926 __ mov(Operand(esp, 1 * kPointerSize), ebx);
3927
3928 // Argument 1: Subject string.
3929 __ mov(Operand(esp, 0 * kPointerSize), eax);
3930
3931 // Locate the code entry and call it.
3932 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3933 __ CallCFunction(edx, kRegExpExecuteArguments);
3934
3935 // Check the result.
3936 Label success;
3937 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
3938 __ j(equal, &success, taken);
3939 Label failure;
3940 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
3941 __ j(equal, &failure, taken);
3942 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
 3943 // If not exception, it can only be retry. Handle that in the runtime system.
3944 __ j(not_equal, &runtime);
 3945 // The result must now be exception. If there is no pending exception already, a
 3946 // stack overflow (on the backtrack stack) was detected in RegExp code, but the
 3947 // exception has not been created yet. Handle that in the runtime system.
3948 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
3949 ExternalReference pending_exception(Top::k_pending_exception_address);
3950 __ mov(eax,
3951 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
3952 __ cmp(eax, Operand::StaticVariable(pending_exception));
3953 __ j(equal, &runtime);
3954 __ bind(&failure);
3955 // For failure and exception return null.
3956 __ mov(Operand(eax), Factory::null_value());
3957 __ ret(4 * kPointerSize);
3958
3959 // Load RegExp data.
3960 __ bind(&success);
3961 __ mov(eax, Operand(esp, kJSRegExpOffset));
3962 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3963 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
3964 // Calculate number of capture registers (number_of_captures + 1) * 2.
3965 STATIC_ASSERT(kSmiTag == 0);
3966 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3967 __ add(Operand(edx), Immediate(2)); // edx was a smi.
3968
3969 // edx: Number of capture registers
3970 // Load last_match_info which is still known to be a fast case JSArray.
3971 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3972 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3973
3974 // ebx: last_match_info backing store (FixedArray)
3975 // edx: number of capture registers
3976 // Store the capture count.
3977 __ SmiTag(edx); // Number of capture registers to smi.
3978 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
3979 __ SmiUntag(edx); // Number of capture registers back from smi.
3980 // Store last subject and last input.
3981 __ mov(eax, Operand(esp, kSubjectOffset));
3982 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
3983 __ mov(ecx, ebx);
3984 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
3985 __ mov(eax, Operand(esp, kSubjectOffset));
3986 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
3987 __ mov(ecx, ebx);
3988 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
3989
3990 // Get the static offsets vector filled by the native regexp code.
3991 ExternalReference address_of_static_offsets_vector =
3992 ExternalReference::address_of_static_offsets_vector();
3993 __ mov(ecx, Immediate(address_of_static_offsets_vector));
3994
3995 // ebx: last_match_info backing store (FixedArray)
3996 // ecx: offsets vector
3997 // edx: number of capture registers
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003998 NearLabel next_capture, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003999 // Capture register counter starts from number of capture registers and
 4000 // counts down until wrapping after zero.
4001 __ bind(&next_capture);
4002 __ sub(Operand(edx), Immediate(1));
4003 __ j(negative, &done);
4004 // Read the value from the static offsets vector buffer.
4005 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
4006 __ SmiTag(edi);
4007 // Store the smi value in the last match info.
4008 __ mov(FieldOperand(ebx,
4009 edx,
4010 times_pointer_size,
4011 RegExpImpl::kFirstCaptureOffset),
4012 edi);
4013 __ jmp(&next_capture);
4014 __ bind(&done);
4015
4016 // Return last match info.
4017 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
4018 __ ret(4 * kPointerSize);
4019
4020 // Do the runtime call to execute the regexp.
4021 __ bind(&runtime);
4022 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
4023#endif // V8_INTERPRETED_REGEXP
4024}
4025
4026
kasperl@chromium.orga5551262010-12-07 12:49:48 +00004027void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
4028 const int kMaxInlineLength = 100;
4029 Label slowcase;
4030 NearLabel done;
4031 __ mov(ebx, Operand(esp, kPointerSize * 3));
4032 __ test(ebx, Immediate(kSmiTagMask));
4033 __ j(not_zero, &slowcase);
4034 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
4035 __ j(above, &slowcase);
4036 // Smi-tagging is equivalent to multiplying by 2.
4037 STATIC_ASSERT(kSmiTag == 0);
4038 STATIC_ASSERT(kSmiTagSize == 1);
4039 // Allocate RegExpResult followed by FixedArray with size in ebx.
4040 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
4041 // Elements: [Map][Length][..elements..]
4042 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
4043 times_half_pointer_size,
4044 ebx, // In: Number of elements (times 2, being a smi)
4045 eax, // Out: Start of allocation (tagged).
4046 ecx, // Out: End of allocation.
4047 edx, // Scratch register
4048 &slowcase,
4049 TAG_OBJECT);
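  // Allocation size sketch: the header argument covers the fixed parts, and the
  // smi length in ebx (2 * length) scaled by times_half_pointer_size contributes
  // length * kPointerSize, so the total is
  //   JSRegExpResult::kSize + FixedArray::kHeaderSize + length * kPointerSize.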
4050 // eax: Start of allocated area, object-tagged.
4051
4052 // Set JSArray map to global.regexp_result_map().
4053 // Set empty properties FixedArray.
4054 // Set elements to point to FixedArray allocated right after the JSArray.
4055 // Interleave operations for better latency.
4056 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
4057 __ mov(ecx, Immediate(Factory::empty_fixed_array()));
4058 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
4059 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
4060 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
4061 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
4062 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
4063 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
4064
4065 // Set input, index and length fields from arguments.
4066 __ mov(ecx, Operand(esp, kPointerSize * 1));
4067 __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
4068 __ mov(ecx, Operand(esp, kPointerSize * 2));
4069 __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
4070 __ mov(ecx, Operand(esp, kPointerSize * 3));
4071 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
4072
4073 // Fill out the elements FixedArray.
4074 // eax: JSArray.
4075 // ebx: FixedArray.
4076 // ecx: Number of elements in array, as smi.
4077
4078 // Set map.
4079 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
4080 Immediate(Factory::fixed_array_map()));
4081 // Set length.
4082 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
4083 // Fill contents of fixed-array with the-hole.
4084 __ SmiUntag(ecx);
4085 __ mov(edx, Immediate(Factory::the_hole_value()));
4086 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
4087 // Fill fixed array elements with hole.
4088 // eax: JSArray.
4089 // ecx: Number of elements to fill.
4090 // ebx: Start of elements in FixedArray.
4091 // edx: the hole.
4092 Label loop;
4093 __ test(ecx, Operand(ecx));
4094 __ bind(&loop);
4095 __ j(less_equal, &done); // Jump if ecx is negative or zero.
4096 __ sub(Operand(ecx), Immediate(1));
4097 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
4098 __ jmp(&loop);
4099
4100 __ bind(&done);
4101 __ ret(3 * kPointerSize);
4102
4103 __ bind(&slowcase);
4104 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
4105}
4106
4107
ricow@chromium.org65fae842010-08-25 15:26:24 +00004108void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
4109 Register object,
4110 Register result,
4111 Register scratch1,
4112 Register scratch2,
4113 bool object_is_smi,
4114 Label* not_found) {
4115 // Use of registers. Register result is used as a temporary.
4116 Register number_string_cache = result;
4117 Register mask = scratch1;
4118 Register scratch = scratch2;
4119
4120 // Load the number string cache.
4121 ExternalReference roots_address = ExternalReference::roots_address();
4122 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
4123 __ mov(number_string_cache,
4124 Operand::StaticArray(scratch, times_pointer_size, roots_address));
4125 // Make the hash mask from the length of the number string cache. It
4126 // contains two elements (number and string) for each cache entry.
4127 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
4128 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
4129 __ sub(Operand(mask), Immediate(1)); // Make mask.
4130
4131 // Calculate the entry in the number string cache. The hash value in the
4132 // number string cache for smis is just the smi value, and the hash for
4133 // doubles is the xor of the upper and lower words. See
4134 // Heap::GetNumberStringCache.
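  // A rough C sketch of the hash computed below (mask is the power-of-two
  // number of cache entries minus one; low and high denote the two 32-bit
  // words of the double and are named here for illustration only):
  //
  //   uint32_t hash = is_smi ? untagged_value : (low ^ high);
  //   int index = hash & mask;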
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004135 NearLabel smi_hash_calculated;
4136 NearLabel load_result_from_cache;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004137 if (object_is_smi) {
4138 __ mov(scratch, object);
4139 __ SmiUntag(scratch);
4140 } else {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004141 NearLabel not_smi, hash_calculated;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004142 STATIC_ASSERT(kSmiTag == 0);
4143 __ test(object, Immediate(kSmiTagMask));
4144 __ j(not_zero, &not_smi);
4145 __ mov(scratch, object);
4146 __ SmiUntag(scratch);
4147 __ jmp(&smi_hash_calculated);
4148 __ bind(&not_smi);
4149 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
4150 Factory::heap_number_map());
4151 __ j(not_equal, not_found);
4152 STATIC_ASSERT(8 == kDoubleSize);
4153 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
4154 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
4155 // Object is heap number and hash is now in scratch. Calculate cache index.
4156 __ and_(scratch, Operand(mask));
4157 Register index = scratch;
4158 Register probe = mask;
4159 __ mov(probe,
4160 FieldOperand(number_string_cache,
4161 index,
4162 times_twice_pointer_size,
4163 FixedArray::kHeaderSize));
4164 __ test(probe, Immediate(kSmiTagMask));
4165 __ j(zero, not_found);
4166 if (CpuFeatures::IsSupported(SSE2)) {
4167 CpuFeatures::Scope fscope(SSE2);
4168 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
4169 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
4170 __ ucomisd(xmm0, xmm1);
4171 } else {
4172 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
4173 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
4174 __ FCmp();
4175 }
4176 __ j(parity_even, not_found); // Bail out if NaN is involved.
4177 __ j(not_equal, not_found); // The cache did not contain this value.
4178 __ jmp(&load_result_from_cache);
4179 }
4180
4181 __ bind(&smi_hash_calculated);
4182 // Object is smi and hash is now in scratch. Calculate cache index.
4183 __ and_(scratch, Operand(mask));
4184 Register index = scratch;
4185 // Check if the entry is the smi we are looking for.
4186 __ cmp(object,
4187 FieldOperand(number_string_cache,
4188 index,
4189 times_twice_pointer_size,
4190 FixedArray::kHeaderSize));
4191 __ j(not_equal, not_found);
4192
4193 // Get the result from the cache.
4194 __ bind(&load_result_from_cache);
4195 __ mov(result,
4196 FieldOperand(number_string_cache,
4197 index,
4198 times_twice_pointer_size,
4199 FixedArray::kHeaderSize + kPointerSize));
4200 __ IncrementCounter(&Counters::number_to_string_native, 1);
4201}
4202
4203
4204void NumberToStringStub::Generate(MacroAssembler* masm) {
4205 Label runtime;
4206
4207 __ mov(ebx, Operand(esp, kPointerSize));
4208
4209 // Generate code to lookup number in the number string cache.
4210 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
4211 __ ret(1 * kPointerSize);
4212
4213 __ bind(&runtime);
4214 // Handle number to string in the runtime system if not found in the cache.
4215 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
4216}
4217
4218
4219static int NegativeComparisonResult(Condition cc) {
4220 ASSERT(cc != equal);
4221 ASSERT((cc == less) || (cc == less_equal)
4222 || (cc == greater) || (cc == greater_equal));
4223 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
4224}
4225
4226void CompareStub::Generate(MacroAssembler* masm) {
4227 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4228
4229 Label check_unequal_objects, done;
4230
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00004231 // Compare two smis if required.
4232 if (include_smi_compare_) {
4233 Label non_smi, smi_done;
4234 __ mov(ecx, Operand(edx));
4235 __ or_(ecx, Operand(eax));
4236 __ test(ecx, Immediate(kSmiTagMask));
4237 __ j(not_zero, &non_smi, not_taken);
4238 __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
4239 __ j(no_overflow, &smi_done);
whesse@chromium.org4a5224e2010-10-20 12:37:07 +00004240 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
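  // Why the NOT above is enough: on signed overflow the computed difference has
  // its sign bit wrong, and not_ flips that bit; the result cannot become 0
  // because ~x == 0 only for x == -1, and edx holds an even, non-zero value
  // here. Only the sign and zeroness of the returned value matter to callers.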
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00004241 __ bind(&smi_done);
4242 __ mov(eax, edx);
4243 __ ret(0);
4244 __ bind(&non_smi);
4245 } else if (FLAG_debug_code) {
4246 __ mov(ecx, Operand(edx));
4247 __ or_(ecx, Operand(eax));
4248 __ test(ecx, Immediate(kSmiTagMask));
4249 __ Assert(not_zero, "Unexpected smi operands.");
4250 }
4251
ricow@chromium.org65fae842010-08-25 15:26:24 +00004252 // NOTICE! This code is only reached after a smi-fast-case check, so
4253 // it is certain that at least one operand isn't a smi.
4254
4255 // Identical objects can be compared fast, but there are some tricky cases
4256 // for NaN and undefined.
4257 {
4258 Label not_identical;
4259 __ cmp(eax, Operand(edx));
4260 __ j(not_equal, &not_identical);
4261
4262 if (cc_ != equal) {
4263 // Check for undefined. undefined OP undefined is false even though
4264 // undefined == undefined.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004265 NearLabel check_for_nan;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004266 __ cmp(edx, Factory::undefined_value());
4267 __ j(not_equal, &check_for_nan);
4268 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4269 __ ret(0);
4270 __ bind(&check_for_nan);
4271 }
4272
4273 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
4274 // so we do the second best thing - test it ourselves.
4275 // Note: if cc_ != equal, never_nan_nan_ is not used.
4276 if (never_nan_nan_ && (cc_ == equal)) {
4277 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4278 __ ret(0);
4279 } else {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004280 NearLabel heap_number;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004281 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
4282 Immediate(Factory::heap_number_map()));
4283 __ j(equal, &heap_number);
4284 if (cc_ != equal) {
4285 // Call runtime on identical JSObjects. Otherwise return equal.
4286 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4287 __ j(above_equal, &not_identical);
4288 }
4289 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4290 __ ret(0);
4291
4292 __ bind(&heap_number);
4293 // It is a heap number, so return non-equal if it's NaN and equal if
4294 // it's not NaN.
4295 // The representation of NaN values has all exponent bits (52..62) set,
4296 // and not all mantissa bits (0..51) clear.
4297 // We only accept QNaNs, which have bit 51 set.
4298 // Read top bits of double representation (second word of value).
4299
4300 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
4301 // all bits in the mask are set. We only need to check the word
4302 // that contains the exponent and high bit of the mantissa.
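  // Worked example (assuming kQuietNaNHighBitsMask == 0x7FF80000, i.e. the
  // exponent bits plus mantissa bit 51): the canonical quiet NaN has high word
  // 0x7FF80000; doubling it below gives 0xFFF00000, which compares above_equal
  // to (kQuietNaNHighBitsMask << 1) and is classified as NaN, while +Infinity
  // (high word 0x7FF00000, doubled to 0xFFE00000) is not.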
4303 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
4304 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004305 __ Set(eax, Immediate(0));
ricow@chromium.org65fae842010-08-25 15:26:24 +00004306 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
4307 // bits.
4308 __ add(edx, Operand(edx));
4309 __ cmp(edx, kQuietNaNHighBitsMask << 1);
4310 if (cc_ == equal) {
4311 STATIC_ASSERT(EQUAL != 1);
4312 __ setcc(above_equal, eax);
4313 __ ret(0);
4314 } else {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004315 NearLabel nan;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004316 __ j(above_equal, &nan);
4317 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4318 __ ret(0);
4319 __ bind(&nan);
4320 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4321 __ ret(0);
4322 }
4323 }
4324
4325 __ bind(&not_identical);
4326 }
4327
4328 // Strict equality can quickly decide whether objects are equal.
4329 // Non-strict object equality is slower, so it is handled later in the stub.
4330 if (cc_ == equal && strict_) {
4331 Label slow; // Fallthrough label.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004332 NearLabel not_smis;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004333 // If we're doing a strict equality comparison, we don't have to do
4334 // type conversion, so we generate code to do fast comparison for objects
4335 // and oddballs. Non-smi numbers and strings still go through the usual
4336 // slow-case code.
4337 // If either is a Smi (we know that not both are), then they can only
4338 // be equal if the other is a HeapNumber. If so, use the slow case.
4339 STATIC_ASSERT(kSmiTag == 0);
4340 ASSERT_EQ(0, Smi::FromInt(0));
4341 __ mov(ecx, Immediate(kSmiTagMask));
4342 __ and_(ecx, Operand(eax));
4343 __ test(ecx, Operand(edx));
4344 __ j(not_zero, &not_smis);
4345 // One operand is a smi.
4346
4347 // Check whether the non-smi is a heap number.
4348 STATIC_ASSERT(kSmiTagMask == 1);
4349 // ecx still holds eax & kSmiTag, which is either zero or one.
4350 __ sub(Operand(ecx), Immediate(0x01));
4351 __ mov(ebx, edx);
4352 __ xor_(ebx, Operand(eax));
4353 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
4354 __ xor_(ebx, Operand(eax));
4355 // if eax was smi, ebx is now edx, else eax.
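  // Branchless select sketch: after the sub above, ecx is ~0 if eax was a smi
  // and 0 otherwise, so
  //   ebx = eax ^ ((eax ^ edx) & ecx)
  // evaluates to edx when eax was the smi and to eax otherwise; either way ebx
  // now holds the heap-object (non-smi) operand.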
4356
4357 // Check if the non-smi operand is a heap number.
4358 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
4359 Immediate(Factory::heap_number_map()));
4360 // If heap number, handle it in the slow case.
4361 __ j(equal, &slow);
4362 // Return non-equal (ebx is not zero)
4363 __ mov(eax, ebx);
4364 __ ret(0);
4365
4366 __ bind(&not_smis);
4367 // If either operand is a JSObject or an oddball value, then they are not
4368 // equal since their pointers are different
4369 // There is no test for undetectability in strict equality.
4370
4371 // Get the type of the first operand.
4372 // If the first object is a JS object, we have done pointer comparison.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004373 NearLabel first_non_object;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004374 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4375 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4376 __ j(below, &first_non_object);
4377
4378 // Return non-zero (eax is not zero)
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004379 NearLabel return_not_equal;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004380 STATIC_ASSERT(kHeapObjectTag != 0);
4381 __ bind(&return_not_equal);
4382 __ ret(0);
4383
4384 __ bind(&first_non_object);
4385 // Check for oddballs: true, false, null, undefined.
4386 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4387 __ j(equal, &return_not_equal);
4388
4389 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx);
4390 __ j(above_equal, &return_not_equal);
4391
4392 // Check for oddballs: true, false, null, undefined.
4393 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4394 __ j(equal, &return_not_equal);
4395
4396 // Fall through to the general case.
4397 __ bind(&slow);
4398 }
4399
4400 // Generate the number comparison code.
4401 if (include_number_compare_) {
4402 Label non_number_comparison;
4403 Label unordered;
4404 if (CpuFeatures::IsSupported(SSE2)) {
4405 CpuFeatures::Scope use_sse2(SSE2);
4406 CpuFeatures::Scope use_cmov(CMOV);
4407
4408 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
4409 __ ucomisd(xmm0, xmm1);
4410
4411 // Don't base result on EFLAGS when a NaN is involved.
4412 __ j(parity_even, &unordered, not_taken);
4413 // Return a result of -1, 0, or 1, based on EFLAGS.
4414 __ mov(eax, 0); // equal
4415 __ mov(ecx, Immediate(Smi::FromInt(1)));
4416 __ cmov(above, eax, Operand(ecx));
4417 __ mov(ecx, Immediate(Smi::FromInt(-1)));
4418 __ cmov(below, eax, Operand(ecx));
4419 __ ret(0);
4420 } else {
4421 FloatingPointHelper::CheckFloatOperands(
4422 masm, &non_number_comparison, ebx);
4423 FloatingPointHelper::LoadFloatOperand(masm, eax);
4424 FloatingPointHelper::LoadFloatOperand(masm, edx);
4425 __ FCmp();
4426
4427 // Don't base result on EFLAGS when a NaN is involved.
4428 __ j(parity_even, &unordered, not_taken);
4429
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004430 NearLabel below_label, above_label;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004431 // Return a result of -1, 0, or 1, based on EFLAGS.
4432 __ j(below, &below_label, not_taken);
4433 __ j(above, &above_label, not_taken);
4434
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004435 __ Set(eax, Immediate(0));
ricow@chromium.org65fae842010-08-25 15:26:24 +00004436 __ ret(0);
4437
4438 __ bind(&below_label);
4439 __ mov(eax, Immediate(Smi::FromInt(-1)));
4440 __ ret(0);
4441
4442 __ bind(&above_label);
4443 __ mov(eax, Immediate(Smi::FromInt(1)));
4444 __ ret(0);
4445 }
4446
4447 // If one of the numbers was NaN, then the result is always false.
4448 // The cc is never not-equal.
4449 __ bind(&unordered);
4450 ASSERT(cc_ != not_equal);
4451 if (cc_ == less || cc_ == less_equal) {
4452 __ mov(eax, Immediate(Smi::FromInt(1)));
4453 } else {
4454 __ mov(eax, Immediate(Smi::FromInt(-1)));
4455 }
4456 __ ret(0);
4457
4458 // The number comparison code did not provide a valid result.
4459 __ bind(&non_number_comparison);
4460 }
4461
4462 // Fast negative check for symbol-to-symbol equality.
4463 Label check_for_strings;
4464 if (cc_ == equal) {
4465 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
4466 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
4467
4468 // We've already checked for object identity, so if both operands
4469 // are symbols they aren't equal. Register eax already holds a
4470 // non-zero value, which indicates not equal, so just return.
4471 __ ret(0);
4472 }
4473
4474 __ bind(&check_for_strings);
4475
4476 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
4477 &check_unequal_objects);
4478
4479 // Inline comparison of ascii strings.
4480 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
4481 edx,
4482 eax,
4483 ecx,
4484 ebx,
4485 edi);
4486#ifdef DEBUG
4487 __ Abort("Unexpected fall-through from string comparison");
4488#endif
4489
4490 __ bind(&check_unequal_objects);
4491 if (cc_ == equal && !strict_) {
4492 // Non-strict equality. Objects are unequal if
4493 // they are both JSObjects and not undetectable,
4494 // and their pointers are different.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004495 NearLabel not_both_objects;
4496 NearLabel return_unequal;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004497 // At most one is a smi, so we can test for smi by adding the two.
4498 // A smi plus a heap object has the low bit set, a heap object plus
4499 // a heap object has the low bit clear.
4500 STATIC_ASSERT(kSmiTag == 0);
4501 STATIC_ASSERT(kSmiTagMask == 1);
4502 __ lea(ecx, Operand(eax, edx, times_1, 0));
4503 __ test(ecx, Immediate(kSmiTagMask));
4504 __ j(not_zero, &not_both_objects);
4505 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4506 __ j(below, &not_both_objects);
4507 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx);
4508 __ j(below, &not_both_objects);
4509 // We do not bail out after this point. Both are JSObjects, and
4510 // they are equal if and only if both are undetectable.
4511 // The and of the undetectable flags is 1 if and only if they are equal.
4512 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
4513 1 << Map::kIsUndetectable);
4514 __ j(zero, &return_unequal);
4515 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
4516 1 << Map::kIsUndetectable);
4517 __ j(zero, &return_unequal);
4518 // The objects are both undetectable, so they both compare as the value
4519 // undefined, and are equal.
4520 __ Set(eax, Immediate(EQUAL));
4521 __ bind(&return_unequal);
4522 // Return non-equal by returning the non-zero object pointer in eax,
4523 // or return equal if we fell through to here.
 4524 __ ret(0);
4525 __ bind(&not_both_objects);
4526 }
4527
4528 // Push arguments below the return address.
4529 __ pop(ecx);
4530 __ push(edx);
4531 __ push(eax);
4532
4533 // Figure out which native to call and setup the arguments.
4534 Builtins::JavaScript builtin;
4535 if (cc_ == equal) {
4536 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
4537 } else {
4538 builtin = Builtins::COMPARE;
4539 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4540 }
4541
4542 // Restore return address on the stack.
4543 __ push(ecx);
4544
4545 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
4546 // tagged as a small integer.
4547 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
4548}
4549
4550
4551void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
4552 Label* label,
4553 Register object,
4554 Register scratch) {
4555 __ test(object, Immediate(kSmiTagMask));
4556 __ j(zero, label);
4557 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
4558 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4559 __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
4560 __ cmp(scratch, kSymbolTag | kStringTag);
4561 __ j(not_equal, label);
4562}
4563
4564
4565void StackCheckStub::Generate(MacroAssembler* masm) {
whesse@chromium.org4a5224e2010-10-20 12:37:07 +00004566 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
ricow@chromium.org65fae842010-08-25 15:26:24 +00004567}
4568
4569
4570void CallFunctionStub::Generate(MacroAssembler* masm) {
4571 Label slow;
4572
 4573 // If the receiver might be a value (string, number or boolean), check for this
4574 // and box it if it is.
4575 if (ReceiverMightBeValue()) {
4576 // Get the receiver from the stack.
4577 // +1 ~ return address
4578 Label receiver_is_value, receiver_is_js_object;
4579 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
4580
4581 // Check if receiver is a smi (which is a number value).
4582 __ test(eax, Immediate(kSmiTagMask));
4583 __ j(zero, &receiver_is_value, not_taken);
4584
4585 // Check if the receiver is a valid JS object.
4586 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
4587 __ j(above_equal, &receiver_is_js_object);
4588
4589 // Call the runtime to box the value.
4590 __ bind(&receiver_is_value);
4591 __ EnterInternalFrame();
4592 __ push(eax);
4593 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
4594 __ LeaveInternalFrame();
4595 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
4596
4597 __ bind(&receiver_is_js_object);
4598 }
4599
4600 // Get the function to call from the stack.
4601 // +2 ~ receiver, return address
4602 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
4603
4604 // Check that the function really is a JavaScript function.
4605 __ test(edi, Immediate(kSmiTagMask));
4606 __ j(zero, &slow, not_taken);
4607 // Goto slow case if we do not have a function.
4608 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
4609 __ j(not_equal, &slow, not_taken);
4610
4611 // Fast-case: Just invoke the function.
4612 ParameterCount actual(argc_);
4613 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
4614
4615 // Slow-case: Non-function called.
4616 __ bind(&slow);
4617 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
4618 // of the original receiver from the call site).
4619 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
4620 __ Set(eax, Immediate(argc_));
4621 __ Set(ebx, Immediate(0));
4622 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
4623 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
4624 __ jmp(adaptor, RelocInfo::CODE_TARGET);
4625}
4626
4627
4628void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
4629 // eax holds the exception.
4630
4631 // Adjust this code if not the case.
4632 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
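 // The handler on the stack is laid out as [next, fp, state, pc] from the
 // stack top (see the STATIC_ASSERTs below), so the pops that follow consume
 // next, fp and state, leaving the pc for the final ret.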
4633
4634 // Drop the sp to the top of the handler.
4635 ExternalReference handler_address(Top::k_handler_address);
4636 __ mov(esp, Operand::StaticVariable(handler_address));
4637
4638 // Restore next handler and frame pointer, discard handler state.
4639 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4640 __ pop(Operand::StaticVariable(handler_address));
4641 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
4642 __ pop(ebp);
4643 __ pop(edx); // Remove state.
4644
4645 // Before returning we restore the context from the frame pointer if
4646 // not NULL. The frame pointer is NULL in the exception handler of
4647 // a JS entry frame.
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004648 __ Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004649 NearLabel skip;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004650 __ cmp(ebp, 0);
4651 __ j(equal, &skip, not_taken);
4652 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4653 __ bind(&skip);
4654
4655 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
4656 __ ret(0);
4657}
4658
4659
ricow@chromium.org65fae842010-08-25 15:26:24 +00004660void CEntryStub::GenerateCore(MacroAssembler* masm,
4661 Label* throw_normal_exception,
4662 Label* throw_termination_exception,
4663 Label* throw_out_of_memory_exception,
4664 bool do_gc,
4665 bool always_allocate_scope,
4666 int /* alignment_skew */) {
4667 // eax: result parameter for PerformGC, if any
4668 // ebx: pointer to C function (C callee-saved)
4669 // ebp: frame pointer (restored after C call)
4670 // esp: stack pointer (restored after C call)
4671 // edi: number of arguments including receiver (C callee-saved)
4672 // esi: pointer to the first argument (C callee-saved)
4673
4674 // Result returned in eax, or eax+edx if result_size_ is 2.
4675
4676 // Check stack alignment.
4677 if (FLAG_debug_code) {
4678 __ CheckStackAlignment();
4679 }
4680
4681 if (do_gc) {
4682 // Pass failure code returned from last attempt as first argument to
4683 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
4684 // stack alignment is known to be correct. This function takes one argument
4685 // which is passed on the stack, and we know that the stack has been
4686 // prepared to pass at least one argument.
4687 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
4688 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
4689 }
4690
4691 ExternalReference scope_depth =
4692 ExternalReference::heap_always_allocate_scope_depth();
4693 if (always_allocate_scope) {
4694 __ inc(Operand::StaticVariable(scope_depth));
4695 }
4696
4697 // Call C function.
4698 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
4699 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
4700 __ call(Operand(ebx));
4701 // Result is in eax or edx:eax - do not destroy these registers!
4702
4703 if (always_allocate_scope) {
4704 __ dec(Operand::StaticVariable(scope_depth));
4705 }
4706
4707 // Make sure we're not trying to return 'the hole' from the runtime
4708 // call as this may lead to crashes in the IC code later.
4709 if (FLAG_debug_code) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004710 NearLabel okay;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004711 __ cmp(eax, Factory::the_hole_value());
4712 __ j(not_equal, &okay);
4713 __ int3();
4714 __ bind(&okay);
4715 }
4716
4717 // Check for failure result.
4718 Label failure_returned;
4719 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4720 __ lea(ecx, Operand(eax, 1));
4721 // Lower 2 bits of ecx are 0 iff eax has failure tag.
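 // (For illustration, assuming kFailureTag is 3 and kFailureTagMask is 3: a
 // failure pointer ends in binary ...11, so adding 1 clears both low bits,
 // while any other tag leaves at least one of them set.)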
4722 __ test(ecx, Immediate(kFailureTagMask));
4723 __ j(zero, &failure_returned, not_taken);
4724
4725 // Exit the JavaScript to C++ exit frame.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00004726 __ LeaveExitFrame(save_doubles_);
ricow@chromium.org65fae842010-08-25 15:26:24 +00004727 __ ret(0);
4728
4729 // Handling of failure.
4730 __ bind(&failure_returned);
4731
4732 Label retry;
4733 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
4734 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4735 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
4736 __ j(zero, &retry, taken);
4737
4738 // Special handling of out of memory exceptions.
4739 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4740 __ j(equal, throw_out_of_memory_exception);
4741
4742 // Retrieve the pending exception and clear the variable.
4743 ExternalReference pending_exception_address(Top::k_pending_exception_address);
4744 __ mov(eax, Operand::StaticVariable(pending_exception_address));
4745 __ mov(edx,
4746 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
4747 __ mov(Operand::StaticVariable(pending_exception_address), edx);
4748
4749 // Special handling of termination exceptions which are uncatchable
4750 // by JavaScript code.
4751 __ cmp(eax, Factory::termination_exception());
4752 __ j(equal, throw_termination_exception);
4753
4754 // Handle normal exception.
4755 __ jmp(throw_normal_exception);
4756
4757 // Retry.
4758 __ bind(&retry);
4759}
4760
4761
4762void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
4763 UncatchableExceptionType type) {
4764 // Adjust this code if not the case.
4765 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
4766
4767 // Drop sp to the top stack handler.
4768 ExternalReference handler_address(Top::k_handler_address);
4769 __ mov(esp, Operand::StaticVariable(handler_address));
4770
4771 // Unwind the handlers until the ENTRY handler is found.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004772 NearLabel loop, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004773 __ bind(&loop);
4774 // Load the type of the current stack handler.
4775 const int kStateOffset = StackHandlerConstants::kStateOffset;
4776 __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
4777 __ j(equal, &done);
4778 // Fetch the next handler in the list.
4779 const int kNextOffset = StackHandlerConstants::kNextOffset;
4780 __ mov(esp, Operand(esp, kNextOffset));
4781 __ jmp(&loop);
4782 __ bind(&done);
4783
4784 // Set the top handler address to the next handler past the current ENTRY handler.
4785 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4786 __ pop(Operand::StaticVariable(handler_address));
4787
4788 if (type == OUT_OF_MEMORY) {
4789 // Set external caught exception to false.
4790 ExternalReference external_caught(Top::k_external_caught_exception_address);
4791 __ mov(eax, false);
4792 __ mov(Operand::StaticVariable(external_caught), eax);
4793
4794 // Set pending exception and eax to out of memory exception.
4795 ExternalReference pending_exception(Top::k_pending_exception_address);
4796 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4797 __ mov(Operand::StaticVariable(pending_exception), eax);
4798 }
4799
4800 // Clear the context pointer.
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004801 __ Set(esi, Immediate(0));
ricow@chromium.org65fae842010-08-25 15:26:24 +00004802
4803 // Restore fp from handler and discard handler state.
4804 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
4805 __ pop(ebp);
4806 __ pop(edx); // State.
4807
4808 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
4809 __ ret(0);
4810}
4811
4812
4813void CEntryStub::Generate(MacroAssembler* masm) {
4814 // eax: number of arguments including receiver
4815 // ebx: pointer to C function (C callee-saved)
4816 // ebp: frame pointer (restored after C call)
4817 // esp: stack pointer (restored after C call)
4818 // esi: current context (C callee-saved)
4819 // edi: JS function of the caller (C callee-saved)
4820
4821 // NOTE: Invocations of builtins may return failure objects instead
4822 // of a proper result. The builtin entry handles this by performing
4823 // a garbage collection and retrying the builtin (twice).
4824
4825 // Enter the exit frame that transitions from JavaScript to C++.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00004826 __ EnterExitFrame(save_doubles_);
ricow@chromium.org65fae842010-08-25 15:26:24 +00004827
4828 // eax: result parameter for PerformGC, if any (setup below)
4829 // ebx: pointer to builtin function (C callee-saved)
4830 // ebp: frame pointer (restored after C call)
4831 // esp: stack pointer (restored after C call)
4832 // edi: number of arguments including receiver (C callee-saved)
4833 // esi: argv pointer (C callee-saved)
4834
4835 Label throw_normal_exception;
4836 Label throw_termination_exception;
4837 Label throw_out_of_memory_exception;
4838
4839 // Call into the runtime system.
4840 GenerateCore(masm,
4841 &throw_normal_exception,
4842 &throw_termination_exception,
4843 &throw_out_of_memory_exception,
4844 false,
4845 false);
4846
4847 // Do space-specific GC and retry runtime call.
4848 GenerateCore(masm,
4849 &throw_normal_exception,
4850 &throw_termination_exception,
4851 &throw_out_of_memory_exception,
4852 true,
4853 false);
4854
4855 // Do full GC and retry runtime call one final time.
4856 Failure* failure = Failure::InternalError();
4857 __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
4858 GenerateCore(masm,
4859 &throw_normal_exception,
4860 &throw_termination_exception,
4861 &throw_out_of_memory_exception,
4862 true,
4863 true);
4864
4865 __ bind(&throw_out_of_memory_exception);
4866 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
4867
4868 __ bind(&throw_termination_exception);
4869 GenerateThrowUncatchable(masm, TERMINATION);
4870
4871 __ bind(&throw_normal_exception);
4872 GenerateThrowTOS(masm);
4873}
4874
4875
4876void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
4877 Label invoke, exit;
4878#ifdef ENABLE_LOGGING_AND_PROFILING
4879 Label not_outermost_js, not_outermost_js_2;
4880#endif
4881
4882 // Set up the frame.
4883 __ push(ebp);
4884 __ mov(ebp, Operand(esp));
4885
4886 // Push marker in two places.
4887 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4888 __ push(Immediate(Smi::FromInt(marker))); // context slot
4889 __ push(Immediate(Smi::FromInt(marker))); // function slot
4890 // Save callee-saved registers (C calling conventions).
4891 __ push(edi);
4892 __ push(esi);
4893 __ push(ebx);
4894
4895 // Save copies of the top frame descriptor on the stack.
4896 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
4897 __ push(Operand::StaticVariable(c_entry_fp));
4898
4899#ifdef ENABLE_LOGGING_AND_PROFILING
4900 // If this is the outermost JS call, set js_entry_sp value.
4901 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
4902 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
4903 __ j(not_equal, &not_outermost_js);
4904 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
4905 __ bind(&not_outermost_js);
4906#endif
4907
4908 // Call a faked try-block that does the invoke.
4909 __ call(&invoke);
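 // The address pushed by this call becomes the pc slot of the try handler
 // pushed below, so a thrown exception resumes at the caught-exception code
 // that follows.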
4910
4911 // Caught exception: Store result (exception) in the pending
4912 // exception field in the JSEnv and return a failure sentinel.
4913 ExternalReference pending_exception(Top::k_pending_exception_address);
4914 __ mov(Operand::StaticVariable(pending_exception), eax);
4915 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
4916 __ jmp(&exit);
4917
4918 // Invoke: Link this frame into the handler chain.
4919 __ bind(&invoke);
4920 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
4921
4922 // Clear any pending exceptions.
4923 __ mov(edx,
4924 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
4925 __ mov(Operand::StaticVariable(pending_exception), edx);
4926
4927 // Fake a receiver (NULL).
4928 __ push(Immediate(0)); // receiver
4929
4930 // Invoke the function by calling through JS entry trampoline
4931 // builtin and pop the faked function when we return. Notice that we
4932 // cannot store a reference to the trampoline code directly in this
4933 // stub, because the builtin stubs may not have been generated yet.
4934 if (is_construct) {
4935 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
4936 __ mov(edx, Immediate(construct_entry));
4937 } else {
4938 ExternalReference entry(Builtins::JSEntryTrampoline);
4939 __ mov(edx, Immediate(entry));
4940 }
4941 __ mov(edx, Operand(edx, 0)); // deref address
4942 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
4943 __ call(Operand(edx));
4944
4945 // Unlink this frame from the handler chain.
4946 __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
4947 // Pop next_sp.
4948 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
4949
4950#ifdef ENABLE_LOGGING_AND_PROFILING
4951 // If current EBP value is the same as js_entry_sp value, it means that
4952 // the current function is the outermost.
4953 __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
4954 __ j(not_equal, &not_outermost_js_2);
4955 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
4956 __ bind(&not_outermost_js_2);
4957#endif
4958
4959 // Restore the top frame descriptor from the stack.
4960 __ bind(&exit);
4961 __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));
4962
4963 // Restore callee-saved registers (C calling conventions).
4964 __ pop(ebx);
4965 __ pop(esi);
4966 __ pop(edi);
4967 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
4968
4969 // Restore frame pointer and return.
4970 __ pop(ebp);
4971 __ ret(0);
4972}
4973
4974
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00004975// Generate stub code for instanceof.
4976// This code can patch the call site's inlined cache for the instanceof check,
4977// which looks like this.
4978//
4979// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
4980// 75 0a jne <some near label>
4981// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
4982//
4983// If call site patching is requested the stack will have the delta from the
4984// return address to the cmp instruction just below the return address. This
4985// also means that call site patching can only take place with arguments in
4986// registers. TOS looks like this when call site patching is requested
4987//
4988// esp[0] : return address
4989// esp[4] : delta from return address to cmp instruction
4990//
ricow@chromium.org65fae842010-08-25 15:26:24 +00004991void InstanceofStub::Generate(MacroAssembler* masm) {
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00004992 // Call site inlining and patching implies arguments in registers.
4993 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
4994
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00004995 // Fixed register usage throughout the stub.
4996 Register object = eax; // Object (lhs).
4997 Register map = ebx; // Map of the object.
4998 Register function = edx; // Function (rhs).
4999 Register prototype = edi; // Prototype of the function.
5000 Register scratch = ecx;
5001
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005002 // Constants describing the call site code to patch.
5003 static const int kDeltaToCmpImmediate = 2;
5004 static const int kDeltaToMov = 8;
5005 static const int kDeltaToMovImmediate = 9;
5006 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
5007 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
5008 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
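 // (These deltas follow from the byte sequence shown above: the cmp is two
 // opcode/modrm bytes plus a 4-byte immediate and the jne is two bytes, so the
 // cmp immediate starts at offset 2, the mov at offset 8 and its immediate at
 // offset 9.)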
5009
5010 ExternalReference roots_address = ExternalReference::roots_address();
5011
5012 ASSERT_EQ(object.code(), InstanceofStub::left().code());
5013 ASSERT_EQ(function.code(), InstanceofStub::right().code());
5014
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005015 // Get the object and function - they are always both needed.
5016 Label slow, not_js_object;
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005017 if (!HasArgsInRegisters()) {
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005018 __ mov(object, Operand(esp, 2 * kPointerSize));
5019 __ mov(function, Operand(esp, 1 * kPointerSize));
5020 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005021
5022 // Check that the left hand side is a JS object.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005023 __ test(object, Immediate(kSmiTagMask));
5024 __ j(zero, &not_js_object, not_taken);
5025 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005026
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005027 // If there is a call site cache don't look in the global cache, but do the
5028 // real lookup and update the call site cache.
5029 if (!HasCallSiteInlineCheck()) {
5030 // Look up the function and the map in the instanceof cache.
5031 NearLabel miss;
5032 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
5033 __ cmp(function,
5034 Operand::StaticArray(scratch, times_pointer_size, roots_address));
5035 __ j(not_equal, &miss);
5036 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
5037 __ cmp(map, Operand::StaticArray(
5038 scratch, times_pointer_size, roots_address));
5039 __ j(not_equal, &miss);
5040 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5041 __ mov(eax, Operand::StaticArray(
5042 scratch, times_pointer_size, roots_address));
5043 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5044 __ bind(&miss);
5045 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005046
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005047 // Get the prototype of the function.
5048 __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005049
5050 // Check that the function prototype is a JS object.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005051 __ test(prototype, Immediate(kSmiTagMask));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005052 __ j(zero, &slow, not_taken);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005053 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005054
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005055 // Update the global instanceof or call site inlined cache with the current
5056 // map and function. The cached answer will be set when it is known below.
5057 if (!HasCallSiteInlineCheck()) {
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005058 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
5059 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
5060 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
5061 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
5062 function);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005063 } else {
5064 // The constants for the code patching are based on no push instructions
5065 // at the call site.
5066 ASSERT(HasArgsInRegisters());
5067 // Get return address and delta to inlined map check.
5068 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5069 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5070 if (FLAG_debug_code) {
5071 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
5072 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
5073 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
5074 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
5075 }
5076 __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
5077 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005078
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005079 // Loop through the prototype chain of the object looking for the function
5080 // prototype.
5081 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005082 NearLabel loop, is_instance, is_not_instance;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005083 __ bind(&loop);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005084 __ cmp(scratch, Operand(prototype));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005085 __ j(equal, &is_instance);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005086 __ cmp(Operand(scratch), Immediate(Factory::null_value()));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005087 __ j(equal, &is_not_instance);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005088 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
5089 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005090 __ jmp(&loop);
5091
5092 __ bind(&is_instance);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005093 if (!HasCallSiteInlineCheck()) {
5094 __ Set(eax, Immediate(0));
5095 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5096 __ mov(Operand::StaticArray(scratch,
5097 times_pointer_size, roots_address), eax);
5098 } else {
5099 // Get return address and delta to inlined map check.
5100 __ mov(eax, Factory::true_value());
5101 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5102 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5103 if (FLAG_debug_code) {
5104 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5105 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5106 }
5107 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5108 if (!ReturnTrueFalseObject()) {
5109 __ Set(eax, Immediate(0));
5110 }
5111 }
5112 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005113
5114 __ bind(&is_not_instance);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005115 if (!HasCallSiteInlineCheck()) {
5116 __ Set(eax, Immediate(Smi::FromInt(1)));
5117 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5118 __ mov(Operand::StaticArray(
5119 scratch, times_pointer_size, roots_address), eax);
5120 } else {
5121 // Get return address and delta to inlined map check.
5122 __ mov(eax, Factory::false_value());
5123 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5124 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5125 if (FLAG_debug_code) {
5126 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5127 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5128 }
5129 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5130 if (!ReturnTrueFalseObject()) {
5131 __ Set(eax, Immediate(Smi::FromInt(1)));
5132 }
5133 }
5134 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005135
5136 Label object_not_null, object_not_null_or_smi;
5137 __ bind(&not_js_object);
5138 // Before the null, smi and string value checks, check that the rhs is a
5139 // function, since for a non-function rhs an exception needs to be thrown.
5140 __ test(function, Immediate(kSmiTagMask));
5141 __ j(zero, &slow, not_taken);
5142 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
5143 __ j(not_equal, &slow, not_taken);
5144
5145 // Null is not an instance of anything.
5146 __ cmp(object, Factory::null_value());
5147 __ j(not_equal, &object_not_null);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005148 __ Set(eax, Immediate(Smi::FromInt(1)));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005149 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005150
5151 __ bind(&object_not_null);
5152 // Smi values are not instances of anything.
5153 __ test(object, Immediate(kSmiTagMask));
5154 __ j(not_zero, &object_not_null_or_smi, not_taken);
5155 __ Set(eax, Immediate(Smi::FromInt(1)));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005156 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005157
5158 __ bind(&object_not_null_or_smi);
5159 // String values are not instances of anything.
5160 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
5161 __ j(NegateCondition(is_string), &slow);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005162 __ Set(eax, Immediate(Smi::FromInt(1)));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005163 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005164
5165 // Slow-case: Go through the JavaScript implementation.
5166 __ bind(&slow);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005167 if (!ReturnTrueFalseObject()) {
5168 // Tail call the builtin which returns 0 or 1.
5169 if (HasArgsInRegisters()) {
5170 // Push arguments below return address.
5171 __ pop(scratch);
5172 __ push(object);
5173 __ push(function);
5174 __ push(scratch);
5175 }
5176 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
5177 } else {
5178 // Call the builtin and convert 0/1 to true/false.
5179 __ EnterInternalFrame();
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005180 __ push(object);
5181 __ push(function);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005182 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
5183 __ LeaveInternalFrame();
5184 NearLabel true_value, done;
5185 __ test(eax, Operand(eax));
5186 __ j(zero, &true_value);
5187 __ mov(eax, Factory::false_value());
5188 __ jmp(&done);
5189 __ bind(&true_value);
5190 __ mov(eax, Factory::true_value());
5191 __ bind(&done);
5192 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005193 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005194}
5195
5196
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005197Register InstanceofStub::left() { return eax; }
5198
5199
5200Register InstanceofStub::right() { return edx; }
5201
5202
ricow@chromium.org65fae842010-08-25 15:26:24 +00005203int CompareStub::MinorKey() {
5204 // Encode the parameters in a unique 16 bit value. To avoid duplicate stubs,
5205 // the never-NaN-NaN condition is only taken into account if the condition
5206 // is equal.
5207 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
5208 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
5209 return ConditionField::encode(static_cast<unsigned>(cc_))
5210 | RegisterField::encode(false) // lhs_ and rhs_ are not used
5211 | StrictField::encode(strict_)
5212 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005213 | IncludeNumberCompareField::encode(include_number_compare_)
5214 | IncludeSmiCompareField::encode(include_smi_compare_);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005215}
5216
5217
5218// Unfortunately you have to run without snapshots to see most of these
5219// names in the profile since most compare stubs end up in the snapshot.
5220const char* CompareStub::GetName() {
5221 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
5222
5223 if (name_ != NULL) return name_;
5224 const int kMaxNameLength = 100;
5225 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
5226 if (name_ == NULL) return "OOM";
5227
5228 const char* cc_name;
5229 switch (cc_) {
5230 case less: cc_name = "LT"; break;
5231 case greater: cc_name = "GT"; break;
5232 case less_equal: cc_name = "LE"; break;
5233 case greater_equal: cc_name = "GE"; break;
5234 case equal: cc_name = "EQ"; break;
5235 case not_equal: cc_name = "NE"; break;
5236 default: cc_name = "UnknownCondition"; break;
5237 }
5238
5239 const char* strict_name = "";
5240 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
5241 strict_name = "_STRICT";
5242 }
5243
5244 const char* never_nan_nan_name = "";
5245 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
5246 never_nan_nan_name = "_NO_NAN";
5247 }
5248
5249 const char* include_number_compare_name = "";
5250 if (!include_number_compare_) {
5251 include_number_compare_name = "_NO_NUMBER";
5252 }
5253
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005254 const char* include_smi_compare_name = "";
5255 if (!include_smi_compare_) {
5256 include_smi_compare_name = "_NO_SMI";
5257 }
5258
ricow@chromium.org65fae842010-08-25 15:26:24 +00005259 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005260 "CompareStub_%s%s%s%s%s",
ricow@chromium.org65fae842010-08-25 15:26:24 +00005261 cc_name,
5262 strict_name,
5263 never_nan_nan_name,
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005264 include_number_compare_name,
5265 include_smi_compare_name);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005266 return name_;
5267}
5268
5269
5270// -------------------------------------------------------------------------
5271// StringCharCodeAtGenerator
5272
5273void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
5274 Label flat_string;
5275 Label ascii_string;
5276 Label got_char_code;
5277
5278 // If the receiver is a smi trigger the non-string case.
5279 STATIC_ASSERT(kSmiTag == 0);
5280 __ test(object_, Immediate(kSmiTagMask));
5281 __ j(zero, receiver_not_string_);
5282
5283 // Fetch the instance type of the receiver into result register.
5284 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
5285 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
5286 // If the receiver is not a string trigger the non-string case.
5287 __ test(result_, Immediate(kIsNotStringMask));
5288 __ j(not_zero, receiver_not_string_);
5289
5290 // If the index is non-smi trigger the non-smi case.
5291 STATIC_ASSERT(kSmiTag == 0);
5292 __ test(index_, Immediate(kSmiTagMask));
5293 __ j(not_zero, &index_not_smi_);
5294
5295 // Put smi-tagged index into scratch register.
5296 __ mov(scratch_, index_);
5297 __ bind(&got_smi_index_);
5298
5299 // Check for index out of range.
5300 __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
5301 __ j(above_equal, index_out_of_range_);
5302
5303 // We need special handling for non-flat strings.
5304 STATIC_ASSERT(kSeqStringTag == 0);
5305 __ test(result_, Immediate(kStringRepresentationMask));
5306 __ j(zero, &flat_string);
5307
5308 // Handle non-flat strings.
5309 __ test(result_, Immediate(kIsConsStringMask));
5310 __ j(zero, &call_runtime_);
5311
5312 // ConsString.
5313 // Check whether the right hand side is the empty string (i.e. if
5314 // this is really a flat string in a cons string). If that is not
5315 // the case we would rather go to the runtime system now to flatten
5316 // the string.
5317 __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
5318 Immediate(Factory::empty_string()));
5319 __ j(not_equal, &call_runtime_);
5320 // Get the first of the two strings and load its instance type.
5321 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
5322 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
5323 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
5324 // If the first cons component is also non-flat, then go to runtime.
5325 STATIC_ASSERT(kSeqStringTag == 0);
5326 __ test(result_, Immediate(kStringRepresentationMask));
5327 __ j(not_zero, &call_runtime_);
5328
5329 // Check for 1-byte or 2-byte string.
5330 __ bind(&flat_string);
5331 STATIC_ASSERT(kAsciiStringTag != 0);
5332 __ test(result_, Immediate(kStringEncodingMask));
5333 __ j(not_zero, &ascii_string);
5334
5335 // 2-byte string.
5336 // Load the 2-byte character code into the result register.
5337 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
5338 __ movzx_w(result_, FieldOperand(object_,
5339 scratch_, times_1, // Scratch is smi-tagged.
5340 SeqTwoByteString::kHeaderSize));
5341 __ jmp(&got_char_code);
5342
5343 // ASCII string.
5344 // Load the byte into the result register.
5345 __ bind(&ascii_string);
5346 __ SmiUntag(scratch_);
5347 __ movzx_b(result_, FieldOperand(object_,
5348 scratch_, times_1,
5349 SeqAsciiString::kHeaderSize));
5350 __ bind(&got_char_code);
5351 __ SmiTag(result_);
5352 __ bind(&exit_);
5353}
5354
5355
5356void StringCharCodeAtGenerator::GenerateSlow(
5357 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5358 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
5359
5360 // Index is not a smi.
5361 __ bind(&index_not_smi_);
5362 // If index is a heap number, try converting it to an integer.
5363 __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
5364 call_helper.BeforeCall(masm);
5365 __ push(object_);
5366 __ push(index_);
5367 __ push(index_); // Consumed by runtime conversion function.
5368 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
5369 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
5370 } else {
5371 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
5372 // NumberToSmi discards numbers that are not exact integers.
5373 __ CallRuntime(Runtime::kNumberToSmi, 1);
5374 }
5375 if (!scratch_.is(eax)) {
5376 // Save the conversion result before the pop instructions below
5377 // have a chance to overwrite it.
5378 __ mov(scratch_, eax);
5379 }
5380 __ pop(index_);
5381 __ pop(object_);
5382 // Reload the instance type.
5383 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
5384 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
5385 call_helper.AfterCall(masm);
5386 // If index is still not a smi, it must be out of range.
5387 STATIC_ASSERT(kSmiTag == 0);
5388 __ test(scratch_, Immediate(kSmiTagMask));
5389 __ j(not_zero, index_out_of_range_);
5390 // Otherwise, return to the fast path.
5391 __ jmp(&got_smi_index_);
5392
5393 // Call runtime. We get here when the receiver is a string and the
5394 // index is a number, but the code for getting the actual character
5395 // is too complex (e.g., when the string needs to be flattened).
5396 __ bind(&call_runtime_);
5397 call_helper.BeforeCall(masm);
5398 __ push(object_);
5399 __ push(index_);
5400 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
5401 if (!result_.is(eax)) {
5402 __ mov(result_, eax);
5403 }
5404 call_helper.AfterCall(masm);
5405 __ jmp(&exit_);
5406
5407 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
5408}
5409
5410
5411// -------------------------------------------------------------------------
5412// StringCharFromCodeGenerator
5413
5414void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
5415 // Fast case of Heap::LookupSingleCharacterStringFromCode.
5416 STATIC_ASSERT(kSmiTag == 0);
5417 STATIC_ASSERT(kSmiShiftSize == 0);
5418 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
5419 __ test(code_,
5420 Immediate(kSmiTagMask |
5421 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
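 // A single test covers both checks: a set low bit means the value is not a
 // smi, and a set bit in the upper part of the mask means the untagged code
 // is above String::kMaxAsciiCharCode.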
5422 __ j(not_zero, &slow_case_, not_taken);
5423
5424 __ Set(result_, Immediate(Factory::single_character_string_cache()));
5425 STATIC_ASSERT(kSmiTag == 0);
5426 STATIC_ASSERT(kSmiTagSize == 1);
5427 STATIC_ASSERT(kSmiShiftSize == 0);
5428 // At this point the code register contains a smi-tagged ascii char code.
5429 __ mov(result_, FieldOperand(result_,
5430 code_, times_half_pointer_size,
5431 FixedArray::kHeaderSize));
5432 __ cmp(result_, Factory::undefined_value());
5433 __ j(equal, &slow_case_, not_taken);
5434 __ bind(&exit_);
5435}
5436
5437
5438void StringCharFromCodeGenerator::GenerateSlow(
5439 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5440 __ Abort("Unexpected fallthrough to CharFromCode slow case");
5441
5442 __ bind(&slow_case_);
5443 call_helper.BeforeCall(masm);
5444 __ push(code_);
5445 __ CallRuntime(Runtime::kCharFromCode, 1);
5446 if (!result_.is(eax)) {
5447 __ mov(result_, eax);
5448 }
5449 call_helper.AfterCall(masm);
5450 __ jmp(&exit_);
5451
5452 __ Abort("Unexpected fallthrough from CharFromCode slow case");
5453}
5454
5455
5456// -------------------------------------------------------------------------
5457// StringCharAtGenerator
5458
5459void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
5460 char_code_at_generator_.GenerateFast(masm);
5461 char_from_code_generator_.GenerateFast(masm);
5462}
5463
5464
5465void StringCharAtGenerator::GenerateSlow(
5466 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5467 char_code_at_generator_.GenerateSlow(masm, call_helper);
5468 char_from_code_generator_.GenerateSlow(masm, call_helper);
5469}
5470
5471
5472void StringAddStub::Generate(MacroAssembler* masm) {
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005473 Label string_add_runtime, call_builtin;
5474 Builtins::JavaScript builtin_id = Builtins::ADD;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005475
5476 // Load the two arguments.
5477 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
5478 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
5479
5480 // Make sure that both arguments are strings if not known in advance.
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005481 if (flags_ == NO_STRING_ADD_FLAGS) {
ricow@chromium.org65fae842010-08-25 15:26:24 +00005482 __ test(eax, Immediate(kSmiTagMask));
5483 __ j(zero, &string_add_runtime);
5484 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
5485 __ j(above_equal, &string_add_runtime);
5486
5487 // First argument is a string, test second.
5488 __ test(edx, Immediate(kSmiTagMask));
5489 __ j(zero, &string_add_runtime);
5490 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
5491 __ j(above_equal, &string_add_runtime);
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005492 } else {
5493 // Here at least one of the arguments is definitely a string.
5494 // We convert the one that is not known to be a string.
5495 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
5496 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
5497 GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
5498 &call_builtin);
5499 builtin_id = Builtins::STRING_ADD_RIGHT;
5500 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
5501 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
5502 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
5503 &call_builtin);
5504 builtin_id = Builtins::STRING_ADD_LEFT;
5505 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005506 }
5507
5508 // Both arguments are strings.
5509 // eax: first string
5510 // edx: second string
5511 // Check if either of the strings is empty. In that case return the other.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005512 NearLabel second_not_zero_length, both_not_zero_length;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005513 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
5514 STATIC_ASSERT(kSmiTag == 0);
5515 __ test(ecx, Operand(ecx));
5516 __ j(not_zero, &second_not_zero_length);
5517 // Second string is empty, result is first string which is already in eax.
5518 __ IncrementCounter(&Counters::string_add_native, 1);
5519 __ ret(2 * kPointerSize);
5520 __ bind(&second_not_zero_length);
5521 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
5522 STATIC_ASSERT(kSmiTag == 0);
5523 __ test(ebx, Operand(ebx));
5524 __ j(not_zero, &both_not_zero_length);
5525 // First string is empty, result is second string which is in edx.
5526 __ mov(eax, edx);
5527 __ IncrementCounter(&Counters::string_add_native, 1);
5528 __ ret(2 * kPointerSize);
5529
5530 // Both strings are non-empty.
5531 // eax: first string
5532 // ebx: length of first string as a smi
5533 // ecx: length of second string as a smi
5534 // edx: second string
5535 // Look at the length of the result of adding the two strings.
5536 Label string_add_flat_result, longer_than_two;
5537 __ bind(&both_not_zero_length);
5538 __ add(ebx, Operand(ecx));
5539 STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
5540 // Handle exceptionally long strings in the runtime system.
5541 __ j(overflow, &string_add_runtime);
5542 // Handle the addition of two one-character strings specially: probe the
5543 // symbol table first, as the combined two-character string may already exist.
5544 __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
5545 __ j(not_equal, &longer_than_two);
5546
5547 // Check that both strings are non-external ascii strings.
5548 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
5549 &string_add_runtime);
5550
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005551 // Get the two characters forming the new string.
ricow@chromium.org65fae842010-08-25 15:26:24 +00005552 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
5553 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
5554
5555 // Try to lookup two character string in symbol table. If it is not found
5556 // just allocate a new one.
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005557 Label make_two_character_string, make_two_character_string_no_reload;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005558 StringHelper::GenerateTwoCharacterSymbolTableProbe(
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005559 masm, ebx, ecx, eax, edx, edi,
5560 &make_two_character_string_no_reload, &make_two_character_string);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005561 __ IncrementCounter(&Counters::string_add_native, 1);
5562 __ ret(2 * kPointerSize);
5563
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005564 // Allocate a two character string.
ricow@chromium.org65fae842010-08-25 15:26:24 +00005565 __ bind(&make_two_character_string);
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005566 // Reload the arguments.
5567 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
5568 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
5569 // Get the two characters forming the new string.
5570 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
5571 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
5572 __ bind(&make_two_character_string_no_reload);
5573 __ IncrementCounter(&Counters::string_add_make_two_char, 1);
5574 __ AllocateAsciiString(eax, // Result.
5575 2, // Length.
5576 edi, // Scratch 1.
5577 edx, // Scratch 2.
5578 &string_add_runtime);
5579 // Pack both characters in ebx.
5580 __ shl(ecx, kBitsPerByte);
5581 __ or_(ebx, Operand(ecx));
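 // ebx now holds char 1 in its low byte and char 2 in the next byte, which is
 // the layout the 16-bit store below writes into the new string.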
5582 // Set the characters in the new string.
5583 __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
5584 __ IncrementCounter(&Counters::string_add_native, 1);
5585 __ ret(2 * kPointerSize);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005586
5587 __ bind(&longer_than_two);
5588 // Check if resulting string will be flat.
5589 __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
5590 __ j(below, &string_add_flat_result);
5591
5592 // If the result is not supposed to be flat, allocate a cons string object.
5593 // If both strings are ascii the result is an ascii cons string.
5594 Label non_ascii, allocated, ascii_data;
5595 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
5596 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
5597 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
5598 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
5599 __ and_(ecx, Operand(edi));
5600 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
5601 __ test(ecx, Immediate(kAsciiStringTag));
5602 __ j(zero, &non_ascii);
5603 __ bind(&ascii_data);
5604 // Allocate an ascii cons string.
5605 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
5606 __ bind(&allocated);
5607 // Fill the fields of the cons string.
5608 if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
5609 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
5610 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
5611 Immediate(String::kEmptyHashField));
5612 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
5613 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
5614 __ mov(eax, ecx);
5615 __ IncrementCounter(&Counters::string_add_native, 1);
5616 __ ret(2 * kPointerSize);
5617 __ bind(&non_ascii);
5618 // At least one of the strings is two-byte. Check whether it happens
5619 // to contain only ascii characters.
5620 // ecx: first instance type AND second instance type.
5621 // edi: second instance type.
5622 __ test(ecx, Immediate(kAsciiDataHintMask));
5623 __ j(not_zero, &ascii_data);
5624 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5625 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5626 __ xor_(edi, Operand(ecx));
5627 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
5628 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
5629 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
5630 __ j(equal, &ascii_data);
5631 // Allocate a two byte cons string.
5632 __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
5633 __ jmp(&allocated);
5634
5635 // Handle creating a flat result. First check that both strings are not
5636 // external strings.
5637 // eax: first string
5638 // ebx: length of resulting flat string as a smi
5639 // edx: second string
5640 __ bind(&string_add_flat_result);
5641 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5642 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5643 __ and_(ecx, kStringRepresentationMask);
5644 __ cmp(ecx, kExternalStringTag);
5645 __ j(equal, &string_add_runtime);
5646 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5647 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5648 __ and_(ecx, kStringRepresentationMask);
5649 __ cmp(ecx, kExternalStringTag);
5650 __ j(equal, &string_add_runtime);
5651 // Now check if both strings are ascii strings.
5652 // eax: first string
5653 // ebx: length of resulting flat string as a smi
5654 // edx: second string
5655 Label non_ascii_string_add_flat_result;
5656 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
5657 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5658 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
5659 __ j(zero, &non_ascii_string_add_flat_result);
5660 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5661 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
5662 __ j(zero, &string_add_runtime);
5663
ricow@chromium.org65fae842010-08-25 15:26:24 +00005664 // Both strings are ascii strings. As they are short they are both flat.
5665 // ebx: length of resulting flat string as a smi
5666 __ SmiUntag(ebx);
5667 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
5668 // eax: result string
5669 __ mov(ecx, eax);
5670 // Locate first character of result.
5671 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5672 // Load first argument and locate first character.
5673 __ mov(edx, Operand(esp, 2 * kPointerSize));
5674 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5675 __ SmiUntag(edi);
5676 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5677 // eax: result string
5678 // ecx: first character of result
5679 // edx: first char of first argument
5680 // edi: length of first argument
5681 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
5682 // Load second argument and locate first character.
5683 __ mov(edx, Operand(esp, 1 * kPointerSize));
5684 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5685 __ SmiUntag(edi);
5686 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5687 // eax: result string
5688 // ecx: next character of result
5689 // edx: first char of second argument
5690 // edi: length of second argument
5691 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
5692 __ IncrementCounter(&Counters::string_add_native, 1);
5693 __ ret(2 * kPointerSize);
5694
5695 // Handle creating a flat two byte result.
5696 // eax: first string - known to be two byte
5697 // ebx: length of resulting flat string as a smi
5698 // edx: second string
5699 __ bind(&non_ascii_string_add_flat_result);
5700 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5701 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
5702 __ j(not_zero, &string_add_runtime);
5703 // Both strings are two byte strings. As they are short they are both
5704 // flat.
5705 __ SmiUntag(ebx);
5706 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
5707 // eax: result string
5708 __ mov(ecx, eax);
5709 // Locate first character of result.
5710 __ add(Operand(ecx),
5711 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5712 // Load first argument and locate first character.
5713 __ mov(edx, Operand(esp, 2 * kPointerSize));
5714 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5715 __ SmiUntag(edi);
5716 __ add(Operand(edx),
5717 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5718 // eax: result string
5719 // ecx: first character of result
5720 // edx: first char of first argument
5721 // edi: length of first argument
5722 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
5723 // Load second argument and locate first character.
5724 __ mov(edx, Operand(esp, 1 * kPointerSize));
5725 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5726 __ SmiUntag(edi);
5727 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5728 // eax: result string
5729 // ecx: next character of result
5730 // edx: first char of second argument
5731 // edi: length of second argument
5732 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
5733 __ IncrementCounter(&Counters::string_add_native, 1);
5734 __ ret(2 * kPointerSize);
5735
5736 // Just jump to runtime to add the two strings.
5737 __ bind(&string_add_runtime);
5738 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005739
5740 if (call_builtin.is_linked()) {
5741 __ bind(&call_builtin);
5742 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
5743 }
5744}
5745
5746
5747void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
5748 int stack_offset,
5749 Register arg,
5750 Register scratch1,
5751 Register scratch2,
5752 Register scratch3,
5753 Label* slow) {
5754 // First check if the argument is already a string.
5755 Label not_string, done;
5756 __ test(arg, Immediate(kSmiTagMask));
5757 __ j(zero, &not_string);
5758 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
5759 __ j(below, &done);
5760
5761 // Check the number to string cache.
5762 Label not_cached;
5763 __ bind(&not_string);
5764 // Puts the cached result into scratch1.
5765 NumberToStringStub::GenerateLookupNumberStringCache(masm,
5766 arg,
5767 scratch1,
5768 scratch2,
5769 scratch3,
5770 false,
5771 &not_cached);
5772 __ mov(arg, scratch1);
5773 __ mov(Operand(esp, stack_offset), arg);
5774 __ jmp(&done);
5775
5776 // Check if the argument is a safe string wrapper.
5777 __ bind(&not_cached);
5778 __ test(arg, Immediate(kSmiTagMask));
5779 __ j(zero, slow);
5780 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1.
5781 __ j(not_equal, slow);
5782 __ test_b(FieldOperand(scratch1, Map::kBitField2Offset),
5783 1 << Map::kStringWrapperSafeForDefaultValueOf);
5784 __ j(zero, slow);
5785 __ mov(arg, FieldOperand(arg, JSValue::kValueOffset));
5786 __ mov(Operand(esp, stack_offset), arg);
5787
5788 __ bind(&done);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005789}
5790
5791
5792void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
5793 Register dest,
5794 Register src,
5795 Register count,
5796 Register scratch,
5797 bool ascii) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005798 NearLabel loop;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005799 __ bind(&loop);
5800 // This loop just copies one character at a time, as it is only used for very
5801 // short strings.
5802 if (ascii) {
5803 __ mov_b(scratch, Operand(src, 0));
5804 __ mov_b(Operand(dest, 0), scratch);
5805 __ add(Operand(src), Immediate(1));
5806 __ add(Operand(dest), Immediate(1));
5807 } else {
5808 __ mov_w(scratch, Operand(src, 0));
5809 __ mov_w(Operand(dest, 0), scratch);
5810 __ add(Operand(src), Immediate(2));
5811 __ add(Operand(dest), Immediate(2));
5812 }
5813 __ sub(Operand(count), Immediate(1));
5814 __ j(not_zero, &loop);
5815}
5816
5817
5818void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
5819 Register dest,
5820 Register src,
5821 Register count,
5822 Register scratch,
5823 bool ascii) {
5824 // Copy characters using rep movs of doublewords.
5825 // The destination is aligned on a 4 byte boundary because we are
5826 // copying to the beginning of a newly allocated string.
5827 ASSERT(dest.is(edi)); // rep movs destination
5828 ASSERT(src.is(esi)); // rep movs source
5829 ASSERT(count.is(ecx)); // rep movs count
5830 ASSERT(!scratch.is(dest));
5831 ASSERT(!scratch.is(src));
5832 ASSERT(!scratch.is(count));
5833
5834 // Nothing to do for zero characters.
5835 Label done;
5836 __ test(count, Operand(count));
5837 __ j(zero, &done);
5838
5839 // Make count the number of bytes to copy.
5840 if (!ascii) {
5841 __ shl(count, 1);
5842 }
5843
5844 // Don't enter the rep movs if there are less than 4 bytes to copy.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005845 NearLabel last_bytes;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005846 __ test(count, Immediate(~3));
5847 __ j(zero, &last_bytes);
5848
5849 // Copy from esi to edi using the rep movs instruction.
5850 __ mov(scratch, count);
5851 __ sar(count, 2); // Number of doublewords to copy.
5852 __ cld();
5853 __ rep_movs();
5854
5855 // Find number of bytes left.
5856 __ mov(count, scratch);
5857 __ and_(count, 3);
5858
5859 // Check if there are more bytes to copy.
5860 __ bind(&last_bytes);
5861 __ test(count, Operand(count));
5862 __ j(zero, &done);
5863
5864 // Copy remaining characters.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005865 NearLabel loop;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005866 __ bind(&loop);
5867 __ mov_b(scratch, Operand(src, 0));
5868 __ mov_b(Operand(dest, 0), scratch);
5869 __ add(Operand(src), Immediate(1));
5870 __ add(Operand(dest), Immediate(1));
5871 __ sub(Operand(count), Immediate(1));
5872 __ j(not_zero, &loop);
5873
5874 __ bind(&done);
5875}
5876
5877
5878void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
5879 Register c1,
5880 Register c2,
5881 Register scratch1,
5882 Register scratch2,
5883 Register scratch3,
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005884 Label* not_probed,
ricow@chromium.org65fae842010-08-25 15:26:24 +00005885 Label* not_found) {
5886 // Register scratch3 is the general scratch register in this function.
5887 Register scratch = scratch3;
5888
5889 // Make sure that both characters are not digits, as such strings have a
5890 // different hash algorithm. Don't try to look for these in the symbol table.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005891 NearLabel not_array_index;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005892 __ mov(scratch, c1);
5893 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5894 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
5895 __ j(above, &not_array_index);
5896 __ mov(scratch, c2);
5897 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5898 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005899 __ j(below_equal, not_probed);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005900
5901 __ bind(&not_array_index);
5902 // Calculate the two character string hash.
5903 Register hash = scratch1;
5904 GenerateHashInit(masm, hash, c1, scratch);
5905 GenerateHashAddCharacter(masm, hash, c2, scratch);
5906 GenerateHashGetHash(masm, hash, scratch);
5907
5908 // Collect the two characters in a register.
5909 Register chars = c1;
5910 __ shl(c2, kBitsPerByte);
5911 __ or_(chars, Operand(c2));
5912
5913 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
5914 // hash: hash of two character string.
5915
5916 // Load the symbol table.
5917 Register symbol_table = c2;
5918 ExternalReference roots_address = ExternalReference::roots_address();
5919 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
5920 __ mov(symbol_table,
5921 Operand::StaticArray(scratch, times_pointer_size, roots_address));
5922
5923 // Calculate capacity mask from the symbol table capacity.
5924 Register mask = scratch2;
5925 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
5926 __ SmiUntag(mask);
5927 __ sub(Operand(mask), Immediate(1));
5928
5929 // Registers
5930 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
5931 // hash: hash of two character string
5932 // symbol_table: symbol table
5933 // mask: capacity mask
5934 // scratch: -
5935
5936 // Perform a number of probes in the symbol table.
5937 static const int kProbes = 4;
5938 Label found_in_symbol_table;
5939 Label next_probe[kProbes], next_probe_pop_mask[kProbes];
5940 for (int i = 0; i < kProbes; i++) {
5941 // Calculate entry in symbol table.
5942 __ mov(scratch, hash);
5943 if (i > 0) {
5944 __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
5945 }
5946 __ and_(scratch, Operand(mask));
5947
5948 // Load the entry from the symbol table.
5949 Register candidate = scratch; // Scratch register contains candidate.
5950 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
5951 __ mov(candidate,
5952 FieldOperand(symbol_table,
5953 scratch,
5954 times_pointer_size,
5955 SymbolTable::kElementsStartOffset));
5956
5957 // If entry is undefined no string with this hash can be found.
5958 __ cmp(candidate, Factory::undefined_value());
5959 __ j(equal, not_found);
5960
5961 // If length is not 2 the string is not a candidate.
5962 __ cmp(FieldOperand(candidate, String::kLengthOffset),
5963 Immediate(Smi::FromInt(2)));
5964 __ j(not_equal, &next_probe[i]);
5965
5966 // As we are out of registers save the mask on the stack and use that
5967 // register as a temporary.
5968 __ push(mask);
5969 Register temp = mask;
5970
5971 // Check that the candidate is a non-external ascii string.
5972 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
5973 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
5974 __ JumpIfInstanceTypeIsNotSequentialAscii(
5975 temp, temp, &next_probe_pop_mask[i]);
5976
5977 // Check if the two characters match.
5978 __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
5979 __ and_(temp, 0x0000ffff);
5980 __ cmp(chars, Operand(temp));
5981 __ j(equal, &found_in_symbol_table);
5982 __ bind(&next_probe_pop_mask[i]);
5983 __ pop(mask);
5984 __ bind(&next_probe[i]);
5985 }
5986
5987 // No matching 2 character string found by probing.
5988 __ jmp(not_found);
5989
5990 // Scratch register contains result when we fall through to here.
5991 Register result = scratch;
5992 __ bind(&found_in_symbol_table);
5993 __ pop(mask); // Pop saved mask from the stack.
5994 if (!result.is(eax)) {
5995 __ mov(eax, result);
5996 }
5997}
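
// Illustrative sketch only (not part of the generated stub): the probing loop
// above corresponds roughly to the pseudocode below, where table is the
// symbol table, mask is capacity - 1, and the accessors KeyAt,
// IsSequentialAsciiStringOfLength2 and FirstTwoBytes are invented names for
// this comment, not real V8 API.
//
//   for (int i = 0; i < kProbes; i++) {
//     uint32_t entry = (hash + (i > 0 ? SymbolTable::GetProbeOffset(i) : 0))
//                      & mask;
//     Object* candidate = table->KeyAt(entry);
//     if (candidate->IsUndefined()) goto not_found;  // empty slot: no match
//     if (IsSequentialAsciiStringOfLength2(candidate) &&
//         FirstTwoBytes(candidate) == chars) {
//       return candidate;                            // found_in_symbol_table
//     }
//   }
//   goto not_found;                                  // give up after kProbes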


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ mov(hash, character);
  __ shl(hash, 10);
  __ add(hash, Operand(character));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, Operand(character));
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ sar(scratch, 11);
  __ xor_(hash, Operand(scratch));
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, Operand(scratch));

  // if (hash == 0) hash = 27;
  NearLabel hash_not_zero;
  __ test(hash, Operand(hash));
  __ j(not_zero, &hash_not_zero);
  __ mov(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
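
// Illustrative sketch only (not part of the generated stubs): taken together,
// the three hash helpers above compute the following for a two character
// string, which resembles the classic one-at-a-time string hash. Note that
// the generated code uses sar (arithmetic shift) for the right shifts, while
// this sketch uses unsigned arithmetic.
//
//   uint32_t hash = c1 + (c1 << 10);   // GenerateHashInit
//   hash ^= hash >> 6;
//   hash += c2;                        // GenerateHashAddCharacter
//   hash += hash << 10;
//   hash ^= hash >> 6;
//   hash += hash << 3;                 // GenerateHashGetHash
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   if (hash == 0) hash = 27;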


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: to
  // esp[8]: from
  // esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type

  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ sub(ecx, Operand(edx));
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
  Label return_eax;
  __ j(equal, &return_eax);
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked for in the symbol cache.
  __ SmiUntag(ecx);  // Result length is no longer smi.
  __ cmp(ecx, 2);
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // eax: string
  // ebx: instance type
  // ecx: sub string length (value is 2)
  // edx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiUntag(edx);  // From index is no longer smi.
  __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx,
             FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi,
      &make_two_character_string, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ Set(ecx, Immediate(2));

  __ bind(&result_longer_than_two);
  // eax: string
  // ebx: instance type
  // ecx: result string length
  // Check for flat ascii string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  __ SmiUntag(ebx);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  __ bind(&non_ascii_flat);
  // eax: string
  // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
  // ecx: result string length
  // Check for flat two byte string.
  __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  // As from is a smi, it is 2 times the value, which matches the size of a
  // two byte character.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
  __ mov(esi, edx);  // Restore esi.

  __ bind(&return_eax);
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}
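
// Illustrative sketch only (not part of the generated stub): the fast path
// above amounts to roughly the following, with every failure case falling
// through to Runtime::kSubString.
//
//   length = to - from;                       // both still smis here
//   if (length == string->length()) return string;  // identity sub string
//   if (length < 2) goto runtime;             // length 1 uses the runtime's
//                                             // single character cache
//   if (length == 2) try the two character symbol table probe first;
//   otherwise allocate a flat ascii or two byte result and copy `length`
//   characters starting at `from` with GenerateCopyCharactersREP.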


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Label result_not_equal;
  Label result_greater;
  Label compare_lengths;

  __ IncrementCounter(&Counters::string_compare_native, 1);

  // Find minimum length.
  NearLabel left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, Operand(length_delta));
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  __ test(min_length, Operand(min_length));
  __ j(zero, &compare_lengths);

  // Change index to run from -min_length to -1 by adding min_length
  // to string start. This means that loop ends when index reaches zero,
  // which doesn't need an additional compare.
  __ SmiUntag(min_length);
  __ lea(left,
         FieldOperand(left,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ neg(min_length);

  Register index = min_length;  // index = -min_length;

  {
    // Compare loop.
    NearLabel loop;
    __ bind(&loop);
    // Compare characters.
    __ mov_b(scratch2, Operand(left, index, times_1, 0));
    __ cmpb(scratch2, Operand(right, index, times_1, 0));
    __ j(not_equal, &result_not_equal);
    __ add(Operand(index), Immediate(1));
    __ j(not_zero, &loop);
  }

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, Operand(length_delta));
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  __ bind(&result_not_equal);
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}
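
// Illustrative sketch only (not part of the generated stub): the code above
// is the usual lexicographic comparison, roughly the helper below; the
// function name is made up for this comment. The generated code additionally
// biases the index to run from -min_length up to 0 so the character loop
// needs no separate bounds compare.
//
//   static int CompareFlatAscii(const char* a, int alen,
//                               const char* b, int blen) {
//     int min_length = alen < blen ? alen : blen;
//     for (int i = 0; i < min_length; i++) {
//       if (a[i] != b[i]) return a[i] < b[i] ? LESS : GREATER;
//     }
//     if (alen == blen) return EQUAL;
//     return alen < blen ? LESS : GREATER;
//   }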


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: right string
  // esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  NearLabel not_same;
  __ cmp(edx, Operand(eax));
  __ j(not_equal, &not_same);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ascii strings.
  // Drop arguments from the stack.
  __ pop(ecx);
  __ add(Operand(esp), Immediate(2 * kPointerSize));
  __ push(ecx);
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void StringCharAtStub::Generate(MacroAssembler* masm) {
  // Expects two arguments (object, index) on the stack:

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: index
  // esp[8]: object

  Register object = ebx;
  Register index = eax;
  Register scratch1 = ecx;
  Register scratch2 = edx;
  Register result = eax;

  __ pop(scratch1);  // Return address.
  __ pop(index);
  __ pop(object);
  __ push(scratch1);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch1,
                                  scratch2,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Set(result, Immediate(Factory::empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Set(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm, call_helper);

  __ bind(&done);
  __ ret(0);
}


void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  NearLabel miss;
  __ mov(ecx, Operand(edx));
  __ or_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &miss, not_taken);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, Operand(edx));
  } else {
    NearLabel done;
    __ sub(edx, Operand(eax));
    __ j(no_overflow, &done);
    // Correct sign of result in case of overflow.
    __ not_(edx);
    __ bind(&done);
    __ mov(eax, edx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
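
// Explanatory note (not part of the generated stub): in the non-equality case
// the result is roughly edx - eax on the tagged smi values, and only its sign
// matters to the caller. If the subtraction overflows, the computed value has
// the wrong sign; flipping its bits with not (i.e. -x - 1) produces a value
// with the opposite, and therefore correct, sign. Since smi differences are
// even, the flipped value cannot accidentally become zero.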


void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  NearLabel generic_stub;
  NearLabel unordered;
  NearLabel miss;
  __ mov(ecx, Operand(edx));
  __ and_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(zero, &generic_stub, not_taken);

  __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, not_taken);
  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, not_taken);

  // Inline the double comparison, falling back to the general compare
  // stub if NaN is involved or SSE2 or CMOV is unsupported.
  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
    CpuFeatures::Scope scope1(SSE2);
    CpuFeatures::Scope scope2(CMOV);

    // Load left and right operands.
    __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));

    // Compare operands.
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, not_taken);

    // Return a result of -1, 0, or 1, based on EFLAGS.
    // Performing mov, because xor would destroy the flag register.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, Operand(ecx));
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, Operand(ecx));
    __ ret(0);

    __ bind(&unordered);
  }

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}
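
// Illustrative sketch only (not part of the generated stub): the SSE2/CMOV
// sequence above computes a -1/0/1 result without branches, roughly:
//
//   if (isnan(left) || isnan(right)) goto generic_stub;  // ucomisd sets PF
//   int result = 0;                                       // equal
//   if (left > right) result = Smi::FromInt(1);           // cmov(above)
//   if (left < right) result = Smi::FromInt(-1);          // cmov(below)
//   return result;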


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECTS);
  NearLabel miss;
  __ mov(ecx, Operand(edx));
  __ and_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, not_taken);
  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, not_taken);

  ASSERT(GetCondition() == equal);
  __ sub(eax, Operand(edx));
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Save the registers.
  __ pop(ecx);
  __ push(edx);
  __ push(eax);
  __ push(ecx);

  // Call the runtime system in a fresh internal frame.
  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
  __ EnterInternalFrame();
  __ push(edx);
  __ push(eax);
  __ push(Immediate(Smi::FromInt(op_)));
  __ CallExternalReference(miss, 3);
  __ LeaveInternalFrame();

  // Compute the entry point of the rewritten stub.
  __ lea(edi, FieldOperand(eax, Code::kHeaderSize));

  // Restore registers.
  __ pop(ecx);
  __ pop(eax);
  __ pop(edx);
  __ push(ecx);

  // Do a tail call to the rewritten stub.
  __ jmp(Operand(edi));
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32