// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "code-stubs.h"
#include "bootstrapper.h"
#include "jsregexp.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in esi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ mov(edx, Operand(esp, 1 * kPointerSize));

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ mov(ebx, Immediate(Factory::empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(Factory::the_hole_value()));
  __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
  __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
  __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
         Immediate(Factory::undefined_value()));

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(ecx);  // Temporarily remove return address.
  __ pop(edx);
  __ push(esi);
  __ push(edx);
  __ push(Immediate(Factory::false_value()));
  __ push(ecx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Set up the fixed slots.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the surrounding context. We go through the
  // context in the function (ecx) to match the allocation behavior we have
  // in the runtime system (see Heap::AllocateFunctionContext).
  __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
  __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, Factory::undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  STATIC_ASSERT(kPointerSize == 4);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
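  // The literal index in eax is a smi, i.e. the index shifted left by one
  // (kSmiTagSize == 1, kSmiTag == 0), so scaling it by half a pointer size
  // yields the byte offset index * kPointerSize into the literals array.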
  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  __ cmp(ecx, Factory::undefined_value());
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Handle<Map> expected_map;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map = Factory::fixed_array_map();
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map = Factory::fixed_cow_array_map();
    }
    __ push(ecx);
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
    __ Assert(equal, message);
    __ pop(ecx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
  NearLabel false_result, true_result, not_string;
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // 'null' => false.
  __ cmp(eax, Factory::null_value());
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &not_string);
  STATIC_ASSERT(kSmiTag == 0);
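  // The string length is stored as a smi; with kSmiTag == 0 the smi for zero
  // is the machine word 0, so comparing the length field against the
  // immediate 0 directly tests for the empty string.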
  __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  // HeapNumber => false iff +0, -0, or NaN.
  __ cmp(edx, Factory::heap_number_map());
  __ j(not_equal, &true_result);
  __ fldz();
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ FCmp();
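  // FCmp pops both values off the FPU stack and transfers the comparison
  // result to the CPU flags; an unordered result (NaN) sets the zero flag as
  // well, so NaN lands in |false_result| together with +0 and -0.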
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in eax.
  __ bind(&true_result);
  __ mov(eax, 1);
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ mov(eax, 0);
  __ ret(1 * kPointerSize);
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ mov(right_arg, right);
      } else if (right.is(right_arg)) {
        __ mov(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ mov(left_arg, right);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying left argument.
          __ mov(left_arg, left);
          __ mov(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ mov(right_arg, left);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying right argument.
          __ mov(right_arg, right);
          __ mov(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ mov(left_arg, left);
        __ mov(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(Immediate(right));
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (left.is(left_arg)) {
      __ mov(right_arg, Immediate(right));
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ mov(left_arg, Immediate(right));
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ mov(left_arg, left);
      __ mov(right_arg, Immediate(right));
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(Immediate(left));
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (right.is(right_arg)) {
      __ mov(left_arg, Immediate(left));
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ mov(right_arg, Immediate(left));
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ mov(right_arg, right);
      __ mov(left_arg, Immediate(left));
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


class FloatingPointHelper : public AllStatic {
 public:

  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Checks that the two floating point numbers on top of the FPU stack
  // have int32 values.
  static void CheckFloatOperandsAreInt32(MacroAssembler* masm,
                                         Label* non_int32);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             TypeInfo type_info,
                             bool use_sse3,
                             Label* operand_conversion_failure);
  static void LoadNumbersAsIntegers(MacroAssembler* masm,
                                    TypeInfo type_info,
                                    bool use_sse3,
                                    Label* operand_conversion_failure);
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Must only be called after LoadUnknownsAsIntegers. Assumes that the
  // operands are pushed on the stack, and that their conversions to int32
  // are in eax and ecx. Checks that the original numbers were in the int32
  // range.
  static void CheckLoadedIntegersWereInt32(MacroAssembler* masm,
                                           bool use_sse3,
                                           Label* not_int32);

  // Assumes that operands are smis or heap numbers and loads them
  // into xmm0 and xmm1. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);

  // Checks that the two floating point numbers loaded into xmm0 and xmm1
  // have int32 values.
  static void CheckSSE2OperandsAreInt32(MacroAssembler* masm,
                                        Label* non_int32,
                                        Register scratch);
};


void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division. Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    if (HasArgsInRegisters()) {
      __ mov(ebx, eax);
      __ mov(eax, edx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ mov(right, Operand(esp, 1 * kPointerSize));
    __ mov(left, Operand(esp, 2 * kPointerSize));
  }

  if (static_operands_type_.IsSmi()) {
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      __ or_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_AND) {
      __ and_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_XOR) {
      __ xor_(right, Operand(left));
      GenerateReturn(masm);
      return;
    }
  }

  // 2. Prepare the smi check of both operands by oring them together.
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result. Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
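  // Since kSmiTag == 0, the tag bit of combined (left | right) is clear only
  // if it is clear in both operands, so a single tag test checks that both
  // operands are smis.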
  STATIC_ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
  __ test(combined, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smis, not_taken);

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
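      // A valid smi holds a signed 31-bit value, i.e. the range
      // [-0x40000000, 0x3fffffff]. Subtracting 0xc0000000 (-0x40000000) sets
      // the sign flag exactly when the value lies outside that range.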
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to negative when
      //   smi tagging.
      // These two cases can only happen with shifts by 0 or 1 when
      // handed a valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, slow, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis, not_taken);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      // Check for negative zero result. Use combined = left | right.
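      // A zero product must be handled as a heap number if either factor was
      // negative (the mathematical result is -0); the sign bit of combined
      // (left | right) tells whether a negative operand was involved.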
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi so
      // save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis, not_taken);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by idiv
      // instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
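      // The smallest smi is -0x40000000; dividing it by -1 gives 0x40000000,
      // which is one above the largest smi value and therefore cannot be
      // tagged again, so that quotient goes to the heap-number path.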
      __ cmp(eax, 0x40000000);
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis, not_taken);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax.
  GenerateReturn(masm);

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::SHL: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
        // Result we want is in left == edx, so we can put the allocated heap
        // number in eax.
        __ AllocateHeapNumber(eax, ecx, ebx, slow);
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(left));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          // It's OK to overwrite the right argument on the stack because we
          // are about to return.
          __ mov(Operand(esp, 1 * kPointerSize), left);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        GenerateReturn(masm);
      } else {
        ASSERT(runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI);
        __ jmp(slow);
      }
      break;
    }

    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Restore arguments to edx, eax.
      switch (op_) {
        case Token::ADD:
          // Revert right = right + left.
          __ sub(right, Operand(left));
          break;
        case Token::SUB:
          // Revert left = left - right.
          __ add(left, Operand(right));
          break;
        case Token::MUL:
          // Right was clobbered but a copy is in ebx.
          __ mov(right, ebx);
          break;
        case Token::DIV:
          // Left was clobbered but a copy is in edi. Right is in ebx for
          // division.
          __ mov(edx, edi);
          __ mov(eax, right);
          break;
        default: UNREACHABLE();
          break;
      }
      if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
        __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          FloatingPointHelper::LoadSSE2Smis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
        } else {  // SSE2 not available, use FPU.
          FloatingPointHelper::LoadFloatSmis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
        }
        __ mov(eax, ecx);
        GenerateReturn(masm);
      } else {
        ASSERT(runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI);
        __ jmp(slow);
      }
      break;
    }

    default:
      break;
  }

  // 7. Non-smi operands, fall out to the non-smi code with the operands in
  // edx and eax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  switch (op_) {
    case Token::BIT_OR:
    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Right operand is saved in ecx and eax was destroyed by the smi
      // check.
      __ mov(eax, ecx);
      break;

    case Token::DIV:
    case Token::MOD:
      // Operands are in eax, ebx at this point.
      __ mov(edx, eax);
      __ mov(eax, ebx);
      break;

    default:
      break;
  }
}


void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);

  if (runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI) {
    Label slow;
    if (ShouldGenerateSmiCode()) GenerateSmiCode(masm, &slow);
    __ bind(&slow);
    GenerateTypeTransition(masm);
  }

  // Generate fast case smi code if requested. This flag is set when the fast
  // case smi code is not generated by the caller. Generating it here will speed
  // up common operations.
  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {  // MOD goes straight to runtime.
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }

  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument occurs
          // (and only if smi code is generated). This is the right moment to
          // patch to HEAP_NUMBERS state. The transition is attempted only for
          // the four basic operations. The stub stays in the DEFAULT state
          // forever for all other operations (also if smi code is skipped).
          GenerateTypeTransition(masm);
          break;
        }

        Label not_floats;
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
            if (static_operands_type_.IsSmi()) {
              if (FLAG_debug_code) {
                __ AbortIfNotSmi(edx);
                __ AbortIfNotSmi(eax);
              }
              FloatingPointHelper::LoadSSE2Smis(masm, ecx);
            } else {
              FloatingPointHelper::LoadSSE2Operands(masm);
            }
          } else {
            FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
          }

          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          GenerateHeapResultAllocation(masm, &call_runtime);
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        } else {  // SSE2 not available, use FPU.
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
          } else {
            FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
          }
          FloatingPointHelper::LoadFloatOperands(
              masm,
              ecx,
              FloatingPointHelper::ARGS_IN_REGISTERS);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          Label after_alloc_failure;
          GenerateHeapResultAllocation(masm, &after_alloc_failure);
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          GenerateReturn(masm);
          __ bind(&after_alloc_failure);
          __ ffree();
          __ jmp(&call_runtime);
        }
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // Try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label non_smi_result;
        FloatingPointHelper::LoadAsIntegers(masm,
                                            static_operands_type_,
                                            use_sse3_,
                                            &call_runtime);
        switch (op_) {
          case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
          case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
          case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
          case Token::SAR: __ sar_cl(eax); break;
          case Token::SHL: __ shl_cl(eax); break;
          case Token::SHR: __ shr_cl(eax); break;
          default: UNREACHABLE();
        }
        if (op_ == Token::SHR) {
          // Check if result is non-negative and fits in a smi.
          __ test(eax, Immediate(0xc0000000));
          __ j(not_zero, &call_runtime);
        } else {
          // Check if result fits in a smi.
          __ cmp(eax, 0xc0000000);
          __ j(negative, &non_smi_result);
        }
        // Tag smi result and return.
        __ SmiTag(eax);
        GenerateReturn(masm);

        // All ops except SHR return a signed int32 that we load in
        // a HeapNumber.
        if (op_ != Token::SHR) {
          __ bind(&non_smi_result);
          // Allocate a heap number if needed.
          __ mov(ebx, Operand(eax));  // ebx: result
          NearLabel skip_allocation;
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                       1 * kPointerSize : 2 * kPointerSize));
              __ test(eax, Immediate(kSmiTagMask));
              __ j(not_zero, &skip_allocation, not_taken);
              // Fall through!
            case NO_OVERWRITE:
              __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(ebx));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), ebx);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
          GenerateReturn(masm);
        }
        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If the arguments were passed in registers, place them on the
  // stack in the correct order below the return address.

  // Avoid hitting the string ADD code below when allocation fails in
  // the floating point code above.
  if (op_ != Token::ADD) {
    __ bind(&call_runtime);
  }

  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Test for string arguments before calling runtime.

      // If this stub has already generated FP-specific code then the arguments
      // are already in edx, eax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      // Registers containing left and right operands respectively.
      Register lhs, rhs;
      if (HasArgsReversed()) {
        lhs = eax;
        rhs = edx;
      } else {
        lhs = edx;
        rhs = eax;
      }

      // Test if left operand is a string.
      NearLabel lhs_not_string;
      __ test(lhs, Immediate(kSmiTagMask));
      __ j(zero, &lhs_not_string);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &lhs_not_string);

      StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
      __ TailCallStub(&string_add_left_stub);

      NearLabel call_runtime_with_args;
      // Left operand is not a string, test right.
      __ bind(&lhs_not_string);
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &call_runtime_with_args);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &call_runtime_with_args);

      StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
      __ TailCallStub(&string_add_right_stub);

      // Neither argument is a string.
      __ bind(&call_runtime);
      if (HasArgsInRegisters()) {
        GenerateRegisterArgsPush(masm);
      }
      __ bind(&call_runtime_with_args);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                       Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  if (HasArgsReversed()) {
    if (mode == OVERWRITE_RIGHT) {
      mode = OVERWRITE_LEFT;
    } else if (mode == OVERWRITE_LEFT) {
      mode = OVERWRITE_RIGHT;
    }
  }
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use object in edx as a result holder.
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  // If arguments are not passed in registers read them from the stack.
  ASSERT(!HasArgsInRegisters());
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers remove them from the stack before
  // returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(ecx);
  if (HasArgsReversed()) {
    __ push(eax);
    __ push(edx);
  } else {
    __ push(edx);
    __ push(eax);
  }
  __ push(ecx);
}


void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  // Ensure the operands are on the stack.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  __ pop(ecx);  // Save return address.

  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(runtime_operands_type_)));
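  // Counting the two operands already on the stack, the runtime function
  // receives five arguments: left, right, and the three smi-encoded values
  // pushed above.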

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      5,
      1);
}


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


Handle<Code> GetTypeRecordingBinaryOpStub(int key,
    TRBinaryOpIC::TypeInfo type_info,
    TRBinaryOpIC::TypeInfo result_type_info) {
  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
  return stub.GetCode();
}


void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  __ push(edx);
  __ push(eax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
      5,
      1);
}


// Prepare for a type transition runtime call when the args are already on
// the stack, under the return address.
void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs(
    MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  // Left and right arguments are already on top of the stack.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
      5,
      1);
}


void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
  switch (operands_type_) {
    case TRBinaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case TRBinaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case TRBinaryOpIC::INT32:
      GenerateInt32Stub(masm);
      break;
    case TRBinaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case TRBinaryOpIC::STRING:
      GenerateStringStub(masm);
      break;
    case TRBinaryOpIC::GENERIC:
      GenerateGeneric(masm);
      break;
    default:
      UNREACHABLE();
  }
}


const char* TypeRecordingBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "TypeRecordingBinaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               TRBinaryOpIC::GetName(operands_type_));
  return name_;
}


void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
    Label* slow,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division. Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    __ mov(ebx, eax);
    __ mov(eax, edx);
  }

  // 2. Prepare the smi check of both operands by oring them together.
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result. Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
  STATIC_ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
  __ test(combined, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smis, not_taken);

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to negative when
      //   smi tagging.
      // These two cases can only happen with shifts by 0 or 1 when
      // handed a valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, slow, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis, not_taken);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi so
      // save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis, not_taken);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by idiv
      // instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis, not_taken);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax. Some operations have registers pushed.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
      __ ret(0);
      break;
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR:
      __ ret(2 * kPointerSize);
      break;
    default:
      UNREACHABLE();
  }

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) {
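    // Heap number results are not allowed in this mode: instead of
    // allocating, undo the effects of the smi operation below and fall
    // through to the non-smi code.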
    __ bind(&use_fp_on_smis);
    switch (op_) {
      // Undo the effects of some operations, and some register moves.
      case Token::SHL:
        // The arguments are saved on the stack, and only used from there.
        break;
      case Token::ADD:
        // Revert right = right + left.
        __ sub(right, Operand(left));
        break;
      case Token::SUB:
        // Revert left = left - right.
        __ add(left, Operand(right));
        break;
      case Token::MUL:
        // Right was clobbered but a copy is in ebx.
        __ mov(right, ebx);
        break;
      case Token::DIV:
        // Left was clobbered but a copy is in edi. Right is in ebx for
        // division. They should be in eax, ebx for the jump to not_smis.
        __ mov(eax, edi);
        break;
      default:
        // No other operators jump to use_fp_on_smis.
        break;
    }
    __ jmp(&not_smis);
  } else {
1611 ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
1612 switch (op_) {
1613 case Token::SHL: {
1614 Comment perform_float(masm, "-- Perform float operation on smis");
1615 __ bind(&use_fp_on_smis);
1616 // Result we want is in left == edx, so we can put the allocated heap
1617 // number in eax.
1618 __ AllocateHeapNumber(eax, ecx, ebx, slow);
1619 // Store the result in the HeapNumber and return.
1620 if (CpuFeatures::IsSupported(SSE2)) {
1621 CpuFeatures::Scope use_sse2(SSE2);
1622 __ cvtsi2sd(xmm0, Operand(left));
1623 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1624 } else {
1625 // It's OK to overwrite the right argument on the stack because we
1626 // are about to return.
1627 __ mov(Operand(esp, 1 * kPointerSize), left);
1628 __ fild_s(Operand(esp, 1 * kPointerSize));
1629 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1630 }
1631 __ ret(2 * kPointerSize);
1632 break;
1633 }
1634
1635 case Token::ADD:
1636 case Token::SUB:
1637 case Token::MUL:
1638 case Token::DIV: {
1639 Comment perform_float(masm, "-- Perform float operation on smis");
1640 __ bind(&use_fp_on_smis);
1641 // Restore arguments to edx, eax.
1642 switch (op_) {
1643 case Token::ADD:
1644 // Revert right = right + left.
1645 __ sub(right, Operand(left));
1646 break;
1647 case Token::SUB:
1648 // Revert left = left - right.
1649 __ add(left, Operand(right));
1650 break;
1651 case Token::MUL:
1652 // Right was clobbered but a copy is in ebx.
1653 __ mov(right, ebx);
1654 break;
1655 case Token::DIV:
1656 // Left was clobbered but a copy is in edi. Right is in ebx for
1657 // division.
1658 __ mov(edx, edi);
1659 __ mov(eax, right);
1660 break;
1661 default: UNREACHABLE();
1662 break;
1663 }
1664 __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
1665 if (CpuFeatures::IsSupported(SSE2)) {
1666 CpuFeatures::Scope use_sse2(SSE2);
1667 FloatingPointHelper::LoadSSE2Smis(masm, ebx);
1668 switch (op_) {
1669 case Token::ADD: __ addsd(xmm0, xmm1); break;
1670 case Token::SUB: __ subsd(xmm0, xmm1); break;
1671 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1672 case Token::DIV: __ divsd(xmm0, xmm1); break;
1673 default: UNREACHABLE();
1674 }
1675 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
1676 } else { // SSE2 not available, use FPU.
1677 FloatingPointHelper::LoadFloatSmis(masm, ebx);
1678 switch (op_) {
1679 case Token::ADD: __ faddp(1); break;
1680 case Token::SUB: __ fsubp(1); break;
1681 case Token::MUL: __ fmulp(1); break;
1682 case Token::DIV: __ fdivp(1); break;
1683 default: UNREACHABLE();
1684 }
1685 __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
1686 }
1687 __ mov(eax, ecx);
1688 __ ret(0);
1689 break;
1690 }
1691
1692 default:
1693 break;
1694 }
1695 }
1696
1697 // 7. Non-smi operands, fall out to the non-smi code with the operands in
1698 // edx and eax.
1699 Comment done_comment(masm, "-- Enter non-smi code");
1700 __ bind(&not_smis);
1701 switch (op_) {
1702 case Token::BIT_OR:
1703 case Token::SHL:
1704 case Token::SAR:
1705 case Token::SHR:
1706 // Right operand is saved in ecx and eax was destroyed by the smi
1707 // check.
1708 __ mov(eax, ecx);
1709 break;
1710
1711 case Token::DIV:
1712 case Token::MOD:
1713 // Operands are in eax, ebx at this point.
1714 __ mov(edx, eax);
1715 __ mov(eax, ebx);
1716 break;
1717
1718 default:
1719 break;
1720 }
1721}
1722
1723
1724void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1725 Label call_runtime;
1726
1727 switch (op_) {
1728 case Token::ADD:
1729 case Token::SUB:
1730 case Token::MUL:
1731 case Token::DIV:
1732 break;
1733 case Token::MOD:
1734 case Token::BIT_OR:
1735 case Token::BIT_AND:
1736 case Token::BIT_XOR:
1737 case Token::SAR:
1738 case Token::SHL:
1739 case Token::SHR:
1740 GenerateRegisterArgsPush(masm);
1741 break;
1742 default:
1743 UNREACHABLE();
1744 }
1745
1746 if (result_type_ == TRBinaryOpIC::UNINITIALIZED ||
1747 result_type_ == TRBinaryOpIC::SMI) {
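    // Only the smi fast path is inlined while the recorded result type is
    // still SMI (or uninitialized); an overflow then escapes to the type
    // transition below, presumably so that a wider result type is recorded
    // before heap number results are generated inline.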
1748 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
1749 } else {
1750 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1751 }
1752 __ bind(&call_runtime);
1753 switch (op_) {
1754 case Token::ADD:
1755 case Token::SUB:
1756 case Token::MUL:
1757 case Token::DIV:
1758 GenerateTypeTransition(masm);
1759 break;
1760 case Token::MOD:
1761 case Token::BIT_OR:
1762 case Token::BIT_AND:
1763 case Token::BIT_XOR:
1764 case Token::SAR:
1765 case Token::SHL:
1766 case Token::SHR:
1767 GenerateTypeTransitionWithSavedArgs(masm);
1768 break;
1769 default:
1770 UNREACHABLE();
1771 }
1772}
1773
1774
1775void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1776  ASSERT(operands_type_ == TRBinaryOpIC::STRING);
1777  ASSERT(op_ == Token::ADD);
1778  // Try to add the arguments as strings; otherwise, transition to the
1779  // generic TRBinaryOpIC type.
1780  GenerateAddStrings(masm);
1781  GenerateTypeTransition(masm);
1782}
1783
1784
1785void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1786 Label call_runtime;
1787 ASSERT(operands_type_ == TRBinaryOpIC::INT32);
1788
1789 // Floating point case.
1790 switch (op_) {
1791 case Token::ADD:
1792 case Token::SUB:
1793 case Token::MUL:
1794 case Token::DIV: {
1795 Label not_floats;
1796 Label not_int32;
1797 if (CpuFeatures::IsSupported(SSE2)) {
1798 CpuFeatures::Scope use_sse2(SSE2);
1799 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1800 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1801 switch (op_) {
1802 case Token::ADD: __ addsd(xmm0, xmm1); break;
1803 case Token::SUB: __ subsd(xmm0, xmm1); break;
1804 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1805 case Token::DIV: __ divsd(xmm0, xmm1); break;
1806 default: UNREACHABLE();
1807 }
1808 // Check result type if it is currently Int32.
1809 if (result_type_ <= TRBinaryOpIC::INT32) {
1810 __ cvttsd2si(ecx, Operand(xmm0));
1811 __ cvtsi2sd(xmm2, Operand(ecx));
1812 __ ucomisd(xmm0, xmm2);
1813 __ j(not_zero, &not_int32);
1814 __ j(carry, &not_int32);
1815 }
1816 GenerateHeapResultAllocation(masm, &call_runtime);
1817 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1818 __ ret(0);
1819 } else { // SSE2 not available, use FPU.
1820 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
1821 FloatingPointHelper::LoadFloatOperands(
1822 masm,
1823 ecx,
1824 FloatingPointHelper::ARGS_IN_REGISTERS);
1825 FloatingPointHelper::CheckFloatOperandsAreInt32(masm, &not_int32);
1826 switch (op_) {
1827 case Token::ADD: __ faddp(1); break;
1828 case Token::SUB: __ fsubp(1); break;
1829 case Token::MUL: __ fmulp(1); break;
1830 case Token::DIV: __ fdivp(1); break;
1831 default: UNREACHABLE();
1832 }
1833 Label after_alloc_failure;
1834 GenerateHeapResultAllocation(masm, &after_alloc_failure);
1835 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1836 __ ret(0);
1837 __ bind(&after_alloc_failure);
1838 __ ffree();
1839 __ jmp(&call_runtime);
1840 }
1841
1842 __ bind(&not_floats);
1843 __ bind(&not_int32);
1844 GenerateTypeTransition(masm);
1845 break;
1846 }
1847
1848 case Token::MOD: {
1849 // For MOD we go directly to runtime in the non-smi case.
1850 break;
1851 }
1852 case Token::BIT_OR:
1853 case Token::BIT_AND:
1854 case Token::BIT_XOR:
1855 case Token::SAR:
1856 case Token::SHL:
1857 case Token::SHR: {
1858 GenerateRegisterArgsPush(masm);
1859 Label not_floats;
1860 Label not_int32;
1861 Label non_smi_result;
1862 /* {
1863 CpuFeatures::Scope use_sse2(SSE2);
1864 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1865 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1866 }*/
1867 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
1868 use_sse3_,
1869 &not_floats);
1870 FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_,
1871 &not_int32);
1872 switch (op_) {
1873 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
1874 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
1875 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
1876 case Token::SAR: __ sar_cl(eax); break;
1877 case Token::SHL: __ shl_cl(eax); break;
1878 case Token::SHR: __ shr_cl(eax); break;
1879 default: UNREACHABLE();
1880 }
1881 if (op_ == Token::SHR) {
1882 // Check if result is non-negative and fits in a smi.
1883 __ test(eax, Immediate(0xc0000000));
1884 __ j(not_zero, &call_runtime);
1885 } else {
1886 // Check if result fits in a smi.
1887 __ cmp(eax, 0xc0000000);
1888 __ j(negative, &non_smi_result);
1889 }
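      // Both checks test the same property: a 32-bit value fits in a smi only
      // if it lies in [-2^30, 2^30), i.e. its top two bits are equal. For the
      // unsigned SHR result, any of the top two bits set (mask 0xc0000000)
      // means out of range; for the signed ops, eax - 0xc0000000 is negative
      // exactly for the out-of-range band [0x40000000, 0xbfffffff].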
1890 // Tag smi result and return.
1891 __ SmiTag(eax);
1892 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1893
1894 // All ops except SHR return a signed int32 that we load in
1895 // a HeapNumber.
1896 if (op_ != Token::SHR) {
1897 __ bind(&non_smi_result);
1898 // Allocate a heap number if needed.
1899 __ mov(ebx, Operand(eax)); // ebx: result
1900 NearLabel skip_allocation;
1901 switch (mode_) {
1902 case OVERWRITE_LEFT:
1903 case OVERWRITE_RIGHT:
1904 // If the operand was an object, we skip the
1905 // allocation of a heap number.
1906 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1907 1 * kPointerSize : 2 * kPointerSize));
1908 __ test(eax, Immediate(kSmiTagMask));
1909 __ j(not_zero, &skip_allocation, not_taken);
1910 // Fall through!
1911 case NO_OVERWRITE:
1912 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1913 __ bind(&skip_allocation);
1914 break;
1915 default: UNREACHABLE();
1916 }
1917 // Store the result in the HeapNumber and return.
1918 if (CpuFeatures::IsSupported(SSE2)) {
1919 CpuFeatures::Scope use_sse2(SSE2);
1920 __ cvtsi2sd(xmm0, Operand(ebx));
1921 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1922 } else {
1923 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1924 __ fild_s(Operand(esp, 1 * kPointerSize));
1925 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1926 }
1927 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1928 }
1929
1930 __ bind(&not_floats);
1931 __ bind(&not_int32);
1932 GenerateTypeTransitionWithSavedArgs(masm);
1933 break;
1934 }
1935 default: UNREACHABLE(); break;
1936 }
1937
1938 // If an allocation fails, or SHR or MOD hit a hard case,
1939 // use the runtime system to get the correct result.
1940 __ bind(&call_runtime);
1941
1942 switch (op_) {
1943 case Token::ADD:
1944 GenerateRegisterArgsPush(masm);
1945 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
1946 break;
1947 case Token::SUB:
1948 GenerateRegisterArgsPush(masm);
1949 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
1950 break;
1951 case Token::MUL:
1952 GenerateRegisterArgsPush(masm);
1953 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
1954 break;
1955 case Token::DIV:
1956 GenerateRegisterArgsPush(masm);
1957 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
1958 break;
1959 case Token::MOD:
1960 GenerateRegisterArgsPush(masm);
1961 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
1962 break;
1963 case Token::BIT_OR:
1964 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
1965 break;
1966 case Token::BIT_AND:
1967 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
1968 break;
1969 case Token::BIT_XOR:
1970 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
1971 break;
1972 case Token::SAR:
1973 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
1974 break;
1975 case Token::SHL:
1976 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
1977 break;
1978 case Token::SHR:
1979 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1980 break;
1981 default:
1982 UNREACHABLE();
1983 }
1984}
1985
1986
1987void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1988 Label call_runtime;
1989  ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER);
1990
1991 // Floating point case.
1992 switch (op_) {
1993 case Token::ADD:
1994 case Token::SUB:
1995 case Token::MUL:
1996 case Token::DIV: {
1997 Label not_floats;
1998 if (CpuFeatures::IsSupported(SSE2)) {
1999 CpuFeatures::Scope use_sse2(SSE2);
2000 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
2001
2002 switch (op_) {
2003 case Token::ADD: __ addsd(xmm0, xmm1); break;
2004 case Token::SUB: __ subsd(xmm0, xmm1); break;
2005 case Token::MUL: __ mulsd(xmm0, xmm1); break;
2006 case Token::DIV: __ divsd(xmm0, xmm1); break;
2007 default: UNREACHABLE();
2008 }
2009 GenerateHeapResultAllocation(masm, &call_runtime);
2010 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2011 __ ret(0);
2012 } else { // SSE2 not available, use FPU.
2013 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
2014 FloatingPointHelper::LoadFloatOperands(
2015 masm,
2016 ecx,
2017 FloatingPointHelper::ARGS_IN_REGISTERS);
2018 switch (op_) {
2019 case Token::ADD: __ faddp(1); break;
2020 case Token::SUB: __ fsubp(1); break;
2021 case Token::MUL: __ fmulp(1); break;
2022 case Token::DIV: __ fdivp(1); break;
2023 default: UNREACHABLE();
2024 }
2025 Label after_alloc_failure;
2026 GenerateHeapResultAllocation(masm, &after_alloc_failure);
2027 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2028 __ ret(0);
2029 __ bind(&after_alloc_failure);
2030 __ ffree();
2031 __ jmp(&call_runtime);
2032 }
2033
2034 __ bind(&not_floats);
2035 GenerateTypeTransition(masm);
2036 break;
2037 }
2038
2039 case Token::MOD: {
2040 // For MOD we go directly to runtime in the non-smi case.
2041 break;
2042 }
2043 case Token::BIT_OR:
2044 case Token::BIT_AND:
2045 case Token::BIT_XOR:
2046 case Token::SAR:
2047 case Token::SHL:
2048 case Token::SHR: {
2049 GenerateRegisterArgsPush(masm);
2050 Label not_floats;
2051 Label non_smi_result;
2052 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
2053 use_sse3_,
2054 &not_floats);
2055 switch (op_) {
2056 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
2057 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
2058 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
2059 case Token::SAR: __ sar_cl(eax); break;
2060 case Token::SHL: __ shl_cl(eax); break;
2061 case Token::SHR: __ shr_cl(eax); break;
2062 default: UNREACHABLE();
2063 }
2064 if (op_ == Token::SHR) {
2065 // Check if result is non-negative and fits in a smi.
2066 __ test(eax, Immediate(0xc0000000));
2067 __ j(not_zero, &call_runtime);
2068 } else {
2069 // Check if result fits in a smi.
2070 __ cmp(eax, 0xc0000000);
2071 __ j(negative, &non_smi_result);
2072 }
2073 // Tag smi result and return.
2074 __ SmiTag(eax);
2075 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
2076
2077 // All ops except SHR return a signed int32 that we load in
2078 // a HeapNumber.
2079 if (op_ != Token::SHR) {
2080 __ bind(&non_smi_result);
2081 // Allocate a heap number if needed.
2082 __ mov(ebx, Operand(eax)); // ebx: result
2083 NearLabel skip_allocation;
2084 switch (mode_) {
2085 case OVERWRITE_LEFT:
2086 case OVERWRITE_RIGHT:
2087 // If the operand was an object, we skip the
2088 // allocation of a heap number.
2089 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
2090 1 * kPointerSize : 2 * kPointerSize));
2091 __ test(eax, Immediate(kSmiTagMask));
2092 __ j(not_zero, &skip_allocation, not_taken);
2093 // Fall through!
2094 case NO_OVERWRITE:
2095 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2096 __ bind(&skip_allocation);
2097 break;
2098 default: UNREACHABLE();
2099 }
2100 // Store the result in the HeapNumber and return.
2101 if (CpuFeatures::IsSupported(SSE2)) {
2102 CpuFeatures::Scope use_sse2(SSE2);
2103 __ cvtsi2sd(xmm0, Operand(ebx));
2104 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2105 } else {
2106 __ mov(Operand(esp, 1 * kPointerSize), ebx);
2107 __ fild_s(Operand(esp, 1 * kPointerSize));
2108 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2109 }
2110 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
2111 }
2112
2113 __ bind(&not_floats);
2114 GenerateTypeTransitionWithSavedArgs(masm);
2115 break;
2116 }
2117 default: UNREACHABLE(); break;
2118 }
2119
2120 // If an allocation fails, or SHR or MOD hit a hard case,
2121 // use the runtime system to get the correct result.
2122 __ bind(&call_runtime);
2123
2124 switch (op_) {
2125 case Token::ADD:
2126 GenerateRegisterArgsPush(masm);
2127 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
2128 break;
2129 case Token::SUB:
2130 GenerateRegisterArgsPush(masm);
2131 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
2132 break;
2133 case Token::MUL:
2134 GenerateRegisterArgsPush(masm);
2135 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
2136 break;
2137 case Token::DIV:
2138 GenerateRegisterArgsPush(masm);
2139 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
2140 break;
2141 case Token::MOD:
2142 GenerateRegisterArgsPush(masm);
2143 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
2144 break;
2145 case Token::BIT_OR:
2146 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
2147 break;
2148 case Token::BIT_AND:
2149 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
2150 break;
2151 case Token::BIT_XOR:
2152 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
2153 break;
2154 case Token::SAR:
2155 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
2156 break;
2157 case Token::SHL:
2158 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
2159 break;
2160 case Token::SHR:
2161 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2162 break;
2163 default:
2164 UNREACHABLE();
2165 }
2166}
2167
2168
2169void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
2170 Label call_runtime;
2171
2172 __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
2173
2174 switch (op_) {
2175 case Token::ADD:
2176 case Token::SUB:
2177 case Token::MUL:
2178 case Token::DIV:
2179 break;
2180 case Token::MOD:
2181 case Token::BIT_OR:
2182 case Token::BIT_AND:
2183 case Token::BIT_XOR:
2184 case Token::SAR:
2185 case Token::SHL:
2186 case Token::SHR:
2187 GenerateRegisterArgsPush(masm);
2188 break;
2189 default:
2190 UNREACHABLE();
2191 }
2192
2193 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
2194
2195 // Floating point case.
2196 switch (op_) {
2197 case Token::ADD:
2198 case Token::SUB:
2199 case Token::MUL:
2200 case Token::DIV: {
2201 Label not_floats;
2202 if (CpuFeatures::IsSupported(SSE2)) {
2203 CpuFeatures::Scope use_sse2(SSE2);
2204 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
2205
2206 switch (op_) {
2207 case Token::ADD: __ addsd(xmm0, xmm1); break;
2208 case Token::SUB: __ subsd(xmm0, xmm1); break;
2209 case Token::MUL: __ mulsd(xmm0, xmm1); break;
2210 case Token::DIV: __ divsd(xmm0, xmm1); break;
2211 default: UNREACHABLE();
2212 }
2213 GenerateHeapResultAllocation(masm, &call_runtime);
2214 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2215 __ ret(0);
2216 } else { // SSE2 not available, use FPU.
2217 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
2218 FloatingPointHelper::LoadFloatOperands(
2219 masm,
2220 ecx,
2221 FloatingPointHelper::ARGS_IN_REGISTERS);
2222 switch (op_) {
2223 case Token::ADD: __ faddp(1); break;
2224 case Token::SUB: __ fsubp(1); break;
2225 case Token::MUL: __ fmulp(1); break;
2226 case Token::DIV: __ fdivp(1); break;
2227 default: UNREACHABLE();
2228 }
2229 Label after_alloc_failure;
2230 GenerateHeapResultAllocation(masm, &after_alloc_failure);
2231 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2232 __ ret(0);
2233 __ bind(&after_alloc_failure);
2234 __ ffree();
2235 __ jmp(&call_runtime);
2236 }
2237 __ bind(&not_floats);
2238 break;
2239 }
2240 case Token::MOD: {
2241 // For MOD we go directly to runtime in the non-smi case.
2242 break;
2243 }
2244 case Token::BIT_OR:
2245 case Token::BIT_AND:
2246 case Token::BIT_XOR:
2247 case Token::SAR:
2248 case Token::SHL:
2249 case Token::SHR: {
2250 Label non_smi_result;
2251 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
2252 use_sse3_,
2253 &call_runtime);
2254 switch (op_) {
2255 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
2256 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
2257 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
2258 case Token::SAR: __ sar_cl(eax); break;
2259 case Token::SHL: __ shl_cl(eax); break;
2260 case Token::SHR: __ shr_cl(eax); break;
2261 default: UNREACHABLE();
2262 }
2263 if (op_ == Token::SHR) {
2264 // Check if result is non-negative and fits in a smi.
2265 __ test(eax, Immediate(0xc0000000));
2266 __ j(not_zero, &call_runtime);
2267 } else {
2268 // Check if result fits in a smi.
2269 __ cmp(eax, 0xc0000000);
2270 __ j(negative, &non_smi_result);
2271 }
2272 // Tag smi result and return.
2273 __ SmiTag(eax);
2274 __ ret(2 * kPointerSize); // Drop the arguments from the stack.
2275
2276 // All ops except SHR return a signed int32 that we load in
2277 // a HeapNumber.
2278 if (op_ != Token::SHR) {
2279 __ bind(&non_smi_result);
2280 // Allocate a heap number if needed.
2281 __ mov(ebx, Operand(eax)); // ebx: result
2282 NearLabel skip_allocation;
2283 switch (mode_) {
2284 case OVERWRITE_LEFT:
2285 case OVERWRITE_RIGHT:
2286 // If the operand was an object, we skip the
2287 // allocation of a heap number.
2288 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
2289 1 * kPointerSize : 2 * kPointerSize));
2290 __ test(eax, Immediate(kSmiTagMask));
2291 __ j(not_zero, &skip_allocation, not_taken);
2292 // Fall through!
2293 case NO_OVERWRITE:
2294 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2295 __ bind(&skip_allocation);
2296 break;
2297 default: UNREACHABLE();
2298 }
2299 // Store the result in the HeapNumber and return.
2300 if (CpuFeatures::IsSupported(SSE2)) {
2301 CpuFeatures::Scope use_sse2(SSE2);
2302 __ cvtsi2sd(xmm0, Operand(ebx));
2303 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2304 } else {
2305 __ mov(Operand(esp, 1 * kPointerSize), ebx);
2306 __ fild_s(Operand(esp, 1 * kPointerSize));
2307 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2308 }
2309 __ ret(2 * kPointerSize);
2310 }
2311 break;
2312 }
2313 default: UNREACHABLE(); break;
2314 }
2315
2316 // If all else fails, use the runtime system to get the correct
2317 // result.
2318 __ bind(&call_runtime);
2319 switch (op_) {
2320 case Token::ADD: {
2321      GenerateAddStrings(masm);
2322      GenerateRegisterArgsPush(masm);
2323      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
2324 break;
2325 }
2326 case Token::SUB:
2327 GenerateRegisterArgsPush(masm);
2328 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
2329 break;
2330 case Token::MUL:
2331 GenerateRegisterArgsPush(masm);
2332 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
2333 break;
2334 case Token::DIV:
2335 GenerateRegisterArgsPush(masm);
2336 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
2337 break;
2338 case Token::MOD:
2339 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
2340 break;
2341 case Token::BIT_OR:
2342 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
2343 break;
2344 case Token::BIT_AND:
2345 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
2346 break;
2347 case Token::BIT_XOR:
2348 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
2349 break;
2350 case Token::SAR:
2351 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
2352 break;
2353 case Token::SHL:
2354 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
2355 break;
2356 case Token::SHR:
2357 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2358 break;
2359 default:
2360 UNREACHABLE();
2361 }
2362}
2363
2364
2365void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
2366 NearLabel call_runtime;
2367
2368 // Registers containing left and right operands respectively.
2369 Register left = edx;
2370 Register right = eax;
2371
2372 // Test if left operand is a string.
2373 NearLabel left_not_string;
2374 __ test(left, Immediate(kSmiTagMask));
2375 __ j(zero, &left_not_string);
2376 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
2377 __ j(above_equal, &left_not_string);
2378
2379 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
2380 GenerateRegisterArgsPush(masm);
2381 __ TailCallStub(&string_add_left_stub);
2382
2383 // Left operand is not a string, test right.
2384 __ bind(&left_not_string);
2385 __ test(right, Immediate(kSmiTagMask));
2386 __ j(zero, &call_runtime);
2387 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
2388 __ j(above_equal, &call_runtime);
2389
2390 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
2391 GenerateRegisterArgsPush(masm);
2392 __ TailCallStub(&string_add_right_stub);
2393
2394 // Neither argument is a string.
2395 __ bind(&call_runtime);
2396}
2397
2398
2399void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
2400 MacroAssembler* masm,
2401 Label* alloc_failure) {
2402 Label skip_allocation;
2403 OverwriteMode mode = mode_;
2404 switch (mode) {
2405 case OVERWRITE_LEFT: {
2406 // If the argument in edx is already an object, we skip the
2407 // allocation of a heap number.
2408 __ test(edx, Immediate(kSmiTagMask));
2409 __ j(not_zero, &skip_allocation, not_taken);
2410 // Allocate a heap number for the result. Keep eax and edx intact
2411 // for the possible runtime call.
2412 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2413 // Now edx can be overwritten losing one of the arguments as we are
2414 // now done and will not need it any more.
2415 __ mov(edx, Operand(ebx));
2416 __ bind(&skip_allocation);
2417 // Use object in edx as a result holder
2418 __ mov(eax, Operand(edx));
2419 break;
2420 }
2421 case OVERWRITE_RIGHT:
2422 // If the argument in eax is already an object, we skip the
2423 // allocation of a heap number.
2424 __ test(eax, Immediate(kSmiTagMask));
2425 __ j(not_zero, &skip_allocation, not_taken);
2426 // Fall through!
2427 case NO_OVERWRITE:
2428 // Allocate a heap number for the result. Keep eax and edx intact
2429 // for the possible runtime call.
2430 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2431 // Now eax can be overwritten losing one of the arguments as we are
2432 // now done and will not need it any more.
2433 __ mov(eax, ebx);
2434 __ bind(&skip_allocation);
2435 break;
2436 default: UNREACHABLE();
2437 }
2438}
2439
2440
2441void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
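  // Rotate the return address below the two operands. On exit the stack is:
  //   esp[0]: return address
  //   esp[4]: right operand (eax)
  //   esp[8]: left operand (edx)
  // which keeps the operands under the return address for the builtin calls
  // and for the ret(2 * kPointerSize) exits above.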
2442 __ pop(ecx);
2443 __ push(edx);
2444 __ push(eax);
2445 __ push(ecx);
2446}
2447
2448
2449void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
2450  // TAGGED case:
2451 // Input:
2452 // esp[4]: tagged number input argument (should be number).
2453 // esp[0]: return address.
2454 // Output:
2455 // eax: tagged double result.
2456 // UNTAGGED case:
2457  // Input:
2458 // esp[0]: return address.
2459 // xmm1: untagged double input argument
2460 // Output:
2461 // xmm1: untagged double result.
2462
2463  Label runtime_call;
2464 Label runtime_call_clear_stack;
2465  Label skip_cache;
2466 const bool tagged = (argument_type_ == TAGGED);
2467 if (tagged) {
2468 // Test that eax is a number.
2469 NearLabel input_not_smi;
2470 NearLabel loaded;
2471 __ mov(eax, Operand(esp, kPointerSize));
2472 __ test(eax, Immediate(kSmiTagMask));
2473 __ j(not_zero, &input_not_smi);
2474 // Input is a smi. Untag and load it onto the FPU stack.
2475 // Then load the low and high words of the double into ebx, edx.
2476 STATIC_ASSERT(kSmiTagSize == 1);
2477 __ sar(eax, 1);
2478 __ sub(Operand(esp), Immediate(2 * kPointerSize));
2479 __ mov(Operand(esp, 0), eax);
2480 __ fild_s(Operand(esp, 0));
2481 __ fst_d(Operand(esp, 0));
2482 __ pop(edx);
2483 __ pop(ebx);
2484 __ jmp(&loaded);
2485 __ bind(&input_not_smi);
2486 // Check if input is a HeapNumber.
2487 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2488 __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
2489 __ j(not_equal, &runtime_call);
2490 // Input is a HeapNumber. Push it on the FPU stack and load its
2491 // low and high words into ebx, edx.
2492 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
2493 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
2494 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));
2495
2496    __ bind(&loaded);
2497 } else { // UNTAGGED.
2498 if (CpuFeatures::IsSupported(SSE4_1)) {
2499 CpuFeatures::Scope sse4_scope(SSE4_1);
2500 __ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx.
2501 } else {
2502 __ pshufd(xmm0, xmm1, 0x1);
2503 __ movd(Operand(edx), xmm0);
2504 }
2505 __ movd(Operand(ebx), xmm1);
2506 }
2507
2508 // ST[0] or xmm1 == double value
2509  // ebx = low 32 bits of double value
2510 // edx = high 32 bits of double value
2511 // Compute hash (the shifts are arithmetic):
2512 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
2513 __ mov(ecx, ebx);
2514 __ xor_(ecx, Operand(edx));
2515 __ mov(eax, ecx);
2516 __ sar(eax, 16);
2517 __ xor_(ecx, Operand(eax));
2518 __ mov(eax, ecx);
2519 __ sar(eax, 8);
2520 __ xor_(ecx, Operand(eax));
2521 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
2522 __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
2523
2524  // ST[0] or xmm1 == double value.
2525  // ebx = low 32 bits of double value.
2526 // edx = high 32 bits of double value.
2527 // ecx = TranscendentalCache::hash(double value).
2528 __ mov(eax,
2529 Immediate(ExternalReference::transcendental_cache_array_address()));
2530 // Eax points to cache array.
2531 __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
2532 // Eax points to the cache for the type type_.
2533 // If NULL, the cache hasn't been initialized yet, so go through runtime.
2534 __ test(eax, Operand(eax));
2535 __ j(zero, &runtime_call_clear_stack);
2536#ifdef DEBUG
2537 // Check that the layout of cache elements match expectations.
2538 { TranscendentalCache::Element test_elem[2];
2539 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
2540 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
2541 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
2542 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
2543 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
2544 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer.
2545 CHECK_EQ(0, elem_in0 - elem_start);
2546 CHECK_EQ(kIntSize, elem_in1 - elem_start);
2547 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
2548 }
2549#endif
2550 // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
2551 __ lea(ecx, Operand(ecx, ecx, times_2, 0));
2552 __ lea(ecx, Operand(eax, ecx, times_4, 0));
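  // The two leas compute eax + 12 * index: the first turns the index into
  // 3 * index (ecx + 2 * ecx), the second scales that by 4 and adds the cache
  // base, matching the 12-byte Element layout checked above.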
2553 // Check if cache matches: Double value is stored in uint32_t[2] array.
2554  NearLabel cache_miss;
2555  __ cmp(ebx, Operand(ecx, 0));
2556 __ j(not_equal, &cache_miss);
2557 __ cmp(edx, Operand(ecx, kIntSize));
2558 __ j(not_equal, &cache_miss);
2559 // Cache hit!
2560 __ mov(eax, Operand(ecx, 2 * kIntSize));
2561  if (tagged) {
2562 __ fstp(0);
2563 __ ret(kPointerSize);
2564 } else { // UNTAGGED.
2565 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2566 __ Ret();
2567 }
2568
2569 __ bind(&cache_miss);
2570 // Update cache with new value.
2571 // We are short on registers, so use no_reg as scratch.
2572 // This gives slightly larger code.
2573  if (tagged) {
2574 __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
2575 } else { // UNTAGGED.
2576 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
2577 __ sub(Operand(esp), Immediate(kDoubleSize));
2578 __ movdbl(Operand(esp, 0), xmm1);
2579 __ fld_d(Operand(esp, 0));
2580 __ add(Operand(esp), Immediate(kDoubleSize));
2581 }
2582  GenerateOperation(masm);
2583 __ mov(Operand(ecx, 0), ebx);
2584 __ mov(Operand(ecx, kIntSize), edx);
2585 __ mov(Operand(ecx, 2 * kIntSize), eax);
2586 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2587  if (tagged) {
2588 __ ret(kPointerSize);
2589 } else { // UNTAGGED.
2590 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2591 __ Ret();
2592
2593    // Skip cache and return answer directly, only in untagged case.
2594 __ bind(&skip_cache);
2595 __ sub(Operand(esp), Immediate(kDoubleSize));
2596 __ movdbl(Operand(esp, 0), xmm1);
2597 __ fld_d(Operand(esp, 0));
2598 GenerateOperation(masm);
2599 __ fstp_d(Operand(esp, 0));
2600 __ movdbl(xmm1, Operand(esp, 0));
2601 __ add(Operand(esp), Immediate(kDoubleSize));
2602 // We return the value in xmm1 without adding it to the cache, but
2603 // we cause a scavenging GC so that future allocations will succeed.
2604 __ EnterInternalFrame();
2605 // Allocate an unused object bigger than a HeapNumber.
2606 __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
2607 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
2608 __ LeaveInternalFrame();
2609 __ Ret();
2610 }
2611
2612 // Call runtime, doing whatever allocation and cleanup is necessary.
2613 if (tagged) {
2614 __ bind(&runtime_call_clear_stack);
2615 __ fstp(0);
2616 __ bind(&runtime_call);
2617 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
2618 } else { // UNTAGGED.
2619 __ bind(&runtime_call_clear_stack);
2620 __ bind(&runtime_call);
2621 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
2622 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
2623 __ EnterInternalFrame();
2624 __ push(eax);
2625 __ CallRuntime(RuntimeFunction(), 1);
2626 __ LeaveInternalFrame();
2627 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2628 __ Ret();
2629 }
2630}
2631
2632
2633Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
2634 switch (type_) {
2635    case TranscendentalCache::SIN: return Runtime::kMath_sin;
2636    case TranscendentalCache::COS: return Runtime::kMath_cos;
2637    case TranscendentalCache::LOG: return Runtime::kMath_log;
2638    default:
2639 UNIMPLEMENTED();
2640 return Runtime::kAbort;
2641 }
2642}
2643
2644
2645void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
2646 // Only free register is edi.
2647  // Input value is on FP stack, and also in ebx/edx.
2648 // Input value is possibly in xmm1.
2649 // Address of result (a newly allocated HeapNumber) may be in eax.
2650  if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
2651 // Both fsin and fcos require arguments in the range +/-2^63 and
2652 // return NaN for infinities and NaN. They can share all code except
2653 // the actual fsin/fcos operation.
2654    NearLabel in_range, done;
2655    // If argument is outside the range -2^63..2^63, fsin/cos doesn't
2656 // work. We must reduce it to the appropriate range.
2657 __ mov(edi, edx);
2658 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only.
2659 int supported_exponent_limit =
2660 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
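    // A biased exponent field of 63 + kExponentBias or more means the
    // argument's magnitude is at least 2^63. Comparing only the masked high
    // word (edi) is enough because the exponent lives entirely in the top
    // 32 bits.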
2661 __ cmp(Operand(edi), Immediate(supported_exponent_limit));
2662 __ j(below, &in_range, taken);
2663 // Check for infinity and NaN. Both return NaN for sin.
2664 __ cmp(Operand(edi), Immediate(0x7ff00000));
2665 NearLabel non_nan_result;
2666 __ j(not_equal, &non_nan_result, taken);
2667 // Input is +/-Infinity or NaN. Result is NaN.
2668 __ fstp(0);
2669 // NaN is represented by 0x7ff8000000000000.
2670 __ push(Immediate(0x7ff80000));
2671 __ push(Immediate(0));
2672 __ fld_d(Operand(esp, 0));
2673 __ add(Operand(esp), Immediate(2 * kPointerSize));
2674 __ jmp(&done);
2675
2676    __ bind(&non_nan_result);
2677
2678    // Use fpmod to restrict argument to the range +/-2*PI.
2679 __ mov(edi, eax); // Save eax before using fnstsw_ax.
2680 __ fldpi();
2681 __ fadd(0);
2682 __ fld(1);
2683 // FPU Stack: input, 2*pi, input.
2684 {
2685 NearLabel no_exceptions;
2686 __ fwait();
2687 __ fnstsw_ax();
2688 // Clear if Illegal Operand or Zero Division exceptions are set.
2689 __ test(Operand(eax), Immediate(5));
2690 __ j(zero, &no_exceptions);
2691 __ fnclex();
2692 __ bind(&no_exceptions);
2693 }
2694
2695    // Compute st(0) % st(1)
2696 {
2697 NearLabel partial_remainder_loop;
2698 __ bind(&partial_remainder_loop);
2699 __ fprem1();
2700 __ fwait();
2701 __ fnstsw_ax();
2702 __ test(Operand(eax), Immediate(0x400 /* C2 */));
2703 // If C2 is set, computation only has partial result. Loop to
2704 // continue computation.
2705 __ j(not_zero, &partial_remainder_loop);
2706 }
2707 // FPU Stack: input, 2*pi, input % 2*pi
2708 __ fstp(2);
2709 __ fstp(0);
2710 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer).
2711
2712 // FPU Stack: input % 2*pi
2713 __ bind(&in_range);
2714 switch (type_) {
2715 case TranscendentalCache::SIN:
2716 __ fsin();
2717 break;
2718 case TranscendentalCache::COS:
2719 __ fcos();
2720 break;
2721 default:
2722 UNREACHABLE();
2723 }
2724 __ bind(&done);
2725 } else {
2726 ASSERT(type_ == TranscendentalCache::LOG);
2727 __ fldln2();
2728 __ fxch();
2729 __ fyl2x();
2730  }
2731}
2732
2733
2734// Get the integer part of a heap number. Surprisingly, all this bit twiddling
2735// is faster than using the built-in instructions on floating point registers.
2736// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
2737// trashed registers.
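// The bit twiddling below relies on the IEEE-754 double layout: bit 63 is the
// sign, bits 62..52 the exponent biased by 1023 (HeapNumber::kExponentBias),
// and bits 51..0 the mantissa with an implicit leading 1. The high 32-bit
// word of a HeapNumber therefore holds the sign, the exponent and the top 20
// mantissa bits; the low word holds the remaining 32 mantissa bits.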
2738void IntegerConvert(MacroAssembler* masm,
2739 Register source,
2740 TypeInfo type_info,
2741 bool use_sse3,
2742 Label* conversion_failure) {
2743 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
2744 Label done, right_exponent, normal_exponent;
2745 Register scratch = ebx;
2746 Register scratch2 = edi;
2747 if (type_info.IsInteger32() && CpuFeatures::IsEnabled(SSE2)) {
2748 CpuFeatures::Scope scope(SSE2);
2749 __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
2750 return;
2751 }
2752 if (!type_info.IsInteger32() || !use_sse3) {
2753 // Get exponent word.
2754 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
2755 // Get exponent alone in scratch2.
2756 __ mov(scratch2, scratch);
2757 __ and_(scratch2, HeapNumber::kExponentMask);
2758 }
2759 if (use_sse3) {
2760 CpuFeatures::Scope scope(SSE3);
2761 if (!type_info.IsInteger32()) {
2762 // Check whether the exponent is too big for a 64 bit signed integer.
2763 static const uint32_t kTooBigExponent =
2764 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
2765 __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
2766 __ j(greater_equal, conversion_failure);
2767 }
2768 // Load x87 register with heap number.
2769 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
2770 // Reserve space for 64 bit answer.
2771 __ sub(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
2772 // Do conversion, which cannot fail because we checked the exponent.
2773 __ fisttp_d(Operand(esp, 0));
2774 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx.
2775 __ add(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
2776 } else {
2777 // Load ecx with zero. We use this either for the final shift or
2778 // for the answer.
2779 __ xor_(ecx, Operand(ecx));
2780 // Check whether the exponent matches a 32 bit signed int that cannot be
2781 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
2782 // exponent is 30 (biased). This is the exponent that we are fastest at and
2783 // also the highest exponent we can handle here.
2784 const uint32_t non_smi_exponent =
2785 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
2786 __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
2787 // If we have a match of the int32-but-not-Smi exponent then skip some
2788 // logic.
2789 __ j(equal, &right_exponent);
2790 // If the exponent is higher than that then go to slow case. This catches
2791 // numbers that don't fit in a signed int32, infinities and NaNs.
2792 __ j(less, &normal_exponent);
2793
2794 {
2795 // Handle a big exponent. The only reason we have this code is that the
2796 // >>> operator has a tendency to generate numbers with an exponent of 31.
2797 const uint32_t big_non_smi_exponent =
2798 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
2799 __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
2800 __ j(not_equal, conversion_failure);
2801 // We have the big exponent, typically from >>>. This means the number is
2802 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
2803 __ mov(scratch2, scratch);
2804 __ and_(scratch2, HeapNumber::kMantissaMask);
2805 // Put back the implicit 1.
2806 __ or_(scratch2, 1 << HeapNumber::kExponentShift);
2807 // Shift up the mantissa bits to take up the space the exponent used to
2808 // take. We just orred in the implicit bit so that took care of one and
2809 // we want to use the full unsigned range so we subtract 1 bit from the
2810 // shift distance.
2811 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
2812 __ shl(scratch2, big_shift_distance);
2813 // Get the second half of the double.
2814 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
2815 // Shift down 21 bits to get the most significant 11 bits or the low
2816 // mantissa word.
2817 __ shr(ecx, 32 - big_shift_distance);
2818 __ or_(ecx, Operand(scratch2));
2819 // We have the answer in ecx, but we may need to negate it.
2820 __ test(scratch, Operand(scratch));
2821 __ j(positive, &done);
2822 __ neg(ecx);
2823 __ jmp(&done);
2824 }
2825
2826 __ bind(&normal_exponent);
2827 // Exponent word in scratch, exponent part of exponent word in scratch2.
2828 // Zero in ecx.
2829 // We know the exponent is smaller than 30 (biased). If it is less than
2830 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
2831 // it rounds to zero.
2832 const uint32_t zero_exponent =
2833 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
2834 __ sub(Operand(scratch2), Immediate(zero_exponent));
2835 // ecx already has a Smi zero.
2836 __ j(less, &done);
2837
2838 // We have a shifted exponent between 0 and 30 in scratch2.
2839 __ shr(scratch2, HeapNumber::kExponentShift);
2840 __ mov(ecx, Immediate(30));
2841 __ sub(ecx, Operand(scratch2));
2842
2843 __ bind(&right_exponent);
2844 // Here ecx is the shift, scratch is the exponent word.
2845 // Get the top bits of the mantissa.
2846 __ and_(scratch, HeapNumber::kMantissaMask);
2847 // Put back the implicit 1.
2848 __ or_(scratch, 1 << HeapNumber::kExponentShift);
2849 // Shift up the mantissa bits to take up the space the exponent used to
2850    // take. We have kExponentShift + 1 significant bits in the low end of the
2851 // word. Shift them to the top bits.
2852 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
2853 __ shl(scratch, shift_distance);
2854 // Get the second half of the double. For some exponents we don't
2855 // actually need this because the bits get shifted out again, but
2856 // it's probably slower to test than just to do it.
2857 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
2858 // Shift down 22 bits to get the most significant 10 bits or the low
2859 // mantissa word.
2860 __ shr(scratch2, 32 - shift_distance);
2861 __ or_(scratch2, Operand(scratch));
2862 // Move down according to the exponent.
2863 __ shr_cl(scratch2);
2864 // Now the unsigned answer is in scratch2. We need to move it to ecx and
2865 // we may need to fix the sign.
2866    NearLabel negative;
2867    __ xor_(ecx, Operand(ecx));
2868 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
2869 __ j(greater, &negative);
2870 __ mov(ecx, scratch2);
2871 __ jmp(&done);
2872 __ bind(&negative);
2873 __ sub(ecx, Operand(scratch2));
2874 __ bind(&done);
2875 }
2876}
2877
2878
2879// Input: edx, eax are the left and right objects of a bit op.
2880// Output: eax, ecx are left and right integers for a bit op.
2881void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm,
2882 TypeInfo type_info,
2883 bool use_sse3,
2884 Label* conversion_failure) {
2885 // Check float operands.
2886 Label arg1_is_object, check_undefined_arg1;
2887 Label arg2_is_object, check_undefined_arg2;
2888 Label load_arg2, done;
2889
2890 if (!type_info.IsDouble()) {
2891 if (!type_info.IsSmi()) {
2892 __ test(edx, Immediate(kSmiTagMask));
2893 __ j(not_zero, &arg1_is_object);
2894 } else {
2895 if (FLAG_debug_code) __ AbortIfNotSmi(edx);
2896 }
2897 __ SmiUntag(edx);
2898 __ jmp(&load_arg2);
2899 }
2900
2901 __ bind(&arg1_is_object);
2902
2903 // Get the untagged integer version of the edx heap number in ecx.
2904 IntegerConvert(masm, edx, type_info, use_sse3, conversion_failure);
2905 __ mov(edx, ecx);
2906
2907 // Here edx has the untagged integer, eax has a Smi or a heap number.
2908 __ bind(&load_arg2);
2909 if (!type_info.IsDouble()) {
2910 // Test if arg2 is a Smi.
2911 if (!type_info.IsSmi()) {
2912 __ test(eax, Immediate(kSmiTagMask));
2913 __ j(not_zero, &arg2_is_object);
2914 } else {
2915 if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2916 }
2917 __ SmiUntag(eax);
2918 __ mov(ecx, eax);
2919 __ jmp(&done);
2920 }
2921
2922 __ bind(&arg2_is_object);
2923
2924 // Get the untagged integer version of the eax heap number in ecx.
2925 IntegerConvert(masm, eax, type_info, use_sse3, conversion_failure);
2926 __ bind(&done);
2927 __ mov(eax, edx);
2928}
2929
2930
2931// Input: edx, eax are the left and right objects of a bit op.
2932// Output: eax, ecx are left and right integers for a bit op.
2933void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
2934 bool use_sse3,
2935 Label* conversion_failure) {
2936 // Check float operands.
2937 Label arg1_is_object, check_undefined_arg1;
2938 Label arg2_is_object, check_undefined_arg2;
2939 Label load_arg2, done;
2940
2941 // Test if arg1 is a Smi.
2942 __ test(edx, Immediate(kSmiTagMask));
2943 __ j(not_zero, &arg1_is_object);
2944
2945 __ SmiUntag(edx);
2946 __ jmp(&load_arg2);
2947
2948 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2949 __ bind(&check_undefined_arg1);
2950 __ cmp(edx, Factory::undefined_value());
2951 __ j(not_equal, conversion_failure);
2952 __ mov(edx, Immediate(0));
2953 __ jmp(&load_arg2);
2954
2955 __ bind(&arg1_is_object);
2956 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
2957 __ cmp(ebx, Factory::heap_number_map());
2958 __ j(not_equal, &check_undefined_arg1);
2959
2960 // Get the untagged integer version of the edx heap number in ecx.
2961 IntegerConvert(masm,
2962 edx,
2963 TypeInfo::Unknown(),
2964 use_sse3,
2965 conversion_failure);
2966 __ mov(edx, ecx);
2967
2968 // Here edx has the untagged integer, eax has a Smi or a heap number.
2969 __ bind(&load_arg2);
2970
2971 // Test if arg2 is a Smi.
2972 __ test(eax, Immediate(kSmiTagMask));
2973 __ j(not_zero, &arg2_is_object);
2974
2975 __ SmiUntag(eax);
2976 __ mov(ecx, eax);
2977 __ jmp(&done);
2978
2979 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2980 __ bind(&check_undefined_arg2);
2981 __ cmp(eax, Factory::undefined_value());
2982 __ j(not_equal, conversion_failure);
2983 __ mov(ecx, Immediate(0));
2984 __ jmp(&done);
2985
2986 __ bind(&arg2_is_object);
2987 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2988 __ cmp(ebx, Factory::heap_number_map());
2989 __ j(not_equal, &check_undefined_arg2);
2990
2991 // Get the untagged integer version of the eax heap number in ecx.
2992 IntegerConvert(masm,
2993 eax,
2994 TypeInfo::Unknown(),
2995 use_sse3,
2996 conversion_failure);
2997 __ bind(&done);
2998 __ mov(eax, edx);
2999}
3000
3001
3002void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
3003 TypeInfo type_info,
3004 bool use_sse3,
3005 Label* conversion_failure) {
3006 if (type_info.IsNumber()) {
3007 LoadNumbersAsIntegers(masm, type_info, use_sse3, conversion_failure);
3008 } else {
3009 LoadUnknownsAsIntegers(masm, use_sse3, conversion_failure);
3010 }
3011}
3012
3013
3014void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm,
3015 bool use_sse3,
3016 Label* not_int32) {
3017 return;
3018}
3019
3020
3021void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
3022 Register number) {
3023  NearLabel load_smi, done;
3024
3025 __ test(number, Immediate(kSmiTagMask));
3026 __ j(zero, &load_smi, not_taken);
3027 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
3028 __ jmp(&done);
3029
3030 __ bind(&load_smi);
3031 __ SmiUntag(number);
3032 __ push(number);
3033 __ fild_s(Operand(esp, 0));
3034 __ pop(number);
3035
3036 __ bind(&done);
3037}
3038
3039
3040void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
3041  NearLabel load_smi_edx, load_eax, load_smi_eax, done;
3042  // Load operand in edx into xmm0.
3043 __ test(edx, Immediate(kSmiTagMask));
3044 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
3045 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3046
3047 __ bind(&load_eax);
3048 // Load operand in eax into xmm1.
3049 __ test(eax, Immediate(kSmiTagMask));
3050 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
3051 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3052 __ jmp(&done);
3053
3054 __ bind(&load_smi_edx);
3055 __ SmiUntag(edx); // Untag smi before converting to float.
3056 __ cvtsi2sd(xmm0, Operand(edx));
3057 __ SmiTag(edx); // Retag smi for heap number overwriting test.
3058 __ jmp(&load_eax);
3059
3060 __ bind(&load_smi_eax);
3061 __ SmiUntag(eax); // Untag smi before converting to float.
3062 __ cvtsi2sd(xmm1, Operand(eax));
3063 __ SmiTag(eax); // Retag smi for heap number overwriting test.
3064
3065 __ bind(&done);
3066}
3067
3068
3069void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
3070 Label* not_numbers) {
3071  NearLabel load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
3072  // Load operand in edx into xmm0, or branch to not_numbers.
3073 __ test(edx, Immediate(kSmiTagMask));
3074 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
3075 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
3076 __ j(not_equal, not_numbers); // Argument in edx is not a number.
3077 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3078 __ bind(&load_eax);
3079 // Load operand in eax into xmm1, or branch to not_numbers.
3080 __ test(eax, Immediate(kSmiTagMask));
3081 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
3082 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
3083 __ j(equal, &load_float_eax);
3084 __ jmp(not_numbers); // Argument in eax is not a number.
3085 __ bind(&load_smi_edx);
3086 __ SmiUntag(edx); // Untag smi before converting to float.
3087 __ cvtsi2sd(xmm0, Operand(edx));
3088 __ SmiTag(edx); // Retag smi for heap number overwriting test.
3089 __ jmp(&load_eax);
3090 __ bind(&load_smi_eax);
3091 __ SmiUntag(eax); // Untag smi before converting to float.
3092 __ cvtsi2sd(xmm1, Operand(eax));
3093 __ SmiTag(eax); // Retag smi for heap number overwriting test.
3094 __ jmp(&done);
3095 __ bind(&load_float_eax);
3096 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3097 __ bind(&done);
3098}
3099
3100
3101void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
3102 Register scratch) {
3103 const Register left = edx;
3104 const Register right = eax;
3105 __ mov(scratch, left);
3106 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
3107 __ SmiUntag(scratch);
3108 __ cvtsi2sd(xmm0, Operand(scratch));
3109
3110 __ mov(scratch, right);
3111 __ SmiUntag(scratch);
3112 __ cvtsi2sd(xmm1, Operand(scratch));
3113}
3114
3115
3116void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm,
3117 Label* non_int32,
3118 Register scratch) {
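  // Round-trip check, done for each operand: truncate to int32 with
  // cvttsd2si, convert back with cvtsi2sd and compare with ucomisd. The round
  // trip is lossless only for doubles that are exact int32 values. The
  // not_zero jump rejects values that changed; the carry jump rejects NaN,
  // for which ucomisd reports "unordered" with the carry flag set.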
3119 __ cvttsd2si(scratch, Operand(xmm0));
3120 __ cvtsi2sd(xmm2, Operand(scratch));
3121 __ ucomisd(xmm0, xmm2);
3122 __ j(not_zero, non_int32);
3123 __ j(carry, non_int32);
3124 __ cvttsd2si(scratch, Operand(xmm1));
3125 __ cvtsi2sd(xmm2, Operand(scratch));
3126 __ ucomisd(xmm1, xmm2);
3127 __ j(not_zero, non_int32);
3128 __ j(carry, non_int32);
3129}
3130
3131
3132void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
3133 Register scratch,
3134 ArgLocation arg_location) {
3135  NearLabel load_smi_1, load_smi_2, done_load_1, done;
3136  if (arg_location == ARGS_IN_REGISTERS) {
3137 __ mov(scratch, edx);
3138 } else {
3139 __ mov(scratch, Operand(esp, 2 * kPointerSize));
3140 }
3141 __ test(scratch, Immediate(kSmiTagMask));
3142 __ j(zero, &load_smi_1, not_taken);
3143 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
3144 __ bind(&done_load_1);
3145
3146 if (arg_location == ARGS_IN_REGISTERS) {
3147 __ mov(scratch, eax);
3148 } else {
3149 __ mov(scratch, Operand(esp, 1 * kPointerSize));
3150 }
3151 __ test(scratch, Immediate(kSmiTagMask));
3152 __ j(zero, &load_smi_2, not_taken);
3153 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
3154 __ jmp(&done);
3155
3156 __ bind(&load_smi_1);
3157 __ SmiUntag(scratch);
3158 __ push(scratch);
3159 __ fild_s(Operand(esp, 0));
3160 __ pop(scratch);
3161 __ jmp(&done_load_1);
3162
3163 __ bind(&load_smi_2);
3164 __ SmiUntag(scratch);
3165 __ push(scratch);
3166 __ fild_s(Operand(esp, 0));
3167 __ pop(scratch);
3168
3169 __ bind(&done);
3170}
3171
3172
3173void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
3174 Register scratch) {
3175 const Register left = edx;
3176 const Register right = eax;
3177 __ mov(scratch, left);
3178 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
3179 __ SmiUntag(scratch);
3180 __ push(scratch);
3181 __ fild_s(Operand(esp, 0));
3182
3183 __ mov(scratch, right);
3184 __ SmiUntag(scratch);
3185 __ mov(Operand(esp, 0), scratch);
3186 __ fild_s(Operand(esp, 0));
3187 __ pop(scratch);
3188}
3189
3190
3191void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
3192 Label* non_float,
3193 Register scratch) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003194 NearLabel test_other, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003195 // Test that both operands are numbers (smi or heap number); jump to
 3196 // non_float otherwise. Uses scratch as a temporary.
3197 __ test(edx, Immediate(kSmiTagMask));
3198 __ j(zero, &test_other, not_taken); // argument in edx is OK
3199 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
3200 __ cmp(scratch, Factory::heap_number_map());
3201 __ j(not_equal, non_float); // argument in edx is not a number -> NaN
3202
3203 __ bind(&test_other);
3204 __ test(eax, Immediate(kSmiTagMask));
3205 __ j(zero, &done); // argument in eax is OK
3206 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
3207 __ cmp(scratch, Factory::heap_number_map());
3208 __ j(not_equal, non_float); // argument in eax is not a number -> NaN
3209
3210 // Fall-through: Both operands are numbers.
3211 __ bind(&done);
3212}
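
// A sketch of the smi test each operand goes through above (assumption: a
// plain C++ helper over 32-bit tagged words, as on ia32): the low bit is the
// tag, and kSmiTag == 0 means a word with a clear low bit is a smi; any other
// operand must carry the heap-number map to pass CheckFloatOperands.
static bool IsSmiSketch(int tagged_value) {
  return (tagged_value & kSmiTagMask) == kSmiTag;  // low tag bit clear -> smi
}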
3213
3214
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003215void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
3216 Label* non_int32) {
3217 return;
3218}
3219
3220
ricow@chromium.org65fae842010-08-25 15:26:24 +00003221void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003222 Label slow, done, undo;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003223
3224 if (op_ == Token::SUB) {
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003225 if (include_smi_code_) {
3226 // Check whether the value is a smi.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003227 NearLabel try_float;
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003228 __ test(eax, Immediate(kSmiTagMask));
3229 __ j(not_zero, &try_float, not_taken);
ricow@chromium.org65fae842010-08-25 15:26:24 +00003230
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003231 if (negative_zero_ == kStrictNegativeZero) {
3232 // Go slow case if the value of the expression is zero
3233 // to make sure that we switch between 0 and -0.
3234 __ test(eax, Operand(eax));
3235 __ j(zero, &slow, not_taken);
3236 }
3237
3238 // The value of the expression is a smi that is not zero. Try
3239 // optimistic subtraction '0 - value'.
3240 __ mov(edx, Operand(eax));
3241 __ Set(eax, Immediate(0));
3242 __ sub(eax, Operand(edx));
3243 __ j(overflow, &undo, not_taken);
3244 __ StubReturn(1);
3245
3246 // Try floating point case.
3247 __ bind(&try_float);
3248 } else if (FLAG_debug_code) {
3249 __ AbortIfSmi(eax);
ricow@chromium.org65fae842010-08-25 15:26:24 +00003250 }
3251
ricow@chromium.org65fae842010-08-25 15:26:24 +00003252 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3253 __ cmp(edx, Factory::heap_number_map());
3254 __ j(not_equal, &slow);
3255 if (overwrite_ == UNARY_OVERWRITE) {
3256 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
3257 __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
3258 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
3259 } else {
3260 __ mov(edx, Operand(eax));
3261 // edx: operand
3262 __ AllocateHeapNumber(eax, ebx, ecx, &undo);
3263 // eax: allocated 'empty' number
3264 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
3265 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
3266 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
3267 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
3268 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
3269 }
3270 } else if (op_ == Token::BIT_NOT) {
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003271 if (include_smi_code_) {
3272 Label non_smi;
3273 __ test(eax, Immediate(kSmiTagMask));
3274 __ j(not_zero, &non_smi);
3275 __ not_(eax);
3276 __ and_(eax, ~kSmiTagMask); // Remove inverted smi-tag.
3277 __ ret(0);
3278 __ bind(&non_smi);
3279 } else if (FLAG_debug_code) {
3280 __ AbortIfSmi(eax);
3281 }
3282
ricow@chromium.org65fae842010-08-25 15:26:24 +00003283 // Check if the operand is a heap number.
3284 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3285 __ cmp(edx, Factory::heap_number_map());
3286 __ j(not_equal, &slow, not_taken);
3287
3288 // Convert the heap number in eax to an untagged integer in ecx.
3289 IntegerConvert(masm,
3290 eax,
3291 TypeInfo::Unknown(),
3292 CpuFeatures::IsSupported(SSE3),
3293 &slow);
3294
3295 // Do the bitwise operation and check if the result fits in a smi.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003296 NearLabel try_float;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003297 __ not_(ecx);
3298 __ cmp(ecx, 0xc0000000);
3299 __ j(sign, &try_float, not_taken);
3300
3301 // Tag the result as a smi and we're done.
3302 STATIC_ASSERT(kSmiTagSize == 1);
3303 __ lea(eax, Operand(ecx, times_2, kSmiTag));
3304 __ jmp(&done);
3305
3306 // Try to store the result in a heap number.
3307 __ bind(&try_float);
3308 if (overwrite_ == UNARY_NO_OVERWRITE) {
3309 // Allocate a fresh heap number, but don't overwrite eax until
3310 // we're sure we can do it without going through the slow case
3311 // that needs the value in eax.
3312 __ AllocateHeapNumber(ebx, edx, edi, &slow);
3313 __ mov(eax, Operand(ebx));
3314 }
3315 if (CpuFeatures::IsSupported(SSE2)) {
3316 CpuFeatures::Scope use_sse2(SSE2);
3317 __ cvtsi2sd(xmm0, Operand(ecx));
3318 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
3319 } else {
3320 __ push(ecx);
3321 __ fild_s(Operand(esp, 0));
3322 __ pop(ecx);
3323 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
3324 }
3325 } else {
3326 UNIMPLEMENTED();
3327 }
3328
3329 // Return from the stub.
3330 __ bind(&done);
3331 __ StubReturn(1);
3332
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00003333 // Restore eax and go slow case.
3334 __ bind(&undo);
3335 __ mov(eax, Operand(edx));
3336
ricow@chromium.org65fae842010-08-25 15:26:24 +00003337 // Handle the slow case by jumping to the JavaScript builtin.
3338 __ bind(&slow);
3339 __ pop(ecx); // pop return address.
3340 __ push(eax);
3341 __ push(ecx); // push return address
3342 switch (op_) {
3343 case Token::SUB:
3344 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
3345 break;
3346 case Token::BIT_NOT:
3347 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
3348 break;
3349 default:
3350 UNREACHABLE();
3351 }
3352}
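
// A hedged sketch of what the Token::SUB heap-number path above does: flip
// the sign bit in the high (exponent) word of the IEEE-754 representation.
// The helper and its union-based bit access are assumptions for illustration,
// not V8 API.
static double FlipDoubleSignSketch(double value) {
  union { double d; uint32_t words[2]; } bits;
  bits.d = value;
  bits.words[1] ^= 0x80000000u;  // HeapNumber::kSignMask, little-endian high word
  return bits.d;
}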
3353
3354
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003355void MathPowStub::Generate(MacroAssembler* masm) {
3356 // Registers are used as follows:
3357 // edx = base
3358 // eax = exponent
3359 // ecx = temporary, result
3360
3361 CpuFeatures::Scope use_sse2(SSE2);
3362 Label allocate_return, call_runtime;
3363
3364 // Load input parameters.
3365 __ mov(edx, Operand(esp, 2 * kPointerSize));
3366 __ mov(eax, Operand(esp, 1 * kPointerSize));
3367
3368 // Save 1 in xmm3 - we need this several times later on.
3369 __ mov(ecx, Immediate(1));
3370 __ cvtsi2sd(xmm3, Operand(ecx));
3371
3372 Label exponent_nonsmi;
3373 Label base_nonsmi;
3374 // If the exponent is a heap number go to that specific case.
3375 __ test(eax, Immediate(kSmiTagMask));
3376 __ j(not_zero, &exponent_nonsmi);
3377 __ test(edx, Immediate(kSmiTagMask));
3378 __ j(not_zero, &base_nonsmi);
3379
 3380 // Optimized version when both the exponent and the base are smis.
3381 Label powi;
3382 __ SmiUntag(edx);
3383 __ cvtsi2sd(xmm0, Operand(edx));
3384 __ jmp(&powi);
 3385 // Exponent is a smi and base is a heap number.
3386 __ bind(&base_nonsmi);
3387 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3388 Factory::heap_number_map());
3389 __ j(not_equal, &call_runtime);
3390
3391 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3392
3393 // Optimized version of pow if exponent is a smi.
3394 // xmm0 contains the base.
3395 __ bind(&powi);
3396 __ SmiUntag(eax);
3397
 3398 // Save the exponent in the base register, as we need to check later whether it is negative.
3399 // We know that base and exponent are in different registers.
3400 __ mov(edx, eax);
3401
3402 // Get absolute value of exponent.
3403 NearLabel no_neg;
3404 __ cmp(eax, 0);
3405 __ j(greater_equal, &no_neg);
3406 __ neg(eax);
3407 __ bind(&no_neg);
3408
3409 // Load xmm1 with 1.
3410 __ movsd(xmm1, xmm3);
3411 NearLabel while_true;
3412 NearLabel no_multiply;
3413
3414 __ bind(&while_true);
3415 __ shr(eax, 1);
3416 __ j(not_carry, &no_multiply);
3417 __ mulsd(xmm1, xmm0);
3418 __ bind(&no_multiply);
3419 __ test(eax, Operand(eax));
3420 __ mulsd(xmm0, xmm0);
3421 __ j(not_zero, &while_true);
3422
 3423 // edx (the base register) still holds the original exponent - if the
 3424 // exponent is negative, return 1/result.
3425 __ test(edx, Operand(edx));
3426 __ j(positive, &allocate_return);
3427 // Special case if xmm1 has reached infinity.
3428 __ mov(ecx, Immediate(0x7FB00000));
3429 __ movd(xmm0, Operand(ecx));
3430 __ cvtss2sd(xmm0, xmm0);
3431 __ ucomisd(xmm0, xmm1);
3432 __ j(equal, &call_runtime);
3433 __ divsd(xmm3, xmm1);
3434 __ movsd(xmm1, xmm3);
3435 __ jmp(&allocate_return);
3436
 3437 // The exponent (or both operands) is a heap number - from here on we
 3438 // work with doubles.
3439 __ bind(&exponent_nonsmi);
3440 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3441 Factory::heap_number_map());
3442 __ j(not_equal, &call_runtime);
3443 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
 3444 // Test if the exponent is NaN.
3445 __ ucomisd(xmm1, xmm1);
3446 __ j(parity_even, &call_runtime);
3447
3448 NearLabel base_not_smi;
3449 NearLabel handle_special_cases;
3450 __ test(edx, Immediate(kSmiTagMask));
3451 __ j(not_zero, &base_not_smi);
3452 __ SmiUntag(edx);
3453 __ cvtsi2sd(xmm0, Operand(edx));
3454 __ jmp(&handle_special_cases);
3455
3456 __ bind(&base_not_smi);
3457 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3458 Factory::heap_number_map());
3459 __ j(not_equal, &call_runtime);
3460 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
3461 __ and_(ecx, HeapNumber::kExponentMask);
3462 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask));
3463 // base is NaN or +/-Infinity
3464 __ j(greater_equal, &call_runtime);
3465 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3466
3467 // base is in xmm0 and exponent is in xmm1.
3468 __ bind(&handle_special_cases);
3469 NearLabel not_minus_half;
3470 // Test for -0.5.
3471 // Load xmm2 with -0.5.
3472 __ mov(ecx, Immediate(0xBF000000));
3473 __ movd(xmm2, Operand(ecx));
3474 __ cvtss2sd(xmm2, xmm2);
3475 // xmm2 now has -0.5.
3476 __ ucomisd(xmm2, xmm1);
3477 __ j(not_equal, &not_minus_half);
3478
3479 // Calculates reciprocal of square root.
 3480 // Note that 1/sqrt(x) = sqrt(1/x).
3481 __ divsd(xmm3, xmm0);
3482 __ movsd(xmm1, xmm3);
3483 __ sqrtsd(xmm1, xmm1);
3484 __ jmp(&allocate_return);
3485
3486 // Test for 0.5.
3487 __ bind(&not_minus_half);
3488 // Load xmm2 with 0.5.
3489 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
3490 __ addsd(xmm2, xmm3);
3491 // xmm2 now has 0.5.
3492 __ ucomisd(xmm2, xmm1);
3493 __ j(not_equal, &call_runtime);
3494 // Calculates square root.
3495 __ movsd(xmm1, xmm0);
3496 __ sqrtsd(xmm1, xmm1);
3497
3498 __ bind(&allocate_return);
3499 __ AllocateHeapNumber(ecx, eax, edx, &call_runtime);
3500 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1);
3501 __ mov(eax, ecx);
3502 __ ret(2);
3503
3504 __ bind(&call_runtime);
3505 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
3506}
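
// A C++ sketch of the smi-exponent loop above: square-and-multiply on the
// absolute value of the exponent, then take the reciprocal for a negative
// exponent. The helper name is an assumption; the real stub additionally
// falls back to the runtime when the intermediate result reaches infinity.
static double PowiSketch(double base, int exponent) {
  double result = 1.0;
  unsigned e = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                            : static_cast<unsigned>(exponent);
  while (e != 0) {
    if (e & 1) result *= base;  // low bit set: multiply the current square in
    base *= base;               // square the base for the next bit
    e >>= 1;
  }
  return exponent < 0 ? 1.0 / result : result;
}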
3507
3508
ricow@chromium.org65fae842010-08-25 15:26:24 +00003509void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
3510 // The key is in edx and the parameter count is in eax.
3511
3512 // The displacement is used for skipping the frame pointer on the
3513 // stack. It is the offset of the last parameter (if any) relative
3514 // to the frame pointer.
3515 static const int kDisplacement = 1 * kPointerSize;
3516
3517 // Check that the key is a smi.
3518 Label slow;
3519 __ test(edx, Immediate(kSmiTagMask));
3520 __ j(not_zero, &slow, not_taken);
3521
3522 // Check if the calling frame is an arguments adaptor frame.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003523 NearLabel adaptor;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003524 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3525 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
3526 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3527 __ j(equal, &adaptor);
3528
3529 // Check index against formal parameters count limit passed in
3530 // through register eax. Use unsigned comparison to get negative
3531 // check for free.
3532 __ cmp(edx, Operand(eax));
3533 __ j(above_equal, &slow, not_taken);
3534
3535 // Read the argument from the stack and return it.
3536 STATIC_ASSERT(kSmiTagSize == 1);
3537 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
3538 __ lea(ebx, Operand(ebp, eax, times_2, 0));
3539 __ neg(edx);
3540 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
3541 __ ret(0);
3542
3543 // Arguments adaptor case: Check index against actual arguments
3544 // limit found in the arguments adaptor frame. Use unsigned
3545 // comparison to get negative check for free.
3546 __ bind(&adaptor);
3547 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3548 __ cmp(edx, Operand(ecx));
3549 __ j(above_equal, &slow, not_taken);
3550
3551 // Read the argument from the stack and return it.
3552 STATIC_ASSERT(kSmiTagSize == 1);
3553 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
3554 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
3555 __ neg(edx);
3556 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
3557 __ ret(0);
3558
3559 // Slow-case: Handle non-smi or out-of-bounds access to arguments
3560 // by calling the runtime system.
3561 __ bind(&slow);
3562 __ pop(ebx); // Return address.
3563 __ push(edx);
3564 __ push(ebx);
3565 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
3566}
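
// A sketch of the fast-path address computation above, using untagged values
// and C pointer arithmetic (helper and signature are assumptions for
// illustration): with kDisplacement == kPointerSize, argument `index` is read
// from fp + kDisplacement + (parameter_count - index) * kPointerSize.
static int* ArgumentAddressSketch(int* fp, int parameter_count, int index) {
  return fp + 1 + (parameter_count - index);  // pointer arithmetic in words
}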
3567
3568
3569void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
3570 // esp[0] : return address
3571 // esp[4] : number of parameters
3572 // esp[8] : receiver displacement
3573 // esp[16] : function
3574
3575 // The displacement is used for skipping the return address and the
3576 // frame pointer on the stack. It is the offset of the last
3577 // parameter (if any) relative to the frame pointer.
3578 static const int kDisplacement = 2 * kPointerSize;
3579
3580 // Check if the calling frame is an arguments adaptor frame.
3581 Label adaptor_frame, try_allocate, runtime;
3582 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3583 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
3584 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3585 __ j(equal, &adaptor_frame);
3586
3587 // Get the length from the frame.
3588 __ mov(ecx, Operand(esp, 1 * kPointerSize));
3589 __ jmp(&try_allocate);
3590
3591 // Patch the arguments.length and the parameters pointer.
3592 __ bind(&adaptor_frame);
3593 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3594 __ mov(Operand(esp, 1 * kPointerSize), ecx);
3595 __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
3596 __ mov(Operand(esp, 2 * kPointerSize), edx);
3597
3598 // Try the new space allocation. Start out with computing the size of
3599 // the arguments object and the elements array.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003600 NearLabel add_arguments_object;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003601 __ bind(&try_allocate);
3602 __ test(ecx, Operand(ecx));
3603 __ j(zero, &add_arguments_object);
3604 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
3605 __ bind(&add_arguments_object);
3606 __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));
3607
3608 // Do the allocation of both objects in one go.
3609 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
3610
3611 // Get the arguments boilerplate from the current (global) context.
3612 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
3613 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
3614 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
3615 __ mov(edi, Operand(edi, offset));
3616
3617 // Copy the JS object part.
3618 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
3619 __ mov(ebx, FieldOperand(edi, i));
3620 __ mov(FieldOperand(eax, i), ebx);
3621 }
3622
3623 // Setup the callee in-object property.
3624 STATIC_ASSERT(Heap::arguments_callee_index == 0);
3625 __ mov(ebx, Operand(esp, 3 * kPointerSize));
3626 __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);
3627
3628 // Get the length (smi tagged) and set that as an in-object property too.
3629 STATIC_ASSERT(Heap::arguments_length_index == 1);
3630 __ mov(ecx, Operand(esp, 1 * kPointerSize));
3631 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);
3632
3633 // If there are no actual arguments, we're done.
3634 Label done;
3635 __ test(ecx, Operand(ecx));
3636 __ j(zero, &done);
3637
3638 // Get the parameters pointer from the stack.
3639 __ mov(edx, Operand(esp, 2 * kPointerSize));
3640
3641 // Setup the elements pointer in the allocated arguments object and
3642 // initialize the header in the elements fixed array.
3643 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
3644 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3645 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3646 Immediate(Factory::fixed_array_map()));
3647 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3648 // Untag the length for the loop below.
3649 __ SmiUntag(ecx);
3650
3651 // Copy the fixed array slots.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003652 NearLabel loop;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003653 __ bind(&loop);
3654 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
3655 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
3656 __ add(Operand(edi), Immediate(kPointerSize));
3657 __ sub(Operand(edx), Immediate(kPointerSize));
3658 __ dec(ecx);
3659 __ j(not_zero, &loop);
3660
3661 // Return and remove the on-stack parameters.
3662 __ bind(&done);
3663 __ ret(3 * kPointerSize);
3664
3665 // Do the runtime call to allocate the arguments object.
3666 __ bind(&runtime);
3667 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
3668}
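
// Sketch of the size computation in the try_allocate path above: the
// arguments object is allocated together with its elements array unless there
// are no actual arguments. Constant names are those used by the stub; the
// helper itself is illustrative only.
static int ArgumentsAllocationSizeSketch(int argument_count) {
  int size = Heap::kArgumentsObjectSize;
  if (argument_count > 0) {
    size += FixedArray::kHeaderSize + argument_count * kPointerSize;
  }
  return size;
}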
3669
3670
3671void RegExpExecStub::Generate(MacroAssembler* masm) {
 3672 // Just jump directly to the runtime if native RegExp is not selected at
 3673 // compile time, or if entering generated regexp code is disabled by the
 3674 // regexp_entry_native runtime flag.
3675#ifdef V8_INTERPRETED_REGEXP
3676 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3677#else // V8_INTERPRETED_REGEXP
3678 if (!FLAG_regexp_entry_native) {
3679 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3680 return;
3681 }
3682
3683 // Stack frame on entry.
3684 // esp[0]: return address
3685 // esp[4]: last_match_info (expected JSArray)
3686 // esp[8]: previous index
3687 // esp[12]: subject string
3688 // esp[16]: JSRegExp object
3689
3690 static const int kLastMatchInfoOffset = 1 * kPointerSize;
3691 static const int kPreviousIndexOffset = 2 * kPointerSize;
3692 static const int kSubjectOffset = 3 * kPointerSize;
3693 static const int kJSRegExpOffset = 4 * kPointerSize;
3694
3695 Label runtime, invoke_regexp;
3696
3697 // Ensure that a RegExp stack is allocated.
3698 ExternalReference address_of_regexp_stack_memory_address =
3699 ExternalReference::address_of_regexp_stack_memory_address();
3700 ExternalReference address_of_regexp_stack_memory_size =
3701 ExternalReference::address_of_regexp_stack_memory_size();
3702 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3703 __ test(ebx, Operand(ebx));
3704 __ j(zero, &runtime, not_taken);
3705
3706 // Check that the first argument is a JSRegExp object.
3707 __ mov(eax, Operand(esp, kJSRegExpOffset));
3708 STATIC_ASSERT(kSmiTag == 0);
3709 __ test(eax, Immediate(kSmiTagMask));
3710 __ j(zero, &runtime);
3711 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
3712 __ j(not_equal, &runtime);
3713 // Check that the RegExp has been compiled (data contains a fixed array).
3714 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3715 if (FLAG_debug_code) {
3716 __ test(ecx, Immediate(kSmiTagMask));
3717 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
3718 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
3719 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
3720 }
3721
3722 // ecx: RegExp data (FixedArray)
3723 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
3724 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
3725 __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
3726 __ j(not_equal, &runtime);
3727
3728 // ecx: RegExp data (FixedArray)
3729 // Check that the number of captures fit in the static offsets vector buffer.
3730 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
3731 // Calculate number of capture registers (number_of_captures + 1) * 2. This
 3732 // uses the assumption that smis are 2 * their untagged value.
3733 STATIC_ASSERT(kSmiTag == 0);
3734 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3735 __ add(Operand(edx), Immediate(2)); // edx was a smi.
3736 // Check that the static offsets vector buffer is large enough.
3737 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
3738 __ j(above, &runtime);
3739
3740 // ecx: RegExp data (FixedArray)
3741 // edx: Number of capture registers
3742 // Check that the second argument is a string.
3743 __ mov(eax, Operand(esp, kSubjectOffset));
3744 __ test(eax, Immediate(kSmiTagMask));
3745 __ j(zero, &runtime);
3746 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
3747 __ j(NegateCondition(is_string), &runtime);
3748 // Get the length of the string to ebx.
3749 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
3750
3751 // ebx: Length of subject string as a smi
3752 // ecx: RegExp data (FixedArray)
3753 // edx: Number of capture registers
3754 // Check that the third argument is a positive smi less than the subject
3755 // string length. A negative value will be greater (unsigned comparison).
3756 __ mov(eax, Operand(esp, kPreviousIndexOffset));
3757 __ test(eax, Immediate(kSmiTagMask));
3758 __ j(not_zero, &runtime);
3759 __ cmp(eax, Operand(ebx));
3760 __ j(above_equal, &runtime);
3761
3762 // ecx: RegExp data (FixedArray)
3763 // edx: Number of capture registers
3764 // Check that the fourth object is a JSArray object.
3765 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3766 __ test(eax, Immediate(kSmiTagMask));
3767 __ j(zero, &runtime);
3768 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3769 __ j(not_equal, &runtime);
3770 // Check that the JSArray is in fast case.
3771 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3772 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
3773 __ cmp(eax, Factory::fixed_array_map());
3774 __ j(not_equal, &runtime);
3775 // Check that the last match info has space for the capture registers and the
3776 // additional information.
3777 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
3778 __ SmiUntag(eax);
3779 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
3780 __ cmp(edx, Operand(eax));
3781 __ j(greater, &runtime);
3782
3783 // ecx: RegExp data (FixedArray)
3784 // Check the representation and encoding of the subject string.
3785 Label seq_ascii_string, seq_two_byte_string, check_code;
3786 __ mov(eax, Operand(esp, kSubjectOffset));
3787 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3788 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3789 // First check for flat two byte string.
3790 __ and_(ebx,
3791 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
3792 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
3793 __ j(zero, &seq_two_byte_string);
3794 // Any other flat string must be a flat ascii string.
3795 __ test(Operand(ebx),
3796 Immediate(kIsNotStringMask | kStringRepresentationMask));
3797 __ j(zero, &seq_ascii_string);
3798
3799 // Check for flat cons string.
3800 // A flat cons string is a cons string where the second part is the empty
3801 // string. In that case the subject string is just the first part of the cons
3802 // string. Also in this case the first part of the cons string is known to be
3803 // a sequential string or an external string.
3804 STATIC_ASSERT(kExternalStringTag != 0);
3805 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
3806 __ test(Operand(ebx),
3807 Immediate(kIsNotStringMask | kExternalStringTag));
3808 __ j(not_zero, &runtime);
3809 // String is a cons string.
3810 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
3811 __ cmp(Operand(edx), Factory::empty_string());
3812 __ j(not_equal, &runtime);
3813 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
3814 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3815 // String is a cons string with empty second part.
3816 // eax: first part of cons string.
3817 // ebx: map of first part of cons string.
3818 // Is first part a flat two byte string?
3819 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3820 kStringRepresentationMask | kStringEncodingMask);
3821 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
3822 __ j(zero, &seq_two_byte_string);
3823 // Any other flat string must be ascii.
3824 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3825 kStringRepresentationMask);
3826 __ j(not_zero, &runtime);
3827
3828 __ bind(&seq_ascii_string);
3829 // eax: subject string (flat ascii)
3830 // ecx: RegExp data (FixedArray)
3831 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
3832 __ Set(edi, Immediate(1)); // Type is ascii.
3833 __ jmp(&check_code);
3834
3835 __ bind(&seq_two_byte_string);
3836 // eax: subject string (flat two byte)
3837 // ecx: RegExp data (FixedArray)
3838 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
3839 __ Set(edi, Immediate(0)); // Type is two byte.
3840
3841 __ bind(&check_code);
 3842 // Check that the irregexp code has been generated for the actual string
 3843 // encoding. If it has, the field contains a code object; otherwise it
 3844 // contains the hole.
3845 __ CmpObjectType(edx, CODE_TYPE, ebx);
3846 __ j(not_equal, &runtime);
3847
3848 // eax: subject string
3849 // edx: code
3850 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
3851 // Load used arguments before starting to push arguments for call to native
3852 // RegExp code to avoid handling changing stack height.
3853 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
3854 __ SmiUntag(ebx); // Previous index from smi.
3855
3856 // eax: subject string
3857 // ebx: previous index
3858 // edx: code
3859 // edi: encoding of subject string (1 if ascii 0 if two_byte);
3860 // All checks done. Now push arguments for native regexp code.
3861 __ IncrementCounter(&Counters::regexp_entry_native, 1);
3862
3863 static const int kRegExpExecuteArguments = 7;
3864 __ PrepareCallCFunction(kRegExpExecuteArguments, ecx);
3865
3866 // Argument 7: Indicate that this is a direct call from JavaScript.
3867 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
3868
3869 // Argument 6: Start (high end) of backtracking stack memory area.
3870 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3871 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3872 __ mov(Operand(esp, 5 * kPointerSize), ecx);
3873
3874 // Argument 5: static offsets vector buffer.
3875 __ mov(Operand(esp, 4 * kPointerSize),
3876 Immediate(ExternalReference::address_of_static_offsets_vector()));
3877
3878 // Argument 4: End of string data
3879 // Argument 3: Start of string data
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003880 NearLabel setup_two_byte, setup_rest;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003881 __ test(edi, Operand(edi));
3882 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
3883 __ j(zero, &setup_two_byte);
3884 __ SmiUntag(edi);
3885 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
3886 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3887 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
3888 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3889 __ jmp(&setup_rest);
3890
3891 __ bind(&setup_two_byte);
3892 STATIC_ASSERT(kSmiTag == 0);
3893 STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2).
3894 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
3895 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3896 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
3897 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3898
3899 __ bind(&setup_rest);
3900
3901 // Argument 2: Previous index.
3902 __ mov(Operand(esp, 1 * kPointerSize), ebx);
3903
3904 // Argument 1: Subject string.
3905 __ mov(Operand(esp, 0 * kPointerSize), eax);
3906
3907 // Locate the code entry and call it.
3908 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3909 __ CallCFunction(edx, kRegExpExecuteArguments);
3910
3911 // Check the result.
3912 Label success;
3913 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
3914 __ j(equal, &success, taken);
3915 Label failure;
3916 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
3917 __ j(equal, &failure, taken);
3918 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
3919 // If not exception it can only be retry. Handle that in the runtime system.
3920 __ j(not_equal, &runtime);
 3921 // Result must now be exception. If there is no pending exception already, a
 3922 // stack overflow (on the backtrack stack) was detected in RegExp code, but
 3923 // the exception has not been created yet. Handle that in the runtime system.
3924 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
3925 ExternalReference pending_exception(Top::k_pending_exception_address);
3926 __ mov(eax,
3927 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
3928 __ cmp(eax, Operand::StaticVariable(pending_exception));
3929 __ j(equal, &runtime);
3930 __ bind(&failure);
3931 // For failure and exception return null.
3932 __ mov(Operand(eax), Factory::null_value());
3933 __ ret(4 * kPointerSize);
3934
3935 // Load RegExp data.
3936 __ bind(&success);
3937 __ mov(eax, Operand(esp, kJSRegExpOffset));
3938 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3939 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
3940 // Calculate number of capture registers (number_of_captures + 1) * 2.
3941 STATIC_ASSERT(kSmiTag == 0);
3942 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3943 __ add(Operand(edx), Immediate(2)); // edx was a smi.
3944
3945 // edx: Number of capture registers
3946 // Load last_match_info which is still known to be a fast case JSArray.
3947 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3948 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3949
3950 // ebx: last_match_info backing store (FixedArray)
3951 // edx: number of capture registers
3952 // Store the capture count.
3953 __ SmiTag(edx); // Number of capture registers to smi.
3954 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
3955 __ SmiUntag(edx); // Number of capture registers back from smi.
3956 // Store last subject and last input.
3957 __ mov(eax, Operand(esp, kSubjectOffset));
3958 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
3959 __ mov(ecx, ebx);
3960 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
3961 __ mov(eax, Operand(esp, kSubjectOffset));
3962 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
3963 __ mov(ecx, ebx);
3964 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
3965
3966 // Get the static offsets vector filled by the native regexp code.
3967 ExternalReference address_of_static_offsets_vector =
3968 ExternalReference::address_of_static_offsets_vector();
3969 __ mov(ecx, Immediate(address_of_static_offsets_vector));
3970
3971 // ebx: last_match_info backing store (FixedArray)
3972 // ecx: offsets vector
3973 // edx: number of capture registers
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00003974 NearLabel next_capture, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00003975 // Capture register counter starts from number of capture registers and
 3976 // counts down until wrapping after zero.
3977 __ bind(&next_capture);
3978 __ sub(Operand(edx), Immediate(1));
3979 __ j(negative, &done);
3980 // Read the value from the static offsets vector buffer.
3981 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
3982 __ SmiTag(edi);
3983 // Store the smi value in the last match info.
3984 __ mov(FieldOperand(ebx,
3985 edx,
3986 times_pointer_size,
3987 RegExpImpl::kFirstCaptureOffset),
3988 edi);
3989 __ jmp(&next_capture);
3990 __ bind(&done);
3991
3992 // Return last match info.
3993 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3994 __ ret(4 * kPointerSize);
3995
3996 // Do the runtime call to execute the regexp.
3997 __ bind(&runtime);
3998 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3999#endif // V8_INTERPRETED_REGEXP
4000}
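
// Sketch of the capture-register computation used twice above: a smi is the
// integer shifted left by one, so adding 2 to the smi-tagged capture count
// yields (number_of_captures + 1) * 2 without untagging. Illustrative helper:
static int CaptureRegisterCountSketch(int number_of_captures) {
  int as_smi = number_of_captures << 1;  // SmiTag
  return as_smi + 2;                     // == (number_of_captures + 1) * 2
}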
4001
4002
kasperl@chromium.orga5551262010-12-07 12:49:48 +00004003void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
4004 const int kMaxInlineLength = 100;
4005 Label slowcase;
4006 NearLabel done;
4007 __ mov(ebx, Operand(esp, kPointerSize * 3));
4008 __ test(ebx, Immediate(kSmiTagMask));
4009 __ j(not_zero, &slowcase);
4010 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
4011 __ j(above, &slowcase);
4012 // Smi-tagging is equivalent to multiplying by 2.
4013 STATIC_ASSERT(kSmiTag == 0);
4014 STATIC_ASSERT(kSmiTagSize == 1);
4015 // Allocate RegExpResult followed by FixedArray with size in ebx.
4016 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
4017 // Elements: [Map][Length][..elements..]
4018 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
4019 times_half_pointer_size,
4020 ebx, // In: Number of elements (times 2, being a smi)
4021 eax, // Out: Start of allocation (tagged).
4022 ecx, // Out: End of allocation.
4023 edx, // Scratch register
4024 &slowcase,
4025 TAG_OBJECT);
4026 // eax: Start of allocated area, object-tagged.
4027
4028 // Set JSArray map to global.regexp_result_map().
4029 // Set empty properties FixedArray.
4030 // Set elements to point to FixedArray allocated right after the JSArray.
4031 // Interleave operations for better latency.
4032 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
4033 __ mov(ecx, Immediate(Factory::empty_fixed_array()));
4034 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
4035 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
4036 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
4037 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
4038 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
4039 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
4040
4041 // Set input, index and length fields from arguments.
4042 __ mov(ecx, Operand(esp, kPointerSize * 1));
4043 __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
4044 __ mov(ecx, Operand(esp, kPointerSize * 2));
4045 __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
4046 __ mov(ecx, Operand(esp, kPointerSize * 3));
4047 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
4048
4049 // Fill out the elements FixedArray.
4050 // eax: JSArray.
4051 // ebx: FixedArray.
4052 // ecx: Number of elements in array, as smi.
4053
4054 // Set map.
4055 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
4056 Immediate(Factory::fixed_array_map()));
4057 // Set length.
4058 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
4059 // Fill contents of fixed-array with the-hole.
4060 __ SmiUntag(ecx);
4061 __ mov(edx, Immediate(Factory::the_hole_value()));
4062 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
4063 // Fill fixed array elements with hole.
4064 // eax: JSArray.
4065 // ecx: Number of elements to fill.
4066 // ebx: Start of elements in FixedArray.
4067 // edx: the hole.
4068 Label loop;
4069 __ test(ecx, Operand(ecx));
4070 __ bind(&loop);
4071 __ j(less_equal, &done); // Jump if ecx is negative or zero.
4072 __ sub(Operand(ecx), Immediate(1));
4073 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
4074 __ jmp(&loop);
4075
4076 __ bind(&done);
4077 __ ret(3 * kPointerSize);
4078
4079 __ bind(&slowcase);
4080 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
4081}
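
// Sketch of the element-initialization loop above: the fixed array is filled
// backwards with the hole value while ecx counts down from the element count.
// Hypothetical types and names, for illustration only:
static void FillWithHoleSketch(void** elements, int length, void* the_hole) {
  for (int i = length - 1; i >= 0; i--) {
    elements[i] = the_hole;  // mov(Operand(ebx, ecx, times_pointer_size, 0), edx)
  }
}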
4082
4083
ricow@chromium.org65fae842010-08-25 15:26:24 +00004084void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
4085 Register object,
4086 Register result,
4087 Register scratch1,
4088 Register scratch2,
4089 bool object_is_smi,
4090 Label* not_found) {
4091 // Use of registers. Register result is used as a temporary.
4092 Register number_string_cache = result;
4093 Register mask = scratch1;
4094 Register scratch = scratch2;
4095
4096 // Load the number string cache.
4097 ExternalReference roots_address = ExternalReference::roots_address();
4098 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
4099 __ mov(number_string_cache,
4100 Operand::StaticArray(scratch, times_pointer_size, roots_address));
4101 // Make the hash mask from the length of the number string cache. It
4102 // contains two elements (number and string) for each cache entry.
4103 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
4104 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
4105 __ sub(Operand(mask), Immediate(1)); // Make mask.
4106
4107 // Calculate the entry in the number string cache. The hash value in the
4108 // number string cache for smis is just the smi value, and the hash for
4109 // doubles is the xor of the upper and lower words. See
4110 // Heap::GetNumberStringCache.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004111 NearLabel smi_hash_calculated;
4112 NearLabel load_result_from_cache;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004113 if (object_is_smi) {
4114 __ mov(scratch, object);
4115 __ SmiUntag(scratch);
4116 } else {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004117 NearLabel not_smi, hash_calculated;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004118 STATIC_ASSERT(kSmiTag == 0);
4119 __ test(object, Immediate(kSmiTagMask));
4120 __ j(not_zero, &not_smi);
4121 __ mov(scratch, object);
4122 __ SmiUntag(scratch);
4123 __ jmp(&smi_hash_calculated);
4124 __ bind(&not_smi);
4125 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
4126 Factory::heap_number_map());
4127 __ j(not_equal, not_found);
4128 STATIC_ASSERT(8 == kDoubleSize);
4129 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
4130 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
4131 // Object is heap number and hash is now in scratch. Calculate cache index.
4132 __ and_(scratch, Operand(mask));
4133 Register index = scratch;
4134 Register probe = mask;
4135 __ mov(probe,
4136 FieldOperand(number_string_cache,
4137 index,
4138 times_twice_pointer_size,
4139 FixedArray::kHeaderSize));
4140 __ test(probe, Immediate(kSmiTagMask));
4141 __ j(zero, not_found);
4142 if (CpuFeatures::IsSupported(SSE2)) {
4143 CpuFeatures::Scope fscope(SSE2);
4144 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
4145 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
4146 __ ucomisd(xmm0, xmm1);
4147 } else {
4148 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
4149 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
4150 __ FCmp();
4151 }
4152 __ j(parity_even, not_found); // Bail out if NaN is involved.
4153 __ j(not_equal, not_found); // The cache did not contain this value.
4154 __ jmp(&load_result_from_cache);
4155 }
4156
4157 __ bind(&smi_hash_calculated);
4158 // Object is smi and hash is now in scratch. Calculate cache index.
4159 __ and_(scratch, Operand(mask));
4160 Register index = scratch;
4161 // Check if the entry is the smi we are looking for.
4162 __ cmp(object,
4163 FieldOperand(number_string_cache,
4164 index,
4165 times_twice_pointer_size,
4166 FixedArray::kHeaderSize));
4167 __ j(not_equal, not_found);
4168
4169 // Get the result from the cache.
4170 __ bind(&load_result_from_cache);
4171 __ mov(result,
4172 FieldOperand(number_string_cache,
4173 index,
4174 times_twice_pointer_size,
4175 FixedArray::kHeaderSize + kPointerSize));
4176 __ IncrementCounter(&Counters::number_to_string_native, 1);
4177}
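
// Sketch of the cache-index computation above (assumption: ia32 layout, so the
// double's two 32-bit words can be read directly): smis hash to their untagged
// value, heap numbers to the xor of the value's lower and upper words, and
// both are masked by (cache length / 2 - 1).
static uint32_t NumberStringCacheIndexSketch(uint32_t lower_word,
                                             uint32_t upper_word,
                                             uint32_t mask) {
  return (lower_word ^ upper_word) & mask;
}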
4178
4179
4180void NumberToStringStub::Generate(MacroAssembler* masm) {
4181 Label runtime;
4182
4183 __ mov(ebx, Operand(esp, kPointerSize));
4184
4185 // Generate code to lookup number in the number string cache.
4186 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
4187 __ ret(1 * kPointerSize);
4188
4189 __ bind(&runtime);
4190 // Handle number to string in the runtime system if not found in the cache.
4191 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
4192}
4193
4194
4195static int NegativeComparisonResult(Condition cc) {
4196 ASSERT(cc != equal);
4197 ASSERT((cc == less) || (cc == less_equal)
4198 || (cc == greater) || (cc == greater_equal));
4199 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
4200}
4201
4202void CompareStub::Generate(MacroAssembler* masm) {
4203 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4204
4205 Label check_unequal_objects, done;
4206
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00004207 // Compare two smis if required.
4208 if (include_smi_compare_) {
4209 Label non_smi, smi_done;
4210 __ mov(ecx, Operand(edx));
4211 __ or_(ecx, Operand(eax));
4212 __ test(ecx, Immediate(kSmiTagMask));
4213 __ j(not_zero, &non_smi, not_taken);
4214 __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
4215 __ j(no_overflow, &smi_done);
whesse@chromium.org4a5224e2010-10-20 12:37:07 +00004216 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00004217 __ bind(&smi_done);
4218 __ mov(eax, edx);
4219 __ ret(0);
4220 __ bind(&non_smi);
4221 } else if (FLAG_debug_code) {
4222 __ mov(ecx, Operand(edx));
4223 __ or_(ecx, Operand(eax));
4224 __ test(ecx, Immediate(kSmiTagMask));
4225 __ Assert(not_zero, "Unexpected smi operands.");
4226 }
4227
ricow@chromium.org65fae842010-08-25 15:26:24 +00004228 // NOTICE! This code is only reached after a smi-fast-case check, so
4229 // it is certain that at least one operand isn't a smi.
4230
4231 // Identical objects can be compared fast, but there are some tricky cases
4232 // for NaN and undefined.
4233 {
4234 Label not_identical;
4235 __ cmp(eax, Operand(edx));
4236 __ j(not_equal, &not_identical);
4237
4238 if (cc_ != equal) {
4239 // Check for undefined. undefined OP undefined is false even though
4240 // undefined == undefined.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004241 NearLabel check_for_nan;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004242 __ cmp(edx, Factory::undefined_value());
4243 __ j(not_equal, &check_for_nan);
4244 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4245 __ ret(0);
4246 __ bind(&check_for_nan);
4247 }
4248
4249 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
4250 // so we do the second best thing - test it ourselves.
4251 // Note: if cc_ != equal, never_nan_nan_ is not used.
4252 if (never_nan_nan_ && (cc_ == equal)) {
4253 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4254 __ ret(0);
4255 } else {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004256 NearLabel heap_number;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004257 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
4258 Immediate(Factory::heap_number_map()));
4259 __ j(equal, &heap_number);
4260 if (cc_ != equal) {
4261 // Call runtime on identical JSObjects. Otherwise return equal.
4262 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4263 __ j(above_equal, &not_identical);
4264 }
4265 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4266 __ ret(0);
4267
4268 __ bind(&heap_number);
4269 // It is a heap number, so return non-equal if it's NaN and equal if
4270 // it's not NaN.
4271 // The representation of NaN values has all exponent bits (52..62) set,
4272 // and not all mantissa bits (0..51) clear.
4273 // We only accept QNaNs, which have bit 51 set.
4274 // Read top bits of double representation (second word of value).
4275
4276 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
4277 // all bits in the mask are set. We only need to check the word
4278 // that contains the exponent and high bit of the mantissa.
4279 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
4280 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004281 __ Set(eax, Immediate(0));
ricow@chromium.org65fae842010-08-25 15:26:24 +00004282 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
4283 // bits.
4284 __ add(edx, Operand(edx));
4285 __ cmp(edx, kQuietNaNHighBitsMask << 1);
4286 if (cc_ == equal) {
4287 STATIC_ASSERT(EQUAL != 1);
4288 __ setcc(above_equal, eax);
4289 __ ret(0);
4290 } else {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004291 NearLabel nan;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004292 __ j(above_equal, &nan);
4293 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
4294 __ ret(0);
4295 __ bind(&nan);
4296 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4297 __ ret(0);
4298 }
4299 }
4300
4301 __ bind(&not_identical);
4302 }
4303
4304 // Strict equality can quickly decide whether objects are equal.
4305 // Non-strict object equality is slower, so it is handled later in the stub.
4306 if (cc_ == equal && strict_) {
4307 Label slow; // Fallthrough label.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004308 NearLabel not_smis;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004309 // If we're doing a strict equality comparison, we don't have to do
4310 // type conversion, so we generate code to do fast comparison for objects
4311 // and oddballs. Non-smi numbers and strings still go through the usual
4312 // slow-case code.
4313 // If either is a Smi (we know that not both are), then they can only
4314 // be equal if the other is a HeapNumber. If so, use the slow case.
4315 STATIC_ASSERT(kSmiTag == 0);
4316 ASSERT_EQ(0, Smi::FromInt(0));
4317 __ mov(ecx, Immediate(kSmiTagMask));
4318 __ and_(ecx, Operand(eax));
4319 __ test(ecx, Operand(edx));
4320 __ j(not_zero, &not_smis);
4321 // One operand is a smi.
4322
4323 // Check whether the non-smi is a heap number.
4324 STATIC_ASSERT(kSmiTagMask == 1);
4325 // ecx still holds eax & kSmiTag, which is either zero or one.
4326 __ sub(Operand(ecx), Immediate(0x01));
4327 __ mov(ebx, edx);
4328 __ xor_(ebx, Operand(eax));
4329 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
4330 __ xor_(ebx, Operand(eax));
4331 // if eax was smi, ebx is now edx, else eax.
4332
4333 // Check if the non-smi operand is a heap number.
4334 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
4335 Immediate(Factory::heap_number_map()));
4336 // If heap number, handle it in the slow case.
4337 __ j(equal, &slow);
4338 // Return non-equal (ebx is not zero)
4339 __ mov(eax, ebx);
4340 __ ret(0);
4341
4342 __ bind(&not_smis);
4343 // If either operand is a JSObject or an oddball value, then they are not
4344 // equal since their pointers are different
4345 // There is no test for undetectability in strict equality.
4346
4347 // Get the type of the first operand.
4348 // If the first object is a JS object, we have done pointer comparison.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004349 NearLabel first_non_object;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004350 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4351 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4352 __ j(below, &first_non_object);
4353
4354 // Return non-zero (eax is not zero)
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004355 NearLabel return_not_equal;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004356 STATIC_ASSERT(kHeapObjectTag != 0);
4357 __ bind(&return_not_equal);
4358 __ ret(0);
4359
4360 __ bind(&first_non_object);
4361 // Check for oddballs: true, false, null, undefined.
4362 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4363 __ j(equal, &return_not_equal);
4364
4365 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx);
4366 __ j(above_equal, &return_not_equal);
4367
4368 // Check for oddballs: true, false, null, undefined.
4369 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4370 __ j(equal, &return_not_equal);
4371
4372 // Fall through to the general case.
4373 __ bind(&slow);
4374 }
4375
4376 // Generate the number comparison code.
4377 if (include_number_compare_) {
4378 Label non_number_comparison;
4379 Label unordered;
4380 if (CpuFeatures::IsSupported(SSE2)) {
4381 CpuFeatures::Scope use_sse2(SSE2);
4382 CpuFeatures::Scope use_cmov(CMOV);
4383
4384 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
4385 __ ucomisd(xmm0, xmm1);
4386
4387 // Don't base result on EFLAGS when a NaN is involved.
4388 __ j(parity_even, &unordered, not_taken);
4389 // Return a result of -1, 0, or 1, based on EFLAGS.
4390 __ mov(eax, 0); // equal
4391 __ mov(ecx, Immediate(Smi::FromInt(1)));
4392 __ cmov(above, eax, Operand(ecx));
4393 __ mov(ecx, Immediate(Smi::FromInt(-1)));
4394 __ cmov(below, eax, Operand(ecx));
4395 __ ret(0);
4396 } else {
4397 FloatingPointHelper::CheckFloatOperands(
4398 masm, &non_number_comparison, ebx);
4399 FloatingPointHelper::LoadFloatOperand(masm, eax);
4400 FloatingPointHelper::LoadFloatOperand(masm, edx);
4401 __ FCmp();
4402
4403 // Don't base result on EFLAGS when a NaN is involved.
4404 __ j(parity_even, &unordered, not_taken);
4405
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004406 NearLabel below_label, above_label;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004407 // Return a result of -1, 0, or 1, based on EFLAGS.
4408 __ j(below, &below_label, not_taken);
4409 __ j(above, &above_label, not_taken);
4410
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004411 __ Set(eax, Immediate(0));
ricow@chromium.org65fae842010-08-25 15:26:24 +00004412 __ ret(0);
4413
4414 __ bind(&below_label);
4415 __ mov(eax, Immediate(Smi::FromInt(-1)));
4416 __ ret(0);
4417
4418 __ bind(&above_label);
4419 __ mov(eax, Immediate(Smi::FromInt(1)));
4420 __ ret(0);
4421 }
4422
4423 // If one of the numbers was NaN, then the result is always false.
4424 // The cc is never not-equal.
4425 __ bind(&unordered);
4426 ASSERT(cc_ != not_equal);
4427 if (cc_ == less || cc_ == less_equal) {
4428 __ mov(eax, Immediate(Smi::FromInt(1)));
4429 } else {
4430 __ mov(eax, Immediate(Smi::FromInt(-1)));
4431 }
4432 __ ret(0);
4433
4434 // The number comparison code did not provide a valid result.
4435 __ bind(&non_number_comparison);
4436 }
4437
4438 // Fast negative check for symbol-to-symbol equality.
4439 Label check_for_strings;
4440 if (cc_ == equal) {
4441 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
4442 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
4443
4444 // We've already checked for object identity, so if both operands
4445 // are symbols they aren't equal. Register eax already holds a
4446 // non-zero value, which indicates not equal, so just return.
4447 __ ret(0);
4448 }
4449
4450 __ bind(&check_for_strings);
4451
4452 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
4453 &check_unequal_objects);
4454
4455 // Inline comparison of ascii strings.
4456 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
4457 edx,
4458 eax,
4459 ecx,
4460 ebx,
4461 edi);
4462#ifdef DEBUG
4463 __ Abort("Unexpected fall-through from string comparison");
4464#endif
4465
4466 __ bind(&check_unequal_objects);
4467 if (cc_ == equal && !strict_) {
4468 // Non-strict equality. Objects are unequal if
4469 // they are both JSObjects and not undetectable,
4470 // and their pointers are different.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004471 NearLabel not_both_objects;
4472 NearLabel return_unequal;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004473 // At most one is a smi, so we can test for smi by adding the two.
4474 // A smi plus a heap object has the low bit set, a heap object plus
4475 // a heap object has the low bit clear.
4476 STATIC_ASSERT(kSmiTag == 0);
4477 STATIC_ASSERT(kSmiTagMask == 1);
4478 __ lea(ecx, Operand(eax, edx, times_1, 0));
4479 __ test(ecx, Immediate(kSmiTagMask));
4480 __ j(not_zero, &not_both_objects);
4481 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
4482 __ j(below, &not_both_objects);
4483 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx);
4484 __ j(below, &not_both_objects);
4485 // We do not bail out after this point. Both are JSObjects, and
4486 // they are equal if and only if both are undetectable.
4487 // The and of the undetectable flags is 1 if and only if they are equal.
4488 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
4489 1 << Map::kIsUndetectable);
4490 __ j(zero, &return_unequal);
4491 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
4492 1 << Map::kIsUndetectable);
4493 __ j(zero, &return_unequal);
4494 // The objects are both undetectable, so they both compare as the value
4495 // undefined, and are equal.
4496 __ Set(eax, Immediate(EQUAL));
4497 __ bind(&return_unequal);
4498 // Return non-equal by returning the non-zero object pointer in eax,
4499 // or return equal if we fell through to here.
 4500 __ ret(0);
4501 __ bind(&not_both_objects);
4502 }
4503
4504 // Push arguments below the return address.
4505 __ pop(ecx);
4506 __ push(edx);
4507 __ push(eax);
4508
4509 // Figure out which native to call and setup the arguments.
4510 Builtins::JavaScript builtin;
4511 if (cc_ == equal) {
4512 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
4513 } else {
4514 builtin = Builtins::COMPARE;
4515 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4516 }
4517
4518 // Restore return address on the stack.
4519 __ push(ecx);
4520
4521 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
4522 // tagged as a small integer.
4523 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
4524}
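
// Sketch of the QNaN test in the identical-heap-number path above: drop the
// sign bit from the high word of the double and compare what remains against
// kQuietNaNHighBitsMask shifted the same way. The helper is illustrative only.
static bool IsQuietNaNSketch(uint32_t exponent_word) {
  uint32_t shifted = exponent_word << 1;  // add(edx, edx): discard the sign bit
  return shifted >= (static_cast<uint32_t>(kQuietNaNHighBitsMask) << 1);
}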
4525
4526
4527void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
4528 Label* label,
4529 Register object,
4530 Register scratch) {
4531 __ test(object, Immediate(kSmiTagMask));
4532 __ j(zero, label);
4533 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
4534 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4535 __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
4536 __ cmp(scratch, kSymbolTag | kStringTag);
4537 __ j(not_equal, label);
4538}
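
// Sketch of the instance-type test above: the operand is a symbol exactly when
// the string bit (kIsNotStringMask clear, i.e. kStringTag) and the symbol bit
// (kSymbolTag) are both present in its instance type. Illustrative helper:
static bool IsSymbolInstanceTypeSketch(uint32_t instance_type) {
  return (instance_type & (kIsSymbolMask | kIsNotStringMask)) ==
         (kSymbolTag | kStringTag);
}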
4539
4540
4541void StackCheckStub::Generate(MacroAssembler* masm) {
whesse@chromium.org4a5224e2010-10-20 12:37:07 +00004542 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
ricow@chromium.org65fae842010-08-25 15:26:24 +00004543}
4544
4545
4546void CallFunctionStub::Generate(MacroAssembler* masm) {
4547 Label slow;
4548
4549 // If the receiver might be a value (string, number or boolean) check for this
4550 // and box it if it is.
4551 if (ReceiverMightBeValue()) {
4552 // Get the receiver from the stack.
4553 // +1 ~ return address
4554 Label receiver_is_value, receiver_is_js_object;
4555 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
4556
4557 // Check if receiver is a smi (which is a number value).
4558 __ test(eax, Immediate(kSmiTagMask));
4559 __ j(zero, &receiver_is_value, not_taken);
4560
4561 // Check if the receiver is a valid JS object.
4562 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
4563 __ j(above_equal, &receiver_is_js_object);
4564
4565 // Call the runtime to box the value.
4566 __ bind(&receiver_is_value);
4567 __ EnterInternalFrame();
4568 __ push(eax);
4569 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
4570 __ LeaveInternalFrame();
4571 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
4572
4573 __ bind(&receiver_is_js_object);
4574 }
4575
4576 // Get the function to call from the stack.
4577 // +2 ~ receiver, return address
4578 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
4579
4580 // Check that the function really is a JavaScript function.
4581 __ test(edi, Immediate(kSmiTagMask));
4582 __ j(zero, &slow, not_taken);
4583 // Goto slow case if we do not have a function.
4584 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
4585 __ j(not_equal, &slow, not_taken);
4586
4587 // Fast-case: Just invoke the function.
4588 ParameterCount actual(argc_);
4589 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
4590
4591 // Slow-case: Non-function called.
4592 __ bind(&slow);
4593 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
4594 // of the original receiver from the call site).
4595 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
4596 __ Set(eax, Immediate(argc_));
4597 __ Set(ebx, Immediate(0));
4598 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
4599 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
4600 __ jmp(adaptor, RelocInfo::CODE_TARGET);
4601}
4602
4603
4604void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
4605 // eax holds the exception.
4606
4607 // Adjust this code if not the case.
4608 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
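  // Once esp is dropped to the top handler below, the handler looks like this
  // on the stack (sketch derived from the asserts and the pops that follow):
  //   esp[0]  : next handler address
  //   esp[4]  : saved frame pointer
  //   esp[8]  : state
  //   esp[12] : return address, consumed by the final ret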
4609
4610 // Drop the sp to the top of the handler.
4611 ExternalReference handler_address(Top::k_handler_address);
4612 __ mov(esp, Operand::StaticVariable(handler_address));
4613
4614 // Restore next handler and frame pointer, discard handler state.
4615 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4616 __ pop(Operand::StaticVariable(handler_address));
4617 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
4618 __ pop(ebp);
4619 __ pop(edx); // Remove state.
4620
4621 // Before returning we restore the context from the frame pointer if
4622 // not NULL. The frame pointer is NULL in the exception handler of
4623 // a JS entry frame.
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004624 __ Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004625 NearLabel skip;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004626 __ cmp(ebp, 0);
4627 __ j(equal, &skip, not_taken);
4628 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4629 __ bind(&skip);
4630
4631 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
4632 __ ret(0);
4633}
4634
4635
ricow@chromium.org65fae842010-08-25 15:26:24 +00004636void CEntryStub::GenerateCore(MacroAssembler* masm,
4637 Label* throw_normal_exception,
4638 Label* throw_termination_exception,
4639 Label* throw_out_of_memory_exception,
4640 bool do_gc,
ager@chromium.org0ee099b2011-01-25 14:06:47 +00004641 bool always_allocate_scope) {
ricow@chromium.org65fae842010-08-25 15:26:24 +00004642 // eax: result parameter for PerformGC, if any
4643 // ebx: pointer to C function (C callee-saved)
4644 // ebp: frame pointer (restored after C call)
4645 // esp: stack pointer (restored after C call)
4646 // edi: number of arguments including receiver (C callee-saved)
4647 // esi: pointer to the first argument (C callee-saved)
4648
4649 // Result returned in eax, or eax+edx if result_size_ is 2.
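  // The callee in ebx is a plain C function: the code below passes it two
  // arguments through the outgoing stack slots, the argument count (edi) at
  // esp[0] and the pointer to the first argument (esi) at esp[4]. The exact
  // C-side signature is declared elsewhere; this note only describes the
  // slots written here.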
4650
4651 // Check stack alignment.
4652 if (FLAG_debug_code) {
4653 __ CheckStackAlignment();
4654 }
4655
4656 if (do_gc) {
4657 // Pass failure code returned from last attempt as first argument to
4658 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
4659 // stack alignment is known to be correct. This function takes one argument
4660 // which is passed on the stack, and we know that the stack has been
4661 // prepared to pass at least one argument.
4662 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
4663 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
4664 }
4665
4666 ExternalReference scope_depth =
4667 ExternalReference::heap_always_allocate_scope_depth();
4668 if (always_allocate_scope) {
4669 __ inc(Operand::StaticVariable(scope_depth));
4670 }
4671
4672 // Call C function.
4673 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
4674 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
4675 __ call(Operand(ebx));
4676 // Result is in eax or edx:eax - do not destroy these registers!
4677
4678 if (always_allocate_scope) {
4679 __ dec(Operand::StaticVariable(scope_depth));
4680 }
4681
4682 // Make sure we're not trying to return 'the hole' from the runtime
4683 // call as this may lead to crashes in the IC code later.
4684 if (FLAG_debug_code) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004685 NearLabel okay;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004686 __ cmp(eax, Factory::the_hole_value());
4687 __ j(not_equal, &okay);
4688 __ int3();
4689 __ bind(&okay);
4690 }
4691
4692 // Check for failure result.
4693 Label failure_returned;
4694 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4695 __ lea(ecx, Operand(eax, 1));
4696 // Lower 2 bits of ecx are 0 iff eax has failure tag.
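  // (Reasoning sketch: smis end in a 0 bit and heap objects in 01, so adding
  // one never clears both low tag bits for them; only a failure, whose low
  // bits are all ones as the STATIC_ASSERT above implies, rolls over to 00.)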
4697 __ test(ecx, Immediate(kFailureTagMask));
4698 __ j(zero, &failure_returned, not_taken);
4699
4700 // Exit the JavaScript to C++ exit frame.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00004701 __ LeaveExitFrame(save_doubles_);
ricow@chromium.org65fae842010-08-25 15:26:24 +00004702 __ ret(0);
4703
4704 // Handling of failure.
4705 __ bind(&failure_returned);
4706
4707 Label retry;
4708 // If the returned failure is RETRY_AFTER_GC, continue at the retry label.
4709 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4710 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
4711 __ j(zero, &retry, taken);
4712
4713 // Special handling of out of memory exceptions.
4714 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4715 __ j(equal, throw_out_of_memory_exception);
4716
4717 // Retrieve the pending exception and clear the variable.
4718 ExternalReference pending_exception_address(Top::k_pending_exception_address);
4719 __ mov(eax, Operand::StaticVariable(pending_exception_address));
4720 __ mov(edx,
4721 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
4722 __ mov(Operand::StaticVariable(pending_exception_address), edx);
4723
4724 // Special handling of termination exceptions which are uncatchable
4725 // by javascript code.
4726 __ cmp(eax, Factory::termination_exception());
4727 __ j(equal, throw_termination_exception);
4728
4729 // Handle normal exception.
4730 __ jmp(throw_normal_exception);
4731
4732 // Retry.
4733 __ bind(&retry);
4734}
4735
4736
4737void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
4738 UncatchableExceptionType type) {
4739 // Adjust this code if not the case.
4740 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
4741
4742 // Drop sp to the top stack handler.
4743 ExternalReference handler_address(Top::k_handler_address);
4744 __ mov(esp, Operand::StaticVariable(handler_address));
4745
4746 // Unwind the handlers until the ENTRY handler is found.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00004747 NearLabel loop, done;
ricow@chromium.org65fae842010-08-25 15:26:24 +00004748 __ bind(&loop);
4749 // Load the type of the current stack handler.
4750 const int kStateOffset = StackHandlerConstants::kStateOffset;
4751 __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
4752 __ j(equal, &done);
4753 // Fetch the next handler in the list.
4754 const int kNextOffset = StackHandlerConstants::kNextOffset;
4755 __ mov(esp, Operand(esp, kNextOffset));
4756 __ jmp(&loop);
4757 __ bind(&done);
4758
4759 // Set the top handler address to next handler past the current ENTRY handler.
4760 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4761 __ pop(Operand::StaticVariable(handler_address));
4762
4763 if (type == OUT_OF_MEMORY) {
4764 // Set external caught exception to false.
4765 ExternalReference external_caught(Top::k_external_caught_exception_address);
4766 __ mov(eax, false);
4767 __ mov(Operand::StaticVariable(external_caught), eax);
4768
4769 // Set pending exception and eax to out of memory exception.
4770 ExternalReference pending_exception(Top::k_pending_exception_address);
4771 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4772 __ mov(Operand::StaticVariable(pending_exception), eax);
4773 }
4774
4775 // Clear the context pointer.
lrn@chromium.org5d00b602011-01-05 09:51:43 +00004776 __ Set(esi, Immediate(0));
ricow@chromium.org65fae842010-08-25 15:26:24 +00004777
4778 // Restore fp from handler and discard handler state.
4779 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
4780 __ pop(ebp);
4781 __ pop(edx); // State.
4782
4783 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
4784 __ ret(0);
4785}
4786
4787
4788void CEntryStub::Generate(MacroAssembler* masm) {
4789 // eax: number of arguments including receiver
4790 // ebx: pointer to C function (C callee-saved)
4791 // ebp: frame pointer (restored after C call)
4792 // esp: stack pointer (restored after C call)
4793 // esi: current context (C callee-saved)
4794 // edi: JS function of the caller (C callee-saved)
4795
4796 // NOTE: Invocations of builtins may return failure objects instead
4797 // of a proper result. The builtin entry handles this by performing
4798 // a garbage collection and retrying the builtin (twice).
4799
4800 // Enter the exit frame that transitions from JavaScript to C++.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00004801 __ EnterExitFrame(save_doubles_);
ricow@chromium.org65fae842010-08-25 15:26:24 +00004802
4803 // eax: result parameter for PerformGC, if any (setup below)
4804 // ebx: pointer to builtin function (C callee-saved)
4805 // ebp: frame pointer (restored after C call)
4806 // esp: stack pointer (restored after C call)
4807 // edi: number of arguments including receiver (C callee-saved)
4808 // esi: argv pointer (C callee-saved)
4809
4810 Label throw_normal_exception;
4811 Label throw_termination_exception;
4812 Label throw_out_of_memory_exception;
4813
4814 // Call into the runtime system.
4815 GenerateCore(masm,
4816 &throw_normal_exception,
4817 &throw_termination_exception,
4818 &throw_out_of_memory_exception,
4819 false,
4820 false);
4821
4822 // Do space-specific GC and retry runtime call.
4823 GenerateCore(masm,
4824 &throw_normal_exception,
4825 &throw_termination_exception,
4826 &throw_out_of_memory_exception,
4827 true,
4828 false);
4829
4830 // Do full GC and retry runtime call one final time.
4831 Failure* failure = Failure::InternalError();
4832 __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
4833 GenerateCore(masm,
4834 &throw_normal_exception,
4835 &throw_termination_exception,
4836 &throw_out_of_memory_exception,
4837 true,
4838 true);
4839
4840 __ bind(&throw_out_of_memory_exception);
4841 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
4842
4843 __ bind(&throw_termination_exception);
4844 GenerateThrowUncatchable(masm, TERMINATION);
4845
4846 __ bind(&throw_normal_exception);
4847 GenerateThrowTOS(masm);
4848}
4849
4850
4851void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
4852 Label invoke, exit;
4853#ifdef ENABLE_LOGGING_AND_PROFILING
4854 Label not_outermost_js, not_outermost_js_2;
4855#endif
4856
4857 // Setup frame.
4858 __ push(ebp);
4859 __ mov(ebp, Operand(esp));
4860
4861 // Push marker in two places.
4862 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4863 __ push(Immediate(Smi::FromInt(marker))); // context slot
4864 __ push(Immediate(Smi::FromInt(marker))); // function slot
4865 // Save callee-saved registers (C calling conventions).
4866 __ push(edi);
4867 __ push(esi);
4868 __ push(ebx);
4869
4870 // Save copies of the top frame descriptor on the stack.
4871 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
4872 __ push(Operand::StaticVariable(c_entry_fp));
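  // Frame layout below the new ebp at this point (sketch of the pushes above):
  //   ebp-4  : entry-frame marker (context slot)
  //   ebp-8  : entry-frame marker (function slot)
  //   ebp-12 : saved edi
  //   ebp-16 : saved esi
  //   ebp-20 : saved ebx
  //   ebp-24 : saved c_entry_fp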
4873
4874#ifdef ENABLE_LOGGING_AND_PROFILING
4875 // If this is the outermost JS call, set js_entry_sp value.
4876 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
4877 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
4878 __ j(not_equal, &not_outermost_js);
4879 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
4880 __ bind(&not_outermost_js);
4881#endif
4882
4883 // Call a faked try-block that does the invoke.
4884 __ call(&invoke);
4885
4886 // Caught exception: Store result (exception) in the pending
4887 // exception field in the JSEnv and return a failure sentinel.
4888 ExternalReference pending_exception(Top::k_pending_exception_address);
4889 __ mov(Operand::StaticVariable(pending_exception), eax);
4890 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
4891 __ jmp(&exit);
4892
4893 // Invoke: Link this frame into the handler chain.
4894 __ bind(&invoke);
4895 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
4896
4897 // Clear any pending exceptions.
4898 __ mov(edx,
4899 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
4900 __ mov(Operand::StaticVariable(pending_exception), edx);
4901
4902 // Fake a receiver (NULL).
4903 __ push(Immediate(0)); // receiver
4904
4905 // Invoke the function by calling through JS entry trampoline
4906 // builtin and pop the faked function when we return. Notice that we
4907 // cannot store a reference to the trampoline code directly in this
4908 // stub, because the builtin stubs may not have been generated yet.
4909 if (is_construct) {
4910 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
4911 __ mov(edx, Immediate(construct_entry));
4912 } else {
4913 ExternalReference entry(Builtins::JSEntryTrampoline);
4914 __ mov(edx, Immediate(entry));
4915 }
4916 __ mov(edx, Operand(edx, 0)); // deref address
4917 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
4918 __ call(Operand(edx));
4919
4920 // Unlink this frame from the handler chain.
4921 __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
4922 // Pop next_sp.
4923 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
4924
4925#ifdef ENABLE_LOGGING_AND_PROFILING
4926 // If current EBP value is the same as js_entry_sp value, it means that
4927 // the current function is the outermost.
4928 __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
4929 __ j(not_equal, &not_outermost_js_2);
4930 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
4931 __ bind(&not_outermost_js_2);
4932#endif
4933
4934 // Restore the top frame descriptor from the stack.
4935 __ bind(&exit);
4936 __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));
4937
4938 // Restore callee-saved registers (C calling conventions).
4939 __ pop(ebx);
4940 __ pop(esi);
4941 __ pop(edi);
4942 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
4943
4944 // Restore frame pointer and return.
4945 __ pop(ebp);
4946 __ ret(0);
4947}
4948
4949
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00004950// Generate stub code for instanceof.
4951// This code can patch an inlined cache at the call site of the instanceof
4952// check, which looks like this.
4953//
4954// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
4955// 75 0a jne <some near label>
4956// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
4957//
4958// If call site patching is requested, the stack will have the delta from the
4959// return address to the cmp instruction just below the return address. This
4960// also means that call site patching can only take place with arguments in
4961// registers. TOS looks like this when call site patching is requested:
4962//
4963// esp[0] : return address
4964// esp[4] : delta from return address to cmp instruction
4965//
ricow@chromium.org65fae842010-08-25 15:26:24 +00004966void InstanceofStub::Generate(MacroAssembler* masm) {
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00004967 // Call site inlining and patching implies arguments in registers.
4968 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
4969
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00004970 // Fixed register usage throughout the stub.
4971 Register object = eax; // Object (lhs).
4972 Register map = ebx; // Map of the object.
4973 Register function = edx; // Function (rhs).
4974 Register prototype = edi; // Prototype of the function.
4975 Register scratch = ecx;
4976
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00004977 // Constants describing the call site code to patch.
4978 static const int kDeltaToCmpImmediate = 2;
4979 static const int kDeltaToMov = 8;
4980 static const int kDeltaToMovImmediate = 9;
4981 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
4982 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
4983 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
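  // (The deltas above follow from the byte sequence in the comment preceding
  // this function: the cmp opcode "81 ff" is two bytes, so its 32-bit
  // immediate starts at offset 2; the cmp (6 bytes) plus the jne (2 bytes)
  // put the mov opcode at offset 8 and its immediate at offset 9.)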
4984
4985 ExternalReference roots_address = ExternalReference::roots_address();
4986
4987 ASSERT_EQ(object.code(), InstanceofStub::left().code());
4988 ASSERT_EQ(function.code(), InstanceofStub::right().code());
4989
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00004990 // Get the object and function - they are always both needed.
4991 Label slow, not_js_object;
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00004992 if (!HasArgsInRegisters()) {
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00004993 __ mov(object, Operand(esp, 2 * kPointerSize));
4994 __ mov(function, Operand(esp, 1 * kPointerSize));
4995 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00004996
4997 // Check that the left hand is a JS object.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00004998 __ test(object, Immediate(kSmiTagMask));
4999 __ j(zero, &not_js_object, not_taken);
5000 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005001
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005002 // If there is a call site cache don't look in the global cache, but do the
5003 // real lookup and update the call site cache.
5004 if (!HasCallSiteInlineCheck()) {
5005 // Look up the function and the map in the instanceof cache.
5006 NearLabel miss;
5007 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
5008 __ cmp(function,
5009 Operand::StaticArray(scratch, times_pointer_size, roots_address));
5010 __ j(not_equal, &miss);
5011 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
5012 __ cmp(map, Operand::StaticArray(
5013 scratch, times_pointer_size, roots_address));
5014 __ j(not_equal, &miss);
5015 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5016 __ mov(eax, Operand::StaticArray(
5017 scratch, times_pointer_size, roots_address));
5018 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
5019 __ bind(&miss);
5020 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005021
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005022 // Get the prototype of the function.
5023 __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005024
5025 // Check that the function prototype is a JS object.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005026 __ test(prototype, Immediate(kSmiTagMask));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005027 __ j(zero, &slow, not_taken);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005028 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005029
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005030 // Update the global instanceof or call site inlined cache with the current
5031 // map and function. The cached answer will be set when it is known below.
5032 if (!HasCallSiteInlineCheck()) {
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005033 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
5034 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
5035 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
5036 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
5037 function);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005038 } else {
5039 // The constants for the code patching are based on no push instructions
5040 // at the call site.
5041 ASSERT(HasArgsInRegisters());
5042 // Get return address and delta to inlined map check.
5043 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5044 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5045 if (FLAG_debug_code) {
5046 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
5047 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
5048 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
5049 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
5050 }
5051 __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
5052 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005053
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005054 // Loop through the prototype chain of the object looking for the function
5055 // prototype.
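  // (At the JS level this implements "object instanceof function": true if
  // function.prototype occurs anywhere on object's prototype chain, false
  // once the chain terminates at null.)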
5056 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005057 NearLabel loop, is_instance, is_not_instance;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005058 __ bind(&loop);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005059 __ cmp(scratch, Operand(prototype));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005060 __ j(equal, &is_instance);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005061 __ cmp(Operand(scratch), Immediate(Factory::null_value()));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005062 __ j(equal, &is_not_instance);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005063 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
5064 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
ricow@chromium.org65fae842010-08-25 15:26:24 +00005065 __ jmp(&loop);
5066
5067 __ bind(&is_instance);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005068 if (!HasCallSiteInlineCheck()) {
5069 __ Set(eax, Immediate(0));
5070 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5071 __ mov(Operand::StaticArray(scratch,
5072 times_pointer_size, roots_address), eax);
5073 } else {
5074 // Get return address and delta to inlined map check.
5075 __ mov(eax, Factory::true_value());
5076 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5077 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5078 if (FLAG_debug_code) {
5079 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5080 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5081 }
5082 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5083 if (!ReturnTrueFalseObject()) {
5084 __ Set(eax, Immediate(0));
5085 }
5086 }
5087 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005088
5089 __ bind(&is_not_instance);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005090 if (!HasCallSiteInlineCheck()) {
5091 __ Set(eax, Immediate(Smi::FromInt(1)));
5092 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
5093 __ mov(Operand::StaticArray(
5094 scratch, times_pointer_size, roots_address), eax);
5095 } else {
5096 // Get return address and delta to inlined map check.
5097 __ mov(eax, Factory::false_value());
5098 __ mov(scratch, Operand(esp, 0 * kPointerSize));
5099 __ sub(scratch, Operand(esp, 1 * kPointerSize));
5100 if (FLAG_debug_code) {
5101 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
5102 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
5103 }
5104 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
5105 if (!ReturnTrueFalseObject()) {
5106 __ Set(eax, Immediate(Smi::FromInt(1)));
5107 }
5108 }
5109 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005110
5111 Label object_not_null, object_not_null_or_smi;
5112 __ bind(&not_js_object);
5113 // Before the null, smi and string value checks, check that the rhs is a
5114 // function, because a non-function rhs must throw an exception.
5115 __ test(function, Immediate(kSmiTagMask));
5116 __ j(zero, &slow, not_taken);
5117 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
5118 __ j(not_equal, &slow, not_taken);
5119
5120 // Null is not an instance of anything.
5121 __ cmp(object, Factory::null_value());
5122 __ j(not_equal, &object_not_null);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005123 __ Set(eax, Immediate(Smi::FromInt(1)));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005124 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005125
5126 __ bind(&object_not_null);
5127 // A smi value is not an instance of anything.
5128 __ test(object, Immediate(kSmiTagMask));
5129 __ j(not_zero, &object_not_null_or_smi, not_taken);
5130 __ Set(eax, Immediate(Smi::FromInt(1)));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005131 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005132
5133 __ bind(&object_not_null_or_smi);
5134 // String values are not instances of anything.
5135 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
5136 __ j(NegateCondition(is_string), &slow);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005137 __ Set(eax, Immediate(Smi::FromInt(1)));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005138 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005139
5140 // Slow-case: Go through the JavaScript implementation.
5141 __ bind(&slow);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005142 if (!ReturnTrueFalseObject()) {
5143 // Tail call the builtin which returns 0 or 1.
5144 if (HasArgsInRegisters()) {
5145 // Push arguments below return address.
5146 __ pop(scratch);
5147 __ push(object);
5148 __ push(function);
5149 __ push(scratch);
5150 }
5151 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
5152 } else {
5153 // Call the builtin and convert 0/1 to true/false.
5154 __ EnterInternalFrame();
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005155 __ push(object);
5156 __ push(function);
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005157 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
5158 __ LeaveInternalFrame();
5159 NearLabel true_value, done;
5160 __ test(eax, Operand(eax));
5161 __ j(zero, &true_value);
5162 __ mov(eax, Factory::false_value());
5163 __ jmp(&done);
5164 __ bind(&true_value);
5165 __ mov(eax, Factory::true_value());
5166 __ bind(&done);
5167 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00005168 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005169}
5170
5171
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00005172Register InstanceofStub::left() { return eax; }
5173
5174
5175Register InstanceofStub::right() { return edx; }
5176
5177
ricow@chromium.org65fae842010-08-25 15:26:24 +00005178int CompareStub::MinorKey() {
5179 // Encode the parameters in a unique 16 bit value. To avoid duplicate stubs,
5180 // the never-NaN-NaN condition is only taken into account if the condition
5181 // is equal.
5182 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
5183 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
5184 return ConditionField::encode(static_cast<unsigned>(cc_))
5185 | RegisterField::encode(false) // lhs_ and rhs_ are not used
5186 | StrictField::encode(strict_)
5187 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005188 | IncludeNumberCompareField::encode(include_number_compare_)
5189 | IncludeSmiCompareField::encode(include_smi_compare_);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005190}
5191
5192
5193// Unfortunately you have to run without snapshots to see most of these
5194// names in the profile since most compare stubs end up in the snapshot.
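// For example (following the format string below), a strict equality stub
// built without number and smi compare support gets the name
// "CompareStub_EQ_STRICT_NO_NUMBER_NO_SMI".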
5195const char* CompareStub::GetName() {
5196 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
5197
5198 if (name_ != NULL) return name_;
5199 const int kMaxNameLength = 100;
5200 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
5201 if (name_ == NULL) return "OOM";
5202
5203 const char* cc_name;
5204 switch (cc_) {
5205 case less: cc_name = "LT"; break;
5206 case greater: cc_name = "GT"; break;
5207 case less_equal: cc_name = "LE"; break;
5208 case greater_equal: cc_name = "GE"; break;
5209 case equal: cc_name = "EQ"; break;
5210 case not_equal: cc_name = "NE"; break;
5211 default: cc_name = "UnknownCondition"; break;
5212 }
5213
5214 const char* strict_name = "";
5215 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
5216 strict_name = "_STRICT";
5217 }
5218
5219 const char* never_nan_nan_name = "";
5220 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
5221 never_nan_nan_name = "_NO_NAN";
5222 }
5223
5224 const char* include_number_compare_name = "";
5225 if (!include_number_compare_) {
5226 include_number_compare_name = "_NO_NUMBER";
5227 }
5228
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005229 const char* include_smi_compare_name = "";
5230 if (!include_smi_compare_) {
5231 include_smi_compare_name = "_NO_SMI";
5232 }
5233
ricow@chromium.org65fae842010-08-25 15:26:24 +00005234 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005235 "CompareStub_%s%s%s%s%s",
ricow@chromium.org65fae842010-08-25 15:26:24 +00005236 cc_name,
5237 strict_name,
5238 never_nan_nan_name,
erik.corry@gmail.comd88afa22010-09-15 12:33:05 +00005239 include_number_compare_name,
5240 include_smi_compare_name);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005241 return name_;
5242}
5243
5244
5245// -------------------------------------------------------------------------
5246// StringCharCodeAtGenerator
5247
5248void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
5249 Label flat_string;
5250 Label ascii_string;
5251 Label got_char_code;
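  // Fast path sketch: on success control falls through to exit_ with the
  // character code as a smi in result_; non-string receivers, non-smi indices,
  // out-of-range indices and non-flat strings bail out through the slow-case
  // labels instead.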
5252
5253 // If the receiver is a smi trigger the non-string case.
5254 STATIC_ASSERT(kSmiTag == 0);
5255 __ test(object_, Immediate(kSmiTagMask));
5256 __ j(zero, receiver_not_string_);
5257
5258 // Fetch the instance type of the receiver into result register.
5259 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
5260 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
5261 // If the receiver is not a string trigger the non-string case.
5262 __ test(result_, Immediate(kIsNotStringMask));
5263 __ j(not_zero, receiver_not_string_);
5264
5265 // If the index is non-smi trigger the non-smi case.
5266 STATIC_ASSERT(kSmiTag == 0);
5267 __ test(index_, Immediate(kSmiTagMask));
5268 __ j(not_zero, &index_not_smi_);
5269
5270 // Put smi-tagged index into scratch register.
5271 __ mov(scratch_, index_);
5272 __ bind(&got_smi_index_);
5273
5274 // Check for index out of range.
5275 __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
5276 __ j(above_equal, index_out_of_range_);
5277
5278 // We need special handling for non-flat strings.
5279 STATIC_ASSERT(kSeqStringTag == 0);
5280 __ test(result_, Immediate(kStringRepresentationMask));
5281 __ j(zero, &flat_string);
5282
5283 // Handle non-flat strings.
5284 __ test(result_, Immediate(kIsConsStringMask));
5285 __ j(zero, &call_runtime_);
5286
5287 // ConsString.
5288 // Check whether the right hand side is the empty string (i.e. if
5289 // this is really a flat string in a cons string). If that is not
5290 // the case we would rather go to the runtime system now to flatten
5291 // the string.
5292 __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
5293 Immediate(Factory::empty_string()));
5294 __ j(not_equal, &call_runtime_);
5295 // Get the first of the two strings and load its instance type.
5296 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
5297 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
5298 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
5299 // If the first cons component is also non-flat, then go to runtime.
5300 STATIC_ASSERT(kSeqStringTag == 0);
5301 __ test(result_, Immediate(kStringRepresentationMask));
5302 __ j(not_zero, &call_runtime_);
5303
5304 // Check for 1-byte or 2-byte string.
5305 __ bind(&flat_string);
5306 STATIC_ASSERT(kAsciiStringTag != 0);
5307 __ test(result_, Immediate(kStringEncodingMask));
5308 __ j(not_zero, &ascii_string);
5309
5310 // 2-byte string.
5311 // Load the 2-byte character code into the result register.
5312 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
5313 __ movzx_w(result_, FieldOperand(object_,
5314 scratch_, times_1, // Scratch is smi-tagged.
5315 SeqTwoByteString::kHeaderSize));
5316 __ jmp(&got_char_code);
5317
5318 // ASCII string.
5319 // Load the byte into the result register.
5320 __ bind(&ascii_string);
5321 __ SmiUntag(scratch_);
5322 __ movzx_b(result_, FieldOperand(object_,
5323 scratch_, times_1,
5324 SeqAsciiString::kHeaderSize));
5325 __ bind(&got_char_code);
5326 __ SmiTag(result_);
5327 __ bind(&exit_);
5328}
5329
5330
5331void StringCharCodeAtGenerator::GenerateSlow(
5332 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5333 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
5334
5335 // Index is not a smi.
5336 __ bind(&index_not_smi_);
5337 // If index is a heap number, try converting it to an integer.
5338 __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
5339 call_helper.BeforeCall(masm);
5340 __ push(object_);
5341 __ push(index_);
5342 __ push(index_); // Consumed by runtime conversion function.
5343 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
5344 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
5345 } else {
5346 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
5347 // NumberToSmi discards numbers that are not exact integers.
5348 __ CallRuntime(Runtime::kNumberToSmi, 1);
5349 }
5350 if (!scratch_.is(eax)) {
5351 // Save the conversion result before the pop instructions below
5352 // have a chance to overwrite it.
5353 __ mov(scratch_, eax);
5354 }
5355 __ pop(index_);
5356 __ pop(object_);
5357 // Reload the instance type.
5358 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
5359 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
5360 call_helper.AfterCall(masm);
5361 // If index is still not a smi, it must be out of range.
5362 STATIC_ASSERT(kSmiTag == 0);
5363 __ test(scratch_, Immediate(kSmiTagMask));
5364 __ j(not_zero, index_out_of_range_);
5365 // Otherwise, return to the fast path.
5366 __ jmp(&got_smi_index_);
5367
5368 // Call runtime. We get here when the receiver is a string and the
5369 // index is a number, but the code for getting the actual character
5370 // is too complex (e.g., when the string needs to be flattened).
5371 __ bind(&call_runtime_);
5372 call_helper.BeforeCall(masm);
5373 __ push(object_);
5374 __ push(index_);
5375 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
5376 if (!result_.is(eax)) {
5377 __ mov(result_, eax);
5378 }
5379 call_helper.AfterCall(masm);
5380 __ jmp(&exit_);
5381
5382 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
5383}
5384
5385
5386// -------------------------------------------------------------------------
5387// StringCharFromCodeGenerator
5388
5389void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
5390 // Fast case of Heap::LookupSingleCharacterStringFromCode.
5391 STATIC_ASSERT(kSmiTag == 0);
5392 STATIC_ASSERT(kSmiShiftSize == 0);
5393 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
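  // The cache loaded below is a fixed array indexed by character code; an
  // undefined entry is a cache miss and sends us to the slow case.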
5394 __ test(code_,
5395 Immediate(kSmiTagMask |
5396 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
5397 __ j(not_zero, &slow_case_, not_taken);
5398
5399 __ Set(result_, Immediate(Factory::single_character_string_cache()));
5400 STATIC_ASSERT(kSmiTag == 0);
5401 STATIC_ASSERT(kSmiTagSize == 1);
5402 STATIC_ASSERT(kSmiShiftSize == 0);
5403 // At this point the code register contains a smi-tagged ascii char code.
5404 __ mov(result_, FieldOperand(result_,
5405 code_, times_half_pointer_size,
5406 FixedArray::kHeaderSize));
5407 __ cmp(result_, Factory::undefined_value());
5408 __ j(equal, &slow_case_, not_taken);
5409 __ bind(&exit_);
5410}
5411
5412
5413void StringCharFromCodeGenerator::GenerateSlow(
5414 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5415 __ Abort("Unexpected fallthrough to CharFromCode slow case");
5416
5417 __ bind(&slow_case_);
5418 call_helper.BeforeCall(masm);
5419 __ push(code_);
5420 __ CallRuntime(Runtime::kCharFromCode, 1);
5421 if (!result_.is(eax)) {
5422 __ mov(result_, eax);
5423 }
5424 call_helper.AfterCall(masm);
5425 __ jmp(&exit_);
5426
5427 __ Abort("Unexpected fallthrough from CharFromCode slow case");
5428}
5429
5430
5431// -------------------------------------------------------------------------
5432// StringCharAtGenerator
5433
5434void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
5435 char_code_at_generator_.GenerateFast(masm);
5436 char_from_code_generator_.GenerateFast(masm);
5437}
5438
5439
5440void StringCharAtGenerator::GenerateSlow(
5441 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
5442 char_code_at_generator_.GenerateSlow(masm, call_helper);
5443 char_from_code_generator_.GenerateSlow(masm, call_helper);
5444}
5445
5446
5447void StringAddStub::Generate(MacroAssembler* masm) {
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005448 Label string_add_runtime, call_builtin;
5449 Builtins::JavaScript builtin_id = Builtins::ADD;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005450
5451 // Load the two arguments.
5452 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
5453 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
5454
5455 // Make sure that both arguments are strings if not known in advance.
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005456 if (flags_ == NO_STRING_ADD_FLAGS) {
ricow@chromium.org65fae842010-08-25 15:26:24 +00005457 __ test(eax, Immediate(kSmiTagMask));
5458 __ j(zero, &string_add_runtime);
5459 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
5460 __ j(above_equal, &string_add_runtime);
5461
5462 // First argument is a string, test second.
5463 __ test(edx, Immediate(kSmiTagMask));
5464 __ j(zero, &string_add_runtime);
5465 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
5466 __ j(above_equal, &string_add_runtime);
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005467 } else {
5468 // Here at least one of the arguments is definitely a string.
5469 // We convert the one that is not known to be a string.
5470 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
5471 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
5472 GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
5473 &call_builtin);
5474 builtin_id = Builtins::STRING_ADD_RIGHT;
5475 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
5476 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
5477 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
5478 &call_builtin);
5479 builtin_id = Builtins::STRING_ADD_LEFT;
5480 }
ricow@chromium.org65fae842010-08-25 15:26:24 +00005481 }
5482
5483 // Both arguments are strings.
5484 // eax: first string
5485 // edx: second string
5486 // Check if either of the strings is empty. In that case return the other.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005487 NearLabel second_not_zero_length, both_not_zero_length;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005488 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
5489 STATIC_ASSERT(kSmiTag == 0);
5490 __ test(ecx, Operand(ecx));
5491 __ j(not_zero, &second_not_zero_length);
5492 // Second string is empty, result is first string which is already in eax.
5493 __ IncrementCounter(&Counters::string_add_native, 1);
5494 __ ret(2 * kPointerSize);
5495 __ bind(&second_not_zero_length);
5496 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
5497 STATIC_ASSERT(kSmiTag == 0);
5498 __ test(ebx, Operand(ebx));
5499 __ j(not_zero, &both_not_zero_length);
5500 // First string is empty, result is second string which is in edx.
5501 __ mov(eax, edx);
5502 __ IncrementCounter(&Counters::string_add_native, 1);
5503 __ ret(2 * kPointerSize);
5504
5505 // Both strings are non-empty.
5506 // eax: first string
5507 // ebx: length of first string as a smi
5508 // ecx: length of second string as a smi
5509 // edx: second string
5510 // Look at the length of the result of adding the two strings.
5511 Label string_add_flat_result, longer_than_two;
5512 __ bind(&both_not_zero_length);
5513 __ add(ebx, Operand(ecx));
5514 STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
5515 // Handle exceptionally long strings in the runtime system.
5516 __ j(overflow, &string_add_runtime);
5517 // Handle the case of adding two one-character strings specially: probe the
5518 // symbol table for an existing two-character symbol before allocating.
5519 __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
5520 __ j(not_equal, &longer_than_two);
5521
5522 // Check that both strings are non-external ascii strings.
5523 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
5524 &string_add_runtime);
5525
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005526 // Get the two characters forming the new string.
ricow@chromium.org65fae842010-08-25 15:26:24 +00005527 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
5528 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
5529
5530 // Try to lookup two character string in symbol table. If it is not found
5531 // just allocate a new one.
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005532 Label make_two_character_string, make_two_character_string_no_reload;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005533 StringHelper::GenerateTwoCharacterSymbolTableProbe(
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005534 masm, ebx, ecx, eax, edx, edi,
5535 &make_two_character_string_no_reload, &make_two_character_string);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005536 __ IncrementCounter(&Counters::string_add_native, 1);
5537 __ ret(2 * kPointerSize);
5538
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005539 // Allocate a two character string.
ricow@chromium.org65fae842010-08-25 15:26:24 +00005540 __ bind(&make_two_character_string);
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005541 // Reload the arguments.
5542 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
5543 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
5544 // Get the two characters forming the new string.
5545 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
5546 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
5547 __ bind(&make_two_character_string_no_reload);
5548 __ IncrementCounter(&Counters::string_add_make_two_char, 1);
5549 __ AllocateAsciiString(eax, // Result.
5550 2, // Length.
5551 edi, // Scratch 1.
5552 edx, // Scratch 2.
5553 &string_add_runtime);
5554 // Pack both characters in ebx.
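  // (Example: for 'a' (0x61) in ebx and 'b' (0x62) in ecx this produces 0x6261
  // in ebx, and the 16-bit store below writes the bytes 'a', 'b' in order.)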
5555 __ shl(ecx, kBitsPerByte);
5556 __ or_(ebx, Operand(ecx));
5557 // Set the characters in the new string.
5558 __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
5559 __ IncrementCounter(&Counters::string_add_native, 1);
5560 __ ret(2 * kPointerSize);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005561
5562 __ bind(&longer_than_two);
5563 // Check if resulting string will be flat.
5564 __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
5565 __ j(below, &string_add_flat_result);
5566
5567 // If the result is not supposed to be flat, allocate a cons string object. If
5568 // both strings are ascii, the result is an ascii cons string.
5569 Label non_ascii, allocated, ascii_data;
5570 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
5571 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
5572 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
5573 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
5574 __ and_(ecx, Operand(edi));
5575 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
5576 __ test(ecx, Immediate(kAsciiStringTag));
5577 __ j(zero, &non_ascii);
5578 __ bind(&ascii_data);
5579 // Allocate an ascii cons string.
5580 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
5581 __ bind(&allocated);
5582 // Fill the fields of the cons string.
5583 if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
5584 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
5585 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
5586 Immediate(String::kEmptyHashField));
5587 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
5588 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
5589 __ mov(eax, ecx);
5590 __ IncrementCounter(&Counters::string_add_native, 1);
5591 __ ret(2 * kPointerSize);
5592 __ bind(&non_ascii);
5593 // At least one of the strings is two-byte. Check whether it happens
5594 // to contain only ascii characters.
5595 // ecx: first instance type AND second instance type.
5596 // edi: second instance type.
5597 __ test(ecx, Immediate(kAsciiDataHintMask));
5598 __ j(not_zero, &ascii_data);
5599 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5600 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5601 __ xor_(edi, Operand(ecx));
5602 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
5603 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
5604 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
5605 __ j(equal, &ascii_data);
5606 // Allocate a two byte cons string.
5607 __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
5608 __ jmp(&allocated);
5609
5610 // Handle creating a flat result. First check that both strings are not
5611 // external strings.
5612 // eax: first string
5613 // ebx: length of resulting flat string as a smi
5614 // edx: second string
5615 __ bind(&string_add_flat_result);
5616 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5617 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5618 __ and_(ecx, kStringRepresentationMask);
5619 __ cmp(ecx, kExternalStringTag);
5620 __ j(equal, &string_add_runtime);
5621 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5622 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5623 __ and_(ecx, kStringRepresentationMask);
5624 __ cmp(ecx, kExternalStringTag);
5625 __ j(equal, &string_add_runtime);
5626 // Now check if both strings are ascii strings.
5627 // eax: first string
5628 // ebx: length of resulting flat string as a smi
5629 // edx: second string
5630 Label non_ascii_string_add_flat_result;
5631 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
5632 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5633 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
5634 __ j(zero, &non_ascii_string_add_flat_result);
5635 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5636 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
5637 __ j(zero, &string_add_runtime);
5638
ricow@chromium.org65fae842010-08-25 15:26:24 +00005639 // Both strings are ascii strings. As they are short they are both flat.
5640 // ebx: length of resulting flat string as a smi
5641 __ SmiUntag(ebx);
5642 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
5643 // eax: result string
5644 __ mov(ecx, eax);
5645 // Locate first character of result.
5646 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5647 // Load first argument and locate first character.
5648 __ mov(edx, Operand(esp, 2 * kPointerSize));
5649 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5650 __ SmiUntag(edi);
5651 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5652 // eax: result string
5653 // ecx: first character of result
5654 // edx: first char of first argument
5655 // edi: length of first argument
5656 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
5657 // Load second argument and locate first character.
5658 __ mov(edx, Operand(esp, 1 * kPointerSize));
5659 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5660 __ SmiUntag(edi);
5661 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5662 // eax: result string
5663 // ecx: next character of result
5664 // edx: first char of second argument
5665 // edi: length of second argument
5666 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
5667 __ IncrementCounter(&Counters::string_add_native, 1);
5668 __ ret(2 * kPointerSize);
5669
5670 // Handle creating a flat two byte result.
5671 // eax: first string - known to be two byte
5672 // ebx: length of resulting flat string as a smi
5673 // edx: second string
5674 __ bind(&non_ascii_string_add_flat_result);
5675 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5676 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
5677 __ j(not_zero, &string_add_runtime);
5678 // Both strings are two byte strings. As they are short they are both
5679 // flat.
5680 __ SmiUntag(ebx);
5681 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
5682 // eax: result string
5683 __ mov(ecx, eax);
5684 // Locate first character of result.
5685 __ add(Operand(ecx),
5686 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5687 // Load first argument and locate first character.
5688 __ mov(edx, Operand(esp, 2 * kPointerSize));
5689 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5690 __ SmiUntag(edi);
5691 __ add(Operand(edx),
5692 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5693 // eax: result string
5694 // ecx: first character of result
5695 // edx: first char of first argument
5696 // edi: length of first argument
5697 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
5698 // Load second argument and locate first character.
5699 __ mov(edx, Operand(esp, 1 * kPointerSize));
5700 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5701 __ SmiUntag(edi);
5702 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5703 // eax: result string
5704 // ecx: next character of result
5705 // edx: first char of second argument
5706 // edi: length of second argument
5707 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
5708 __ IncrementCounter(&Counters::string_add_native, 1);
5709 __ ret(2 * kPointerSize);
5710
5711 // Just jump to runtime to add the two strings.
5712 __ bind(&string_add_runtime);
5713 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005714
5715 if (call_builtin.is_linked()) {
5716 __ bind(&call_builtin);
5717 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
5718 }
5719}
5720
5721
5722void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
5723 int stack_offset,
5724 Register arg,
5725 Register scratch1,
5726 Register scratch2,
5727 Register scratch3,
5728 Label* slow) {
5729 // First check if the argument is already a string.
5730 Label not_string, done;
5731 __ test(arg, Immediate(kSmiTagMask));
5732 __ j(zero, &not_string);
5733 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
5734 __ j(below, &done);
5735
5736 // Check the number to string cache.
5737 Label not_cached;
5738 __ bind(&not_string);
5739 // Puts the cached result into scratch1.
5740 NumberToStringStub::GenerateLookupNumberStringCache(masm,
5741 arg,
5742 scratch1,
5743 scratch2,
5744 scratch3,
5745 false,
5746 &not_cached);
5747 __ mov(arg, scratch1);
5748 __ mov(Operand(esp, stack_offset), arg);
5749 __ jmp(&done);
5750
5751 // Check if the argument is a safe string wrapper.
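  // ("Safe" refers to the kStringWrapperSafeForDefaultValueOf map bit tested
  // below; our understanding is that it is only set while the wrapper's
  // default valueOf behaviour is untouched, so the wrapped string can be used
  // directly.)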
5752 __ bind(&not_cached);
5753 __ test(arg, Immediate(kSmiTagMask));
5754 __ j(zero, slow);
5755 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1.
5756 __ j(not_equal, slow);
5757 __ test_b(FieldOperand(scratch1, Map::kBitField2Offset),
5758 1 << Map::kStringWrapperSafeForDefaultValueOf);
5759 __ j(zero, slow);
5760 __ mov(arg, FieldOperand(arg, JSValue::kValueOffset));
5761 __ mov(Operand(esp, stack_offset), arg);
5762
5763 __ bind(&done);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005764}
5765
5766
5767void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
5768 Register dest,
5769 Register src,
5770 Register count,
5771 Register scratch,
5772 bool ascii) {
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005773 NearLabel loop;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005774 __ bind(&loop);
5775 // This loop just copies one character at a time, as it is only used for very
5776 // short strings.
5777 if (ascii) {
5778 __ mov_b(scratch, Operand(src, 0));
5779 __ mov_b(Operand(dest, 0), scratch);
5780 __ add(Operand(src), Immediate(1));
5781 __ add(Operand(dest), Immediate(1));
5782 } else {
5783 __ mov_w(scratch, Operand(src, 0));
5784 __ mov_w(Operand(dest, 0), scratch);
5785 __ add(Operand(src), Immediate(2));
5786 __ add(Operand(dest), Immediate(2));
5787 }
5788 __ sub(Operand(count), Immediate(1));
5789 __ j(not_zero, &loop);
5790}
5791
5792
5793void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
5794 Register dest,
5795 Register src,
5796 Register count,
5797 Register scratch,
5798 bool ascii) {
5799 // Copy characters using rep movs of doublewords.
5800 // The destination is aligned on a 4 byte boundary because we are
5801 // copying to the beginning of a newly allocated string.
5802 ASSERT(dest.is(edi)); // rep movs destination
5803 ASSERT(src.is(esi)); // rep movs source
5804 ASSERT(count.is(ecx)); // rep movs count
5805 ASSERT(!scratch.is(dest));
5806 ASSERT(!scratch.is(src));
5807 ASSERT(!scratch.is(count));
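  // The byte count computed below need not be a multiple of four, so after the
  // doubleword rep movs the remaining 0-3 bytes are copied one at a time in
  // the tail loop.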
5808
5809 // Nothing to do for zero characters.
5810 Label done;
5811 __ test(count, Operand(count));
5812 __ j(zero, &done);
5813
5814 // Make count the number of bytes to copy.
5815 if (!ascii) {
5816 __ shl(count, 1);
5817 }
5818
5819 // Don't enter the rep movs if there are fewer than 4 bytes to copy.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005820 NearLabel last_bytes;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005821 __ test(count, Immediate(~3));
5822 __ j(zero, &last_bytes);
5823
5824 // Copy from esi to edi using the rep movs instruction.
5825 __ mov(scratch, count);
5826 __ sar(count, 2); // Number of doublewords to copy.
5827 __ cld();
5828 __ rep_movs();
5829
5830 // Find number of bytes left.
5831 __ mov(count, scratch);
5832 __ and_(count, 3);
5833
5834 // Check if there are more bytes to copy.
5835 __ bind(&last_bytes);
5836 __ test(count, Operand(count));
5837 __ j(zero, &done);
5838
5839 // Copy remaining characters.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005840 NearLabel loop;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005841 __ bind(&loop);
5842 __ mov_b(scratch, Operand(src, 0));
5843 __ mov_b(Operand(dest, 0), scratch);
5844 __ add(Operand(src), Immediate(1));
5845 __ add(Operand(dest), Immediate(1));
5846 __ sub(Operand(count), Immediate(1));
5847 __ j(not_zero, &loop);
5848
5849 __ bind(&done);
5850}
5851
5852
5853void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
5854 Register c1,
5855 Register c2,
5856 Register scratch1,
5857 Register scratch2,
5858 Register scratch3,
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005859 Label* not_probed,
ricow@chromium.org65fae842010-08-25 15:26:24 +00005860 Label* not_found) {
5861 // Register scratch3 is the general scratch register in this function.
5862 Register scratch = scratch3;
5863
5864 // Make sure that the two characters are not both digits, as such strings have
5865 // a different hash algorithm. Don't try to look for these in the symbol table.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00005866 NearLabel not_array_index;
ricow@chromium.org65fae842010-08-25 15:26:24 +00005867 __ mov(scratch, c1);
5868 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5869 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
5870 __ j(above, &not_array_index);
5871 __ mov(scratch, c2);
5872 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5873 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00005874 __ j(below_equal, not_probed);
ricow@chromium.org65fae842010-08-25 15:26:24 +00005875
5876 __ bind(&not_array_index);
5877 // Calculate the two character string hash.
5878 Register hash = scratch1;
5879 GenerateHashInit(masm, hash, c1, scratch);
5880 GenerateHashAddCharacter(masm, hash, c2, scratch);
5881 GenerateHashGetHash(masm, hash, scratch);
5882
5883 // Collect the two characters in a register.
5884 Register chars = c1;
5885 __ shl(c2, kBitsPerByte);
5886 __ or_(chars, Operand(c2));
5887
5888 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
5889 // hash: hash of two character string.
5890
5891 // Load the symbol table.
5892 Register symbol_table = c2;
5893 ExternalReference roots_address = ExternalReference::roots_address();
5894 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
5895 __ mov(symbol_table,
5896 Operand::StaticArray(scratch, times_pointer_size, roots_address));
5897
5898 // Calculate capacity mask from the symbol table capacity.
5899 Register mask = scratch2;
5900 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
5901 __ SmiUntag(mask);
5902 __ sub(Operand(mask), Immediate(1));
5903
5904 // Registers
5905 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
5906 // hash: hash of two character string
5907 // symbol_table: symbol table
5908 // mask: capacity mask
5909 // scratch: -
5910
5911 // Perform a number of probes in the symbol table.
5912 static const int kProbes = 4;
5913 Label found_in_symbol_table;
5914 Label next_probe[kProbes], next_probe_pop_mask[kProbes];
5915 for (int i = 0; i < kProbes; i++) {
5916 // Calculate entry in symbol table.
5917 __ mov(scratch, hash);
5918 if (i > 0) {
5919 __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
5920 }
5921 __ and_(scratch, Operand(mask));
5922
5923 // Load the entry from the symbol table.
5924 Register candidate = scratch; // Scratch register contains candidate.
5925 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
5926 __ mov(candidate,
5927 FieldOperand(symbol_table,
5928 scratch,
5929 times_pointer_size,
5930 SymbolTable::kElementsStartOffset));
5931
5932 // If entry is undefined no string with this hash can be found.
5933 __ cmp(candidate, Factory::undefined_value());
5934 __ j(equal, not_found);
5935
5936 // If length is not 2 the string is not a candidate.
5937 __ cmp(FieldOperand(candidate, String::kLengthOffset),
5938 Immediate(Smi::FromInt(2)));
5939 __ j(not_equal, &next_probe[i]);
5940
5941 // As we are out of registers, save the mask on the stack and use that
5942 // register as a temporary.
5943 __ push(mask);
5944 Register temp = mask;
5945
5946 // Check that the candidate is a non-external ascii string.
5947 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
5948 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
5949 __ JumpIfInstanceTypeIsNotSequentialAscii(
5950 temp, temp, &next_probe_pop_mask[i]);
5951
5952 // Check if the two characters match.
5953 __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
5954 __ and_(temp, 0x0000ffff);
5955 __ cmp(chars, Operand(temp));
5956 __ j(equal, &found_in_symbol_table);
5957 __ bind(&next_probe_pop_mask[i]);
5958 __ pop(mask);
5959 __ bind(&next_probe[i]);
5960 }
5961
5962 // No matching 2 character string found by probing.
5963 __ jmp(not_found);
5964
5965 // Scratch register contains result when we fall through to here.
5966 Register result = scratch;
5967 __ bind(&found_in_symbol_table);
5968 __ pop(mask); // Pop saved mask from the stack.
5969 if (!result.is(eax)) {
5970 __ mov(eax, result);
5971 }
5972}
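
// A C++ sketch of the probing scheme generated above, assuming a simple
// open-addressed table of NUL-terminated strings in which a null slot plays
// the role of the undefined value. The probe offset and the helper name are
// illustrative stand-ins, not the real SymbolTable interface.
static const char* TwoCharProbeSketch(const char* const* table,
                                      unsigned capacity_mask,
                                      unsigned hash,
                                      char c1,
                                      char c2) {
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Entry index: hash plus an i-dependent probe offset, masked by capacity.
    unsigned entry = (hash + i * (i + 1) / 2) & capacity_mask;
    const char* candidate = table[entry];
    if (candidate == 0) return 0;  // Undefined entry: no string with this hash.
    if (candidate[0] == c1 && candidate[1] == c2 && candidate[2] == '\0') {
      return candidate;  // A two character match was found.
    }
  }
  return 0;  // Nothing found after kProbes probes; callers take not_found.
}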
5973
5974
5975void StringHelper::GenerateHashInit(MacroAssembler* masm,
5976 Register hash,
5977 Register character,
5978 Register scratch) {
5979 // hash = character + (character << 10);
5980 __ mov(hash, character);
5981 __ shl(hash, 10);
5982 __ add(hash, Operand(character));
5983 // hash ^= hash >> 6;
5984 __ mov(scratch, hash);
5985 __ sar(scratch, 6);
5986 __ xor_(hash, Operand(scratch));
5987}
5988
5989
5990void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
5991 Register hash,
5992 Register character,
5993 Register scratch) {
5994 // hash += character;
5995 __ add(hash, Operand(character));
5996 // hash += hash << 10;
5997 __ mov(scratch, hash);
5998 __ shl(scratch, 10);
5999 __ add(hash, Operand(scratch));
6000 // hash ^= hash >> 6;
6001 __ mov(scratch, hash);
6002 __ sar(scratch, 6);
6003 __ xor_(hash, Operand(scratch));
6004}
6005
6006
6007void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
6008 Register hash,
6009 Register scratch) {
6010 // hash += hash << 3;
6011 __ mov(scratch, hash);
6012 __ shl(scratch, 3);
6013 __ add(hash, Operand(scratch));
6014 // hash ^= hash >> 11;
6015 __ mov(scratch, hash);
6016 __ sar(scratch, 11);
6017 __ xor_(hash, Operand(scratch));
6018 // hash += hash << 15;
6019 __ mov(scratch, hash);
6020 __ shl(scratch, 15);
6021 __ add(hash, Operand(scratch));
6022
6023 // if (hash == 0) hash = 27;
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00006024 NearLabel hash_not_zero;
ricow@chromium.org65fae842010-08-25 15:26:24 +00006025 __ test(hash, Operand(hash));
6026 __ j(not_zero, &hash_not_zero);
6027 __ mov(hash, Immediate(27));
6028 __ bind(&hash_not_zero);
6029}
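
// The complete running hash built by GenerateHashInit, GenerateHashAddCharacter
// and GenerateHashGetHash above, written out in plain C++ for reference. This
// assumes 32-bit unsigned arithmetic; the function name is illustrative only.
static unsigned TwoCharacterHashSketch(unsigned c1, unsigned c2) {
  unsigned hash = c1 + (c1 << 10);  // GenerateHashInit
  hash ^= hash >> 6;
  hash += c2;                       // GenerateHashAddCharacter
  hash += hash << 10;
  hash ^= hash >> 6;
  hash += hash << 3;                // GenerateHashGetHash
  hash ^= hash >> 11;
  hash += hash << 15;
  if (hash == 0) hash = 27;         // The stub never produces a zero hash.
  return hash;
}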
6030
6031
6032void SubStringStub::Generate(MacroAssembler* masm) {
6033 Label runtime;
6034
6035 // Stack frame on entry.
6036 // esp[0]: return address
6037 // esp[4]: to
6038 // esp[8]: from
6039 // esp[12]: string
6040
6041 // Make sure first argument is a string.
6042 __ mov(eax, Operand(esp, 3 * kPointerSize));
6043 STATIC_ASSERT(kSmiTag == 0);
6044 __ test(eax, Immediate(kSmiTagMask));
6045 __ j(zero, &runtime);
6046 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
6047 __ j(NegateCondition(is_string), &runtime);
6048
6049 // eax: string
6050 // ebx: instance type
6051
6052 // Calculate length of sub string using the smi values.
6053 Label result_longer_than_two;
6054 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
6055 __ test(ecx, Immediate(kSmiTagMask));
6056 __ j(not_zero, &runtime);
6057 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
6058 __ test(edx, Immediate(kSmiTagMask));
6059 __ j(not_zero, &runtime);
6060 __ sub(ecx, Operand(edx));
6061 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
6062 Label return_eax;
6063 __ j(equal, &return_eax);
6064 // Special handling of sub-strings of length 1 and 2. One character strings
6065 // are handled in the runtime system (looked up in the single character
6066 // cache). Two character strings are looked up in the symbol table.
6067 __ SmiUntag(ecx); // Result length is no longer smi.
6068 __ cmp(ecx, 2);
6069 __ j(greater, &result_longer_than_two);
6070 __ j(less, &runtime);
6071
6072 // Sub string of length 2 requested.
6073 // eax: string
6074 // ebx: instance type
6075 // ecx: sub string length (value is 2)
6076 // edx: from index (smi)
6077 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);
6078
6079 // Get the two characters forming the sub string.
6080 __ SmiUntag(edx); // From index is no longer smi.
6081 __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
6082 __ movzx_b(ecx,
6083 FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));
6084
6085 // Try to look up the two character string in the symbol table.
6086 Label make_two_character_string;
6087 StringHelper::GenerateTwoCharacterSymbolTableProbe(
kmillikin@chromium.org3cdd9e12010-09-06 11:39:48 +00006088 masm, ebx, ecx, eax, edx, edi,
6089 &make_two_character_string, &make_two_character_string);
ricow@chromium.org65fae842010-08-25 15:26:24 +00006090 __ ret(3 * kPointerSize);
6091
6092 __ bind(&make_two_character_string);
6093 // Set up registers for allocating the two character string.
6094 __ mov(eax, Operand(esp, 3 * kPointerSize));
6095 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
6096 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
6097 __ Set(ecx, Immediate(2));
6098
6099 __ bind(&result_longer_than_two);
6100 // eax: string
6101 // ebx: instance type
6102 // ecx: result string length
6103 // Check for a flat ascii string.
6104 Label non_ascii_flat;
6105 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);
6106
6107 // Allocate the result.
6108 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
6109
6110 // eax: result string
6111 // ecx: result string length
6112 __ mov(edx, esi); // esi used by following code.
6113 // Locate first character of result.
6114 __ mov(edi, eax);
6115 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
6116 // Load string argument and locate character of sub string start.
6117 __ mov(esi, Operand(esp, 3 * kPointerSize));
6118 __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
6119 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
6120 __ SmiUntag(ebx);
6121 __ add(esi, Operand(ebx));
6122
6123 // eax: result string
6124 // ecx: result length
6125 // edx: original value of esi
6126 // edi: first character of result
6127 // esi: character of sub string start
6128 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
6129 __ mov(esi, edx); // Restore esi.
6130 __ IncrementCounter(&Counters::sub_string_native, 1);
6131 __ ret(3 * kPointerSize);
6132
6133 __ bind(&non_ascii_flat);
6134 // eax: string
6135 // ebx: instance type & (kStringRepresentationMask | kStringEncodingMask)
6136 // ecx: result string length
6137 // Check for a flat two byte string.
6138 __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
6139 __ j(not_equal, &runtime);
6140
6141 // Allocate the result.
6142 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);
6143
6144 // eax: result string
6145 // ecx: result string length
6146 __ mov(edx, esi); // esi used by following code.
6147 // Locate first character of result.
6148 __ mov(edi, eax);
6149 __ add(Operand(edi),
6150 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
6151 // Load string argument and locate character of sub string start.
6152 __ mov(esi, Operand(esp, 3 * kPointerSize));
6153 __ add(Operand(esi),
6154 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
6155 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
6156 // As the from index is a smi it is already multiplied by 2, which matches
6157 // the size of a two byte character.
6158 STATIC_ASSERT(kSmiTag == 0);
6159 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
6160 __ add(esi, Operand(ebx));
6161
6162 // eax: result string
6163 // ecx: result length
6164 // edx: original value of esi
6165 // edi: first character of result
6166 // esi: character of sub string start
6167 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
6168 __ mov(esi, edx); // Restore esi.
6169
6170 __ bind(&return_eax);
6171 __ IncrementCounter(&Counters::sub_string_native, 1);
6172 __ ret(3 * kPointerSize);
6173
6174 // Just jump to runtime to create the sub string.
6175 __ bind(&runtime);
6176 __ TailCallRuntime(Runtime::kSubString, 3, 1);
6177}
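
// A sketch of the length-based dispatch implemented by SubStringStub above;
// the allocation and copying paths themselves are elided, and the enum and
// function names are illustrative only, not part of the stub interface.
enum SubStringPathSketch {
  kReturnOriginal,    // Requested range covers the whole string.
  kRuntimeCall,       // Length 0 or 1: handled via the single character cache.
  kSymbolTableProbe,  // Length 2: probe the symbol table first.
  kAllocateAndCopy    // Longer results: allocate a flat string and copy.
};

static SubStringPathSketch ClassifySubStringSketch(int string_length,
                                                   int from,
                                                   int to) {
  int result_length = to - from;
  if (result_length == string_length) return kReturnOriginal;
  if (result_length < 2) return kRuntimeCall;
  // A symbol table miss falls through to allocation in the stub.
  if (result_length == 2) return kSymbolTableProbe;
  return kAllocateAndCopy;
}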
6178
6179
6180void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
6181 Register left,
6182 Register right,
6183 Register scratch1,
6184 Register scratch2,
6185 Register scratch3) {
6186 Label result_not_equal;
6187 Label result_greater;
6188 Label compare_lengths;
6189
6190 __ IncrementCounter(&Counters::string_compare_native, 1);
6191
6192 // Find minimum length.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00006193 NearLabel left_shorter;
ricow@chromium.org65fae842010-08-25 15:26:24 +00006194 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
6195 __ mov(scratch3, scratch1);
6196 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
6197
6198 Register length_delta = scratch3;
6199
6200 __ j(less_equal, &left_shorter);
6201 // Right string is shorter. Change scratch1 to be length of right string.
6202 __ sub(scratch1, Operand(length_delta));
6203 __ bind(&left_shorter);
6204
6205 Register min_length = scratch1;
6206
6207 // If either length is zero, just compare lengths.
6208 __ test(min_length, Operand(min_length));
6209 __ j(zero, &compare_lengths);
6210
6211 // Change index to run from -min_length to -1 by adding min_length
6212 // to the string start. This means the loop ends when the index reaches
6213 // zero, so no additional compare is needed.
6214 __ SmiUntag(min_length);
6215 __ lea(left,
6216 FieldOperand(left,
6217 min_length, times_1,
6218 SeqAsciiString::kHeaderSize));
6219 __ lea(right,
6220 FieldOperand(right,
6221 min_length, times_1,
6222 SeqAsciiString::kHeaderSize));
6223 __ neg(min_length);
6224
6225 Register index = min_length; // index = -min_length;
6226
6227 {
6228 // Compare loop.
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00006229 NearLabel loop;
ricow@chromium.org65fae842010-08-25 15:26:24 +00006230 __ bind(&loop);
6231 // Compare characters.
6232 __ mov_b(scratch2, Operand(left, index, times_1, 0));
6233 __ cmpb(scratch2, Operand(right, index, times_1, 0));
6234 __ j(not_equal, &result_not_equal);
6235 __ add(Operand(index), Immediate(1));
6236 __ j(not_zero, &loop);
6237 }
6238
6239 // Compare lengths - strings up to min-length are equal.
6240 __ bind(&compare_lengths);
6241 __ test(length_delta, Operand(length_delta));
6242 __ j(not_zero, &result_not_equal);
6243
6244 // Result is EQUAL.
6245 STATIC_ASSERT(EQUAL == 0);
6246 STATIC_ASSERT(kSmiTag == 0);
6247 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
6248 __ ret(0);
6249
6250 __ bind(&result_not_equal);
6251 __ j(greater, &result_greater);
6252
6253 // Result is LESS.
6254 __ Set(eax, Immediate(Smi::FromInt(LESS)));
6255 __ ret(0);
6256
6257 // Result is GREATER.
6258 __ bind(&result_greater);
6259 __ Set(eax, Immediate(Smi::FromInt(GREATER)));
6260 __ ret(0);
6261}
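
// What the generated comparison above computes, expressed in C++: compare
// character by character up to the shorter length, then break ties on length.
// Assumes both inputs are flat ascii; the function name is illustrative only.
static int CompareFlatAsciiSketch(const char* left, int left_length,
                                  const char* right, int right_length) {
  int min_length = left_length < right_length ? left_length : right_length;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) {
      return left[i] < right[i] ? -1 : 1;  // LESS / GREATER
    }
  }
  if (left_length == right_length) return 0;  // EQUAL
  return left_length < right_length ? -1 : 1;
}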
6262
6263
6264void StringCompareStub::Generate(MacroAssembler* masm) {
6265 Label runtime;
6266
6267 // Stack frame on entry.
6268 // esp[0]: return address
6269 // esp[4]: right string
6270 // esp[8]: left string
6271
6272 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
6273 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
6274
whesse@chromium.org4a1fe7d2010-09-27 12:32:04 +00006275 NearLabel not_same;
ricow@chromium.org65fae842010-08-25 15:26:24 +00006276 __ cmp(edx, Operand(eax));
6277 __ j(not_equal, &not_same);
6278 STATIC_ASSERT(EQUAL == 0);
6279 STATIC_ASSERT(kSmiTag == 0);
6280 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
6281 __ IncrementCounter(&Counters::string_compare_native, 1);
6282 __ ret(2 * kPointerSize);
6283
6284 __ bind(&not_same);
6285
6286 // Check that both objects are sequential ascii strings.
6287 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
6288
6289 // Compare flat ascii strings.
6290 // Drop arguments from the stack.
6291 __ pop(ecx);
6292 __ add(Operand(esp), Immediate(2 * kPointerSize));
6293 __ push(ecx);
6294 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
6295
6296 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
6297 // tagged as a small integer.
6298 __ bind(&runtime);
6299 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
6300}
6301
kasperl@chromium.orga5551262010-12-07 12:49:48 +00006302
6303void StringCharAtStub::Generate(MacroAssembler* masm) {
6304 // Expects two arguments (object, index) on the stack:
6305
6306 // Stack frame on entry.
6307 // esp[0]: return address
6308 // esp[4]: index
6309 // esp[8]: object
6310
6311 Register object = ebx;
6312 Register index = eax;
6313 Register scratch1 = ecx;
6314 Register scratch2 = edx;
6315 Register result = eax;
6316
6317 __ pop(scratch1); // Return address.
6318 __ pop(index);
6319 __ pop(object);
6320 __ push(scratch1);
6321
6322 Label need_conversion;
6323 Label index_out_of_range;
6324 Label done;
6325 StringCharAtGenerator generator(object,
6326 index,
6327 scratch1,
6328 scratch2,
6329 result,
6330 &need_conversion,
6331 &need_conversion,
6332 &index_out_of_range,
6333 STRING_INDEX_IS_NUMBER);
6334 generator.GenerateFast(masm);
6335 __ jmp(&done);
6336
6337 __ bind(&index_out_of_range);
6338 // When the index is out of range, the spec requires us to return
6339 // the empty string.
6340 __ Set(result, Immediate(Factory::empty_string()));
6341 __ jmp(&done);
6342
6343 __ bind(&need_conversion);
6344 // Move smi zero into the result register, which will trigger
6345 // conversion.
6346 __ Set(result, Immediate(Smi::FromInt(0)));
6347 __ jmp(&done);
6348
6349 StubRuntimeCallHelper call_helper;
6350 generator.GenerateSlow(masm, call_helper);
6351
6352 __ bind(&done);
6353 __ ret(0);
6354}
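
// The observable behaviour of StringCharAtStub above, sketched in C++: an
// out-of-range index yields the empty string, otherwise a one character
// string is produced. The buffer-based signature is illustrative only, and
// the smi-zero conversion trick of the slow path is not modelled here.
static const char* CharAtSketch(const char* string, int length, int index,
                                char buffer[2]) {
  if (index < 0 || index >= length) {
    return "";  // Out of range: the spec requires the empty string.
  }
  buffer[0] = string[index];
  buffer[1] = '\0';
  return buffer;
}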
6355
6356void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
6357 ASSERT(state_ == CompareIC::SMIS);
6358 NearLabel miss;
6359 __ mov(ecx, Operand(edx));
6360 __ or_(ecx, Operand(eax));
6361 __ test(ecx, Immediate(kSmiTagMask));
6362 __ j(not_zero, &miss, not_taken);
6363
6364 if (GetCondition() == equal) {
6365 // For equality we do not care about the sign of the result.
6366 __ sub(eax, Operand(edx));
6367 } else {
6368 NearLabel done;
6369 __ sub(edx, Operand(eax));
6370 __ j(no_overflow, &done);
6371 // Correct sign of result in case of overflow.
6372 __ not_(edx);
6373 __ bind(&done);
6374 __ mov(eax, edx);
6375 }
6376 __ ret(0);
6377
6378 __ bind(&miss);
6379 GenerateMiss(masm);
6380}
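
// What GenerateSmis returns in the non-equality case, sketched in C++ under
// the assumption of 32-bit, two's-complement int: the raw difference is used,
// and if the 32-bit subtraction overflowed its bits are flipped so that the
// sign (all the caller inspects) is correct again, mirroring not_(edx) above.
static int CompareSmisSketch(int left, int right) {
  long long wide = static_cast<long long>(left) - right;
  int narrow = static_cast<int>(wide);
  if (static_cast<long long>(narrow) != wide) {
    narrow = ~narrow;  // Overflow: bitwise NOT restores the correct sign.
  }
  return narrow;
}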
6381
6382
6383void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
6384 ASSERT(state_ == CompareIC::HEAP_NUMBERS);
6385
6386 NearLabel generic_stub;
6387 NearLabel unordered;
6388 NearLabel miss;
6389 __ mov(ecx, Operand(edx));
6390 __ and_(ecx, Operand(eax));
6391 __ test(ecx, Immediate(kSmiTagMask));
6392 __ j(zero, &generic_stub, not_taken);
6393
6394 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
6395 __ j(not_equal, &miss, not_taken);
6396 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
6397 __ j(not_equal, &miss, not_taken);
6398
6399 // Inline the double comparison, falling back to the general compare
6400 // stub if NaN is involved or SSE2 or CMOV is unsupported.
6401 if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
6402 CpuFeatures::Scope scope1(SSE2);
6403 CpuFeatures::Scope scope2(CMOV);
6404
6405 // Load the left and right operands.
6406 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
6407 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
6408
6409 // Compare operands
6410 __ ucomisd(xmm0, xmm1);
6411
6412 // Don't base result on EFLAGS when a NaN is involved.
6413 __ j(parity_even, &unordered, not_taken);
6414
6415 // Return a result of -1, 0, or 1, based on EFLAGS.
6416 // Use mov rather than xor to clear eax, because xor would clobber EFLAGS.
6417 __ mov(eax, 0); // equal
6418 __ mov(ecx, Immediate(Smi::FromInt(1)));
6419 __ cmov(above, eax, Operand(ecx));
6420 __ mov(ecx, Immediate(Smi::FromInt(-1)));
6421 __ cmov(below, eax, Operand(ecx));
6422 __ ret(0);
6423
6424 __ bind(&unordered);
6425 }
6426
6427 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
6428 __ bind(&generic_stub);
6429 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
6430
6431 __ bind(&miss);
6432 GenerateMiss(masm);
6433}
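
// The inlined double comparison above, in C++ terms: NaN operands are
// "unordered" (ucomisd raises the parity flag) and are handed to the generic
// compare stub; otherwise the result is less, equal or greater, which the
// cmov sequence encodes as the smis -1, 0 and 1. Names are illustrative only.
static int CompareHeapNumbersSketch(double left, double right,
                                    bool* unordered) {
  if (left != left || right != right) {  // NaN compares unequal to itself.
    *unordered = true;
    return 0;
  }
  *unordered = false;
  if (left == right) return 0;
  return left < right ? -1 : 1;
}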
6434
6435
6436void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
6437 ASSERT(state_ == CompareIC::OBJECTS);
6438 NearLabel miss;
6439 __ mov(ecx, Operand(edx));
6440 __ and_(ecx, Operand(eax));
6441 __ test(ecx, Immediate(kSmiTagMask));
6442 __ j(zero, &miss, not_taken);
6443
6444 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
6445 __ j(not_equal, &miss, not_taken);
6446 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
6447 __ j(not_equal, &miss, not_taken);
6448
6449 ASSERT(GetCondition() == equal);
6450 __ sub(eax, Operand(edx));
6451 __ ret(0);
6452
6453 __ bind(&miss);
6454 GenerateMiss(masm);
6455}
6456
6457
6458void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
6459 // Save the registers.
6460 __ pop(ecx);
6461 __ push(edx);
6462 __ push(eax);
6463 __ push(ecx);
6464
6465 // Call the runtime system in a fresh internal frame.
6466 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
6467 __ EnterInternalFrame();
6468 __ push(edx);
6469 __ push(eax);
6470 __ push(Immediate(Smi::FromInt(op_)));
6471 __ CallExternalReference(miss, 3);
6472 __ LeaveInternalFrame();
6473
6474 // Compute the entry point of the rewritten stub.
6475 __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
6476
6477 // Restore registers.
6478 __ pop(ecx);
6479 __ pop(eax);
6480 __ pop(edx);
6481 __ push(ecx);
6482
6483 // Do a tail call to the rewritten stub.
6484 __ jmp(Operand(edi));
6485}
6486
6487
ricow@chromium.org65fae842010-08-25 15:26:24 +00006488#undef __
6489
6490} } // namespace v8::internal
6491
6492#endif // V8_TARGET_ARCH_IA32