blob: 8bf2dd40bef536ce67a89180a480360c9d2c7fed [file] [log] [blame]
Steve Block1e0659c2011-05-24 12:43:12 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_IA32)
31
Kristian Monsen80d68ea2010-09-08 11:05:35 +010032#include "bootstrapper.h"
Ben Murdoch257744e2011-11-30 15:57:28 +000033#include "code-stubs.h"
Steve Block44f0eee2011-05-26 01:26:41 +010034#include "isolate.h"
Ben Murdoch257744e2011-11-30 15:57:28 +000035#include "jsregexp.h"
Kristian Monsen80d68ea2010-09-08 11:05:35 +010036#include "regexp-macro-assembler.h"
37
38namespace v8 {
39namespace internal {
40
41#define __ ACCESS_MASM(masm)
Steve Block1e0659c2011-05-24 12:43:12 +010042
// Converts the single argument (in eax) to a number. Smis and heap numbers
// are already numbers and are returned unchanged; everything else is handed
// off to the TO_NUMBER builtin. Result is returned in eax.
void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in eax.
  Label check_heap_number, call_builtin;
  // Smi check: a smi is already a number, return it as-is.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &check_heap_number, Label::kNear);
  __ ret(0);

  // Heap numbers are also returned unchanged.
  __ bind(&check_heap_number);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
  __ j(not_equal, &call_builtin, Label::kNear);
  __ ret(0);

  // Non-number: tail-call the TO_NUMBER builtin with eax as argument.
  __ bind(&call_builtin);
  __ pop(ecx);  // Pop return address.
  __ push(eax);
  __ push(ecx);  // Push return address.
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}
63
64
// Allocates a JSFunction in new space for the SharedFunctionInfo found on
// the stack, using esi as the function's context. Returns the closure in
// eax. Falls back to Runtime::kNewClosure when new-space allocation fails.
void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in esi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ mov(edx, Operand(esp, 1 * kPointerSize));

  // Strict-mode functions use a different map than classic-mode functions.
  int map_index = strict_mode_ == kStrictMode
      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
      : Context::FUNCTION_MAP_INDEX;

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  Factory* factory = masm->isolate()->factory();
  __ mov(ebx, Immediate(factory->empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(factory->the_hole_value()));
  __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
  __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
  __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
         Immediate(factory->undefined_value()));

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(ecx);  // Temporarily remove return address.
  __ pop(edx);
  __ push(esi);
  __ push(edx);
  // NOTE(review): third runtime argument is false_value — presumably the
  // "pretenure" flag; confirm against Runtime_NewClosure's signature.
  __ push(Immediate(factory->false_value()));
  __ push(ecx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}
118
119
// Allocates a function context with slots_ extra slots in new space for the
// function found on the stack. The new context is returned in eax and also
// installed as the current context in esi. Falls back to Runtime::kNewContext
// when allocation fails.
void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Setup the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), factory->context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Setup the fixed slots.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the surrounding context. We go through the
  // context in the function (ecx) to match the allocation behavior we have
  // in the runtime system (see Heap::AllocateFunctionContext).
  __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
  __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, factory->undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter. The new context becomes the
  // current context (esi).
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}
164
165
// Clones a shallow array literal from its boilerplate object (looked up in
// the literals array via the literal index, both on the stack). The JSArray
// header and, for non-empty literals, the elements array are copied word by
// word in one new-space allocation. Jumps to the runtime when the
// boilerplate hasn't been created yet or allocation fails.
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  // The literal index is a smi; times_half_pointer_size turns the smi-tagged
  // index directly into a byte offset.
  STATIC_ASSERT(kPointerSize == 4);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Factory* factory = masm->isolate()->factory();
  __ cmp(ecx, factory->undefined_value());
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    // Verify that the boilerplate's elements have the map this stub was
    // specialized for (writable vs. copy-on-write).
    const char* message;
    Handle<Map> expected_map;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map = factory->fixed_array_map();
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map = factory->fixed_cow_array_map();
    }
    __ push(ecx);
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
    __ Assert(equal, message);
    __ pop(ecx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part. The elements pointer is skipped for non-empty
  // literals because it is rewritten below to point at the fresh copy.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and setup the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
241
242
// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
// Converts the value on the stack to a boolean following the ES ToBoolean
// rules and returns 1 (true) or 0 (false) in eax:
//   undefined/false/null -> false; true -> true; smi 0 -> false, other smis
//   -> true; undetectable objects -> false; JS objects -> true; strings ->
//   non-empty; heap numbers -> false iff +0, -0 or NaN.
void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label false_result, true_result, not_string;
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  Factory* factory = masm->isolate()->factory();

  // undefined -> false
  __ cmp(eax, factory->undefined_value());
  __ j(equal, &false_result);

  // Boolean -> its value
  __ cmp(eax, factory->true_value());
  __ j(equal, &true_result);
  __ cmp(eax, factory->false_value());
  __ j(equal, &false_result);

  // Smis: 0 -> false, all other -> true
  __ test(eax, Operand(eax));
  __ j(zero, &false_result);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &true_result);

  // 'null' => false.
  __ cmp(eax, factory->null_value());
  __ j(equal, &false_result, Label::kNear);

  // Get the map and type of the heap object.
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  __ j(not_zero, &false_result, Label::kNear);

  // JavaScript object => true.
  __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
  __ j(above_equal, &true_result, Label::kNear);

  // String value => false iff empty.
  __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &not_string, Label::kNear);
  // The string length is a smi; a smi zero is all zero bits, so comparing
  // against Immediate(0) is valid.
  STATIC_ASSERT(kSmiTag == 0);
  __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
  __ j(zero, &false_result, Label::kNear);
  __ jmp(&true_result, Label::kNear);

  __ bind(&not_string);
  // HeapNumber => false iff +0, -0, or NaN.
  __ cmp(edx, factory->heap_number_map());
  __ j(not_equal, &true_result, Label::kNear);
  __ fldz();
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ FCmp();
  __ j(zero, &false_result, Label::kNear);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in eax.
  __ bind(&true_result);
  __ mov(eax, 1);
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ mov(eax, 0);
  __ ret(1 * kPointerSize);
}
308
309
// Static helpers shared by the arithmetic stubs for loading and checking
// floating point operands, either on the x87 FPU stack or in SSE2 registers
// (xmm0/xmm1). All methods only emit code; none allocate.
class FloatingPointHelper : public AllStatic {
 public:

  // Where the two operands live on entry.
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Checks that the two floating point numbers on top of the FPU stack
  // have int32 values.
  static void CheckFloatOperandsAreInt32(MacroAssembler* masm,
                                         Label* non_int32);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Must only be called after LoadUnknownsAsIntegers. Assumes that the
  // operands are pushed on the stack, and that their conversions to int32
  // are in eax and ecx. Checks that the original numbers were in the int32
  // range.
  static void CheckLoadedIntegersWereInt32(MacroAssembler* masm,
                                           bool use_sse3,
                                           Label* not_int32);

  // Assumes that operands are smis or heap numbers and loads them
  // into xmm0 and xmm1. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);

  // Checks that the two floating point numbers loaded into xmm0 and xmm1
  // have int32 values.
  static void CheckSSE2OperandsAreInt32(MacroAssembler* masm,
                                        Label* non_int32,
                                        Register scratch);
};
383
384
// Get the integer part of a heap number. Surprisingly, all this bit twiddling
// is faster than using the built-in instructions on floating point registers.
// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
// trashed registers.
// With SSE3, fisttp does the truncation directly (after an exponent range
// check); otherwise the double is decomposed manually from its exponent and
// mantissa words. On failure (value outside the handled range) jumps to
// conversion_failure.
static void IntegerConvert(MacroAssembler* masm,
                           Register source,
                           bool use_sse3,
                           Label* conversion_failure) {
  ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
  Label done, right_exponent, normal_exponent;
  Register scratch = ebx;
  Register scratch2 = edi;
  // Get exponent word.
  __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
  // Get exponent alone in scratch2.
  __ mov(scratch2, scratch);
  __ and_(scratch2, HeapNumber::kExponentMask);
  if (use_sse3) {
    CpuFeatures::Scope scope(SSE3);
    // Check whether the exponent is too big for a 64 bit signed integer.
    static const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
    __ j(greater_equal, conversion_failure);
    // Load x87 register with heap number.
    __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
    // Reserve space for 64 bit answer.
    __ sub(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(ecx, Operand(esp, 0));  // Load low word of answer into ecx.
    __ add(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
  } else {
    // Load ecx with zero. We use this either for the final shift or
    // for the answer.
    __ xor_(ecx, Operand(ecx));
    // Check whether the exponent matches a 32 bit signed int that cannot be
    // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
    // exponent is 30 (biased). This is the exponent that we are fastest at and
    // also the highest exponent we can handle here.
    const uint32_t non_smi_exponent =
        (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
    // If we have a match of the int32-but-not-Smi exponent then skip some
    // logic.
    __ j(equal, &right_exponent);
    // If the exponent is higher than that then go to slow case. This catches
    // numbers that don't fit in a signed int32, infinities and NaNs.
    __ j(less, &normal_exponent);

    {
      // Handle a big exponent. The only reason we have this code is that the
      // >>> operator has a tendency to generate numbers with an exponent of 31.
      const uint32_t big_non_smi_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
      __ j(not_equal, conversion_failure);
      // We have the big exponent, typically from >>>. This means the number is
      // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
      __ mov(scratch2, scratch);
      __ and_(scratch2, HeapNumber::kMantissaMask);
      // Put back the implicit 1.
      __ or_(scratch2, 1 << HeapNumber::kExponentShift);
      // Shift up the mantissa bits to take up the space the exponent used to
      // take. We just orred in the implicit bit so that took care of one and
      // we want to use the full unsigned range so we subtract 1 bit from the
      // shift distance.
      const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
      __ shl(scratch2, big_shift_distance);
      // Get the second half of the double.
      __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
      // Shift down 21 bits to get the most significant 11 bits or the low
      // mantissa word.
      __ shr(ecx, 32 - big_shift_distance);
      __ or_(ecx, Operand(scratch2));
      // We have the answer in ecx, but we may need to negate it.
      __ test(scratch, Operand(scratch));
      __ j(positive, &done);
      __ neg(ecx);
      __ jmp(&done);
    }

    __ bind(&normal_exponent);
    // Exponent word in scratch, exponent part of exponent word in scratch2.
    // Zero in ecx.
    // We know the exponent is smaller than 30 (biased). If it is less than
    // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
    // it rounds to zero.
    const uint32_t zero_exponent =
        (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
    __ sub(Operand(scratch2), Immediate(zero_exponent));
    // ecx already has a Smi zero.
    __ j(less, &done);

    // We have a shifted exponent between 0 and 30 in scratch2.
    __ shr(scratch2, HeapNumber::kExponentShift);
    __ mov(ecx, Immediate(30));
    __ sub(ecx, Operand(scratch2));

    __ bind(&right_exponent);
    // Here ecx is the shift, scratch is the exponent word.
    // Get the top bits of the mantissa.
    __ and_(scratch, HeapNumber::kMantissaMask);
    // Put back the implicit 1.
    __ or_(scratch, 1 << HeapNumber::kExponentShift);
    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We have kExponentShift + 1 significant bits int he low end of the
    // word. Shift them to the top bits.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    __ shl(scratch, shift_distance);
    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
    // Shift down 22 bits to get the most significant 10 bits or the low
    // mantissa word.
    __ shr(scratch2, 32 - shift_distance);
    __ or_(scratch2, Operand(scratch));
    // Move down according to the exponent.
    __ shr_cl(scratch2);
    // Now the unsigned answer is in scratch2. We need to move it to ecx and
    // we may need to fix the sign.
    Label negative;
    __ xor_(ecx, Operand(ecx));
    // The sign bit lives in the exponent word; comparing 0 against it tells
    // us whether the original number was negative.
    __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
    __ j(greater, &negative, Label::kNear);
    __ mov(ecx, scratch2);
    __ jmp(&done, Label::kNear);
    __ bind(&negative);
    __ sub(ecx, Operand(scratch2));
    __ bind(&done);
  }
}
518
519
// Returns the compiled code object for a UnaryOpStub configured by the
// given minor key and type-feedback info.
Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) {
  UnaryOpStub stub(key, type_info);
  return stub.GetCode();
}
524
525
// Lazily builds and caches a human-readable stub name of the form
// "UnaryOpStub_<op>_<Alloc|Overwrite>_<operand type>". Returns "OOM" if the
// name buffer cannot be allocated.
const char* UnaryOpStub::GetName() {
  if (name_ != NULL) return name_;  // Already built — return cached name.
  const int kMaxNameLength = 100;
  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
      kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name = NULL;  // Make g++ happy.
  switch (mode_) {
    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "UnaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               UnaryOpIC::GetName(operand_type_));
  return name_;
}
546
547
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatches stub generation on the recorded operand type feedback:
// uninitialized stubs patch themselves, the others emit increasingly
// general code (smi-only, heap-number, fully generic).
void UnaryOpStub::Generate(MacroAssembler* masm) {
  switch (operand_type_) {
    case UnaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case UnaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case UnaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case UnaryOpIC::GENERIC:
      GenerateGenericStub(masm);
      break;
  }
}
565
566
// Emits code that calls the kUnaryOp_Patch IC utility so the call site is
// repatched with a stub specialized for the observed operand type. The
// operand (eax), the stub key, the operation and the operand type are
// pushed as arguments.
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  __ push(eax);
  // the argument is now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operand_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
                        masm->isolate()), 4, 1);
}
585
586
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatches smi-specialized code generation on the unary operation
// (negation or bitwise not).
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateSmiStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateSmiStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}
600
601
// Smi-specialized negation. Non-smi and edge-case inputs (0, overflow of
// the smallest smi) undo any optimistic work and trigger a type transition
// so a more general stub is installed.
void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
                     Label::kNear, Label::kNear, Label::kNear);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);  // Restore the original operand to eax.
  __ bind(&non_smi);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}
612
613
// Smi-specialized bitwise not. Non-smi inputs trigger a type transition.
void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  Label non_smi;
  GenerateSmiCodeBitNot(masm, &non_smi);
  __ bind(&non_smi);
  GenerateTypeTransition(masm);
}
620
621
// Emits the fast path for smi negation: computes 0 - eax and returns.
// Jumps to non_smi for non-smi input, to slow for 0 (negating gives -0,
// which is not a smi), and to undo on overflow (negating the smallest smi)
// with the original operand preserved in edx.
void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
                                     Label* non_smi,
                                     Label* undo,
                                     Label* slow,
                                     Label::Distance non_smi_near,
                                     Label::Distance undo_near,
                                     Label::Distance slow_near) {
  // Check whether the value is a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, non_smi, non_smi_near);

  // We can't handle -0 with smis, so use a type transition for that case.
  __ test(eax, Operand(eax));
  __ j(zero, slow, slow_near);

  // Try optimistic subtraction '0 - value', saving operand in eax for undo.
  __ mov(edx, Operand(eax));
  __ Set(eax, Immediate(0));
  __ sub(eax, Operand(edx));
  __ j(overflow, undo, undo_near);
  __ ret(0);
}
644
645
// Emits the fast path for smi bitwise not. Because ~smi flips the tag bit
// as well, the result is re-tagged by masking the tag bits back out.
// Jumps to non_smi for non-smi input.
void UnaryOpStub::GenerateSmiCodeBitNot(
    MacroAssembler* masm,
    Label* non_smi,
    Label::Distance non_smi_near) {
  // Check whether the value is a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, non_smi, non_smi_near);

  // Flip bits and revert inverted smi-tag.
  __ not_(eax);
  __ and_(eax, ~kSmiTagMask);
  __ ret(0);
}
659
660
// Restores the original operand (saved in edx by GenerateSmiCodeSub) back
// into eax after an optimistic operation overflowed.
void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
  __ mov(eax, Operand(edx));
}
664
665
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatches heap-number-specialized code generation on the unary
// operation (negation or bitwise not).
void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateHeapNumberStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateHeapNumberStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}
679
680
// Negation specialized for smis and heap numbers. Smi 0 goes straight to
// the generic fallback (result -0 is a heap number); any other unexpected
// type triggers a type transition.
void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);  // Restore the original operand to eax.
  __ bind(&slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);
}
693
694
// Bitwise not specialized for smis and heap numbers; any other type
// triggers a type transition.
void UnaryOpStub::GenerateHeapNumberStubBitNot(
    MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}
704
705
// Negates the heap number in eax by flipping its IEEE-754 sign bit.
// In overwrite mode the input object is mutated in place; otherwise a
// fresh heap number is allocated (via the runtime on allocation failure)
// and filled with the sign-flipped value. Jumps to slow if eax is not a
// heap number.
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
                                            Label* slow) {
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, slow);

  if (mode_ == UNARY_OVERWRITE) {
    __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
            Immediate(HeapNumber::kSignMask));  // Flip sign.
  } else {
    __ mov(edx, Operand(eax));
    // edx: operand

    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    // Allocation failed: allocate through the runtime, preserving the
    // operand across the call.
    __ bind(&slow_allocate_heapnumber);
    __ EnterInternalFrame();
    __ push(edx);
    __ CallRuntime(Runtime::kNumberAlloc, 0);
    __ pop(edx);
    __ LeaveInternalFrame();

    __ bind(&heapnumber_allocated);
    // eax: allocated 'empty' number
    __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
    __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
    __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
    __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
    __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
  }
  __ ret(0);
}
740
741
// Applies bitwise not to the heap number in eax: converts it to an untagged
// int32 (ecx) via IntegerConvert, inverts the bits, and returns the result
// as a smi when it fits, otherwise stored in a heap number (reusing the
// input object unless mode_ is UNARY_NO_OVERWRITE). Jumps to slow if eax is
// not a heap number or the conversion fails.
void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
                                               Label* slow) {
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, slow);

  // Convert the heap number in eax to an untagged integer in ecx.
  IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);

  // Do the bitwise operation and check if the result fits in a smi.
  // Values >= 0xc0000000 in two's complement are outside the smi range
  // (smis carry 31 bits of payload on ia32), hence the sign check.
  Label try_float;
  __ not_(ecx);
  __ cmp(ecx, 0xc0000000);
  __ j(sign, &try_float, Label::kNear);

  // Tag the result as a smi and we're done.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(eax, Operand(ecx, times_2, kSmiTag));
  __ ret(0);

  // Try to store the result in a heap number.
  __ bind(&try_float);
  if (mode_ == UNARY_NO_OVERWRITE) {
    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ mov(ebx, eax);
    __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    __ bind(&slow_allocate_heapnumber);
    __ EnterInternalFrame();
    // Push the original HeapNumber on the stack. The integer value can't
    // be stored since it's untagged and not in the smi range (so we can't
    // smi-tag it). We'll recalculate the value after the GC instead.
    __ push(ebx);
    __ CallRuntime(Runtime::kNumberAlloc, 0);
    // New HeapNumber is in eax.
    __ pop(edx);
    __ LeaveInternalFrame();
    // IntegerConvert uses ebx and edi as scratch registers.
    // This conversion won't go slow-case.
    IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
    __ not_(ecx);

    __ bind(&heapnumber_allocated);
  }
  // Store the int32 result as a double into the heap number in eax.
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(SSE2);
    __ cvtsi2sd(xmm0, Operand(ecx));
    __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
  } else {
    // No SSE2: go through the x87 FPU via the stack.
    __ push(ecx);
    __ fild_s(Operand(esp, 0));
    __ pop(ecx);
    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  }
  __ ret(0);
}
799
800
801// TODO(svenpanne): Use virtual functions instead of switch.
802void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
803 switch (op_) {
804 case Token::SUB:
805 GenerateGenericStubSub(masm);
806 break;
807 case Token::BIT_NOT:
808 GenerateGenericStubBitNot(masm);
809 break;
810 default:
811 UNREACHABLE();
812 }
813}
814
815
816void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
817 Label non_smi, undo, slow;
818 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
819 __ bind(&non_smi);
820 GenerateHeapNumberCodeSub(masm, &slow);
821 __ bind(&undo);
822 GenerateSmiCodeUndo(masm);
823 __ bind(&slow);
824 GenerateGenericCodeFallback(masm);
825}
826
827
828void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
829 Label non_smi, slow;
830 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
831 __ bind(&non_smi);
832 GenerateHeapNumberCodeBitNot(masm, &slow);
833 __ bind(&slow);
834 GenerateGenericCodeFallback(masm);
835}
836
837
838void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
839 // Handle the slow case by jumping to the corresponding JavaScript builtin.
840 __ pop(ecx); // pop return address.
841 __ push(eax);
842 __ push(ecx); // push return address
843 switch (op_) {
844 case Token::SUB:
845 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
846 break;
847 case Token::BIT_NOT:
848 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
849 break;
850 default:
851 UNREACHABLE();
852 }
853}
854
855
856Handle<Code> GetBinaryOpStub(int key,
857 BinaryOpIC::TypeInfo type_info,
858 BinaryOpIC::TypeInfo result_type_info) {
859 BinaryOpStub stub(key, type_info, result_type_info);
860 return stub.GetCode();
861}
862
863
// Emits a tail call into the BinaryOp patching IC utility. Pushes the two
// operands (edx, eax) plus this stub's key, operation, and operand type
// info (5 arguments total), then tail-calls kBinaryOp_Patch, which patches
// the call site to a more specialized stub and computes the result.
void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  __ push(edx);
  __ push(eax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
                        masm->isolate()),
      5,   // Number of arguments pushed above.
      1);  // Result size.
}
885
886
// Prepare for a type transition runtime call when the args are already on
// the stack, under the return address. Unlike GenerateTypeTransition above,
// this does not push edx/eax; it only adds the stub key, operation, and
// type info before tail-calling the patching IC utility.
void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  // Left and right arguments are already on top of the stack.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
                        masm->isolate()),
      5,   // Number of arguments (two operands + key + op + type info).
      1);  // Result size.
}
908
909
Ben Murdoch257744e2011-11-30 15:57:28 +0000910void BinaryOpStub::Generate(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100911 switch (operands_type_) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000912 case BinaryOpIC::UNINITIALIZED:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100913 GenerateTypeTransition(masm);
914 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000915 case BinaryOpIC::SMI:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100916 GenerateSmiStub(masm);
917 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000918 case BinaryOpIC::INT32:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100919 GenerateInt32Stub(masm);
920 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000921 case BinaryOpIC::HEAP_NUMBER:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100922 GenerateHeapNumberStub(masm);
923 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000924 case BinaryOpIC::ODDBALL:
Steve Block44f0eee2011-05-26 01:26:41 +0100925 GenerateOddballStub(masm);
926 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000927 case BinaryOpIC::BOTH_STRING:
928 GenerateBothStringStub(masm);
929 break;
930 case BinaryOpIC::STRING:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100931 GenerateStringStub(masm);
932 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000933 case BinaryOpIC::GENERIC:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100934 GenerateGeneric(masm);
935 break;
936 default:
937 UNREACHABLE();
938 }
939}
940
941
Ben Murdoch257744e2011-11-30 15:57:28 +0000942const char* BinaryOpStub::GetName() {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100943 if (name_ != NULL) return name_;
944 const int kMaxNameLength = 100;
Steve Block44f0eee2011-05-26 01:26:41 +0100945 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
946 kMaxNameLength);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100947 if (name_ == NULL) return "OOM";
948 const char* op_name = Token::Name(op_);
949 const char* overwrite_name;
950 switch (mode_) {
951 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
952 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
953 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
954 default: overwrite_name = "UnknownOverwrite"; break;
955 }
956
957 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
Ben Murdoch257744e2011-11-30 15:57:28 +0000958 "BinaryOpStub_%s_%s_%s",
Ben Murdochb0fe1622011-05-05 13:52:32 +0100959 op_name,
960 overwrite_name,
Ben Murdoch257744e2011-11-30 15:57:28 +0000961 BinaryOpIC::GetName(operands_type_));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100962 return name_;
963}
964
965
Ben Murdoch257744e2011-11-30 15:57:28 +0000966void BinaryOpStub::GenerateSmiCode(
967 MacroAssembler* masm,
Ben Murdochb0fe1622011-05-05 13:52:32 +0100968 Label* slow,
969 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
970 // 1. Move arguments into edx, eax except for DIV and MOD, which need the
971 // dividend in eax and edx free for the division. Use eax, ebx for those.
972 Comment load_comment(masm, "-- Load arguments");
973 Register left = edx;
974 Register right = eax;
975 if (op_ == Token::DIV || op_ == Token::MOD) {
976 left = eax;
977 right = ebx;
978 __ mov(ebx, eax);
979 __ mov(eax, edx);
980 }
981
982
983 // 2. Prepare the smi check of both operands by oring them together.
984 Comment smi_check_comment(masm, "-- Smi check arguments");
985 Label not_smis;
986 Register combined = ecx;
987 ASSERT(!left.is(combined) && !right.is(combined));
988 switch (op_) {
989 case Token::BIT_OR:
990 // Perform the operation into eax and smi check the result. Preserve
991 // eax in case the result is not a smi.
992 ASSERT(!left.is(ecx) && !right.is(ecx));
993 __ mov(ecx, right);
994 __ or_(right, Operand(left)); // Bitwise or is commutative.
995 combined = right;
996 break;
997
998 case Token::BIT_XOR:
999 case Token::BIT_AND:
1000 case Token::ADD:
1001 case Token::SUB:
1002 case Token::MUL:
1003 case Token::DIV:
1004 case Token::MOD:
1005 __ mov(combined, right);
1006 __ or_(combined, Operand(left));
1007 break;
1008
1009 case Token::SHL:
1010 case Token::SAR:
1011 case Token::SHR:
1012 // Move the right operand into ecx for the shift operation, use eax
1013 // for the smi check register.
1014 ASSERT(!left.is(ecx) && !right.is(ecx));
1015 __ mov(ecx, right);
1016 __ or_(right, Operand(left));
1017 combined = right;
1018 break;
1019
1020 default:
1021 break;
1022 }
1023
1024 // 3. Perform the smi check of the operands.
1025 STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case.
1026 __ test(combined, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00001027 __ j(not_zero, &not_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001028
1029 // 4. Operands are both smis, perform the operation leaving the result in
1030 // eax and check the result if necessary.
1031 Comment perform_smi(masm, "-- Perform smi operation");
1032 Label use_fp_on_smis;
1033 switch (op_) {
1034 case Token::BIT_OR:
1035 // Nothing to do.
1036 break;
1037
1038 case Token::BIT_XOR:
1039 ASSERT(right.is(eax));
1040 __ xor_(right, Operand(left)); // Bitwise xor is commutative.
1041 break;
1042
1043 case Token::BIT_AND:
1044 ASSERT(right.is(eax));
1045 __ and_(right, Operand(left)); // Bitwise and is commutative.
1046 break;
1047
1048 case Token::SHL:
1049 // Remove tags from operands (but keep sign).
1050 __ SmiUntag(left);
1051 __ SmiUntag(ecx);
1052 // Perform the operation.
1053 __ shl_cl(left);
1054 // Check that the *signed* result fits in a smi.
1055 __ cmp(left, 0xc0000000);
Ben Murdoch257744e2011-11-30 15:57:28 +00001056 __ j(sign, &use_fp_on_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001057 // Tag the result and store it in register eax.
1058 __ SmiTag(left);
1059 __ mov(eax, left);
1060 break;
1061
1062 case Token::SAR:
1063 // Remove tags from operands (but keep sign).
1064 __ SmiUntag(left);
1065 __ SmiUntag(ecx);
1066 // Perform the operation.
1067 __ sar_cl(left);
1068 // Tag the result and store it in register eax.
1069 __ SmiTag(left);
1070 __ mov(eax, left);
1071 break;
1072
1073 case Token::SHR:
1074 // Remove tags from operands (but keep sign).
1075 __ SmiUntag(left);
1076 __ SmiUntag(ecx);
1077 // Perform the operation.
1078 __ shr_cl(left);
1079 // Check that the *unsigned* result fits in a smi.
1080 // Neither of the two high-order bits can be set:
1081 // - 0x80000000: high bit would be lost when smi tagging.
1082 // - 0x40000000: this number would convert to negative when
1083 // Smi tagging these two cases can only happen with shifts
1084 // by 0 or 1 when handed a valid smi.
1085 __ test(left, Immediate(0xc0000000));
Ben Murdoch257744e2011-11-30 15:57:28 +00001086 __ j(not_zero, &use_fp_on_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001087 // Tag the result and store it in register eax.
1088 __ SmiTag(left);
1089 __ mov(eax, left);
1090 break;
1091
1092 case Token::ADD:
1093 ASSERT(right.is(eax));
1094 __ add(right, Operand(left)); // Addition is commutative.
Ben Murdoch257744e2011-11-30 15:57:28 +00001095 __ j(overflow, &use_fp_on_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001096 break;
1097
1098 case Token::SUB:
1099 __ sub(left, Operand(right));
Ben Murdoch257744e2011-11-30 15:57:28 +00001100 __ j(overflow, &use_fp_on_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001101 __ mov(eax, left);
1102 break;
1103
1104 case Token::MUL:
1105 // If the smi tag is 0 we can just leave the tag on one operand.
1106 STATIC_ASSERT(kSmiTag == 0); // Adjust code below if not the case.
1107 // We can't revert the multiplication if the result is not a smi
1108 // so save the right operand.
1109 __ mov(ebx, right);
1110 // Remove tag from one of the operands (but keep sign).
1111 __ SmiUntag(right);
1112 // Do multiplication.
1113 __ imul(right, Operand(left)); // Multiplication is commutative.
Ben Murdoch257744e2011-11-30 15:57:28 +00001114 __ j(overflow, &use_fp_on_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001115 // Check for negative zero result. Use combined = left | right.
1116 __ NegativeZeroTest(right, combined, &use_fp_on_smis);
1117 break;
1118
1119 case Token::DIV:
1120 // We can't revert the division if the result is not a smi so
1121 // save the left operand.
1122 __ mov(edi, left);
1123 // Check for 0 divisor.
1124 __ test(right, Operand(right));
Ben Murdoch257744e2011-11-30 15:57:28 +00001125 __ j(zero, &use_fp_on_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001126 // Sign extend left into edx:eax.
1127 ASSERT(left.is(eax));
1128 __ cdq();
1129 // Divide edx:eax by right.
1130 __ idiv(right);
1131 // Check for the corner case of dividing the most negative smi by
1132 // -1. We cannot use the overflow flag, since it is not set by idiv
1133 // instruction.
1134 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
1135 __ cmp(eax, 0x40000000);
1136 __ j(equal, &use_fp_on_smis);
1137 // Check for negative zero result. Use combined = left | right.
1138 __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
1139 // Check that the remainder is zero.
1140 __ test(edx, Operand(edx));
1141 __ j(not_zero, &use_fp_on_smis);
1142 // Tag the result and store it in register eax.
1143 __ SmiTag(eax);
1144 break;
1145
1146 case Token::MOD:
1147 // Check for 0 divisor.
1148 __ test(right, Operand(right));
Ben Murdoch257744e2011-11-30 15:57:28 +00001149 __ j(zero, &not_smis);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001150
1151 // Sign extend left into edx:eax.
1152 ASSERT(left.is(eax));
1153 __ cdq();
1154 // Divide edx:eax by right.
1155 __ idiv(right);
1156 // Check for negative zero result. Use combined = left | right.
1157 __ NegativeZeroTest(edx, combined, slow);
1158 // Move remainder to register eax.
1159 __ mov(eax, edx);
1160 break;
1161
1162 default:
1163 UNREACHABLE();
1164 }
1165
1166 // 5. Emit return of result in eax. Some operations have registers pushed.
1167 switch (op_) {
1168 case Token::ADD:
1169 case Token::SUB:
1170 case Token::MUL:
1171 case Token::DIV:
1172 __ ret(0);
1173 break;
1174 case Token::MOD:
1175 case Token::BIT_OR:
1176 case Token::BIT_AND:
1177 case Token::BIT_XOR:
1178 case Token::SAR:
1179 case Token::SHL:
1180 case Token::SHR:
1181 __ ret(2 * kPointerSize);
1182 break;
1183 default:
1184 UNREACHABLE();
1185 }
1186
1187 // 6. For some operations emit inline code to perform floating point
1188 // operations on known smis (e.g., if the result of the operation
1189 // overflowed the smi range).
1190 if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) {
1191 __ bind(&use_fp_on_smis);
1192 switch (op_) {
1193 // Undo the effects of some operations, and some register moves.
1194 case Token::SHL:
1195 // The arguments are saved on the stack, and only used from there.
1196 break;
1197 case Token::ADD:
1198 // Revert right = right + left.
1199 __ sub(right, Operand(left));
1200 break;
1201 case Token::SUB:
1202 // Revert left = left - right.
1203 __ add(left, Operand(right));
1204 break;
1205 case Token::MUL:
1206 // Right was clobbered but a copy is in ebx.
1207 __ mov(right, ebx);
1208 break;
1209 case Token::DIV:
1210 // Left was clobbered but a copy is in edi. Right is in ebx for
1211 // division. They should be in eax, ebx for jump to not_smi.
1212 __ mov(eax, edi);
1213 break;
1214 default:
1215 // No other operators jump to use_fp_on_smis.
1216 break;
1217 }
1218 __ jmp(&not_smis);
1219 } else {
1220 ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
1221 switch (op_) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001222 case Token::SHL:
1223 case Token::SHR: {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001224 Comment perform_float(masm, "-- Perform float operation on smis");
1225 __ bind(&use_fp_on_smis);
1226 // Result we want is in left == edx, so we can put the allocated heap
1227 // number in eax.
1228 __ AllocateHeapNumber(eax, ecx, ebx, slow);
1229 // Store the result in the HeapNumber and return.
Ben Murdoch257744e2011-11-30 15:57:28 +00001230 // It's OK to overwrite the arguments on the stack because we
1231 // are about to return.
1232 if (op_ == Token::SHR) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001233 __ mov(Operand(esp, 1 * kPointerSize), left);
Ben Murdoch257744e2011-11-30 15:57:28 +00001234 __ mov(Operand(esp, 2 * kPointerSize), Immediate(0));
1235 __ fild_d(Operand(esp, 1 * kPointerSize));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001236 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00001237 } else {
1238 ASSERT_EQ(Token::SHL, op_);
1239 if (CpuFeatures::IsSupported(SSE2)) {
1240 CpuFeatures::Scope use_sse2(SSE2);
1241 __ cvtsi2sd(xmm0, Operand(left));
1242 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1243 } else {
1244 __ mov(Operand(esp, 1 * kPointerSize), left);
1245 __ fild_s(Operand(esp, 1 * kPointerSize));
1246 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1247 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001248 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001249 __ ret(2 * kPointerSize);
1250 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001251 }
1252
1253 case Token::ADD:
1254 case Token::SUB:
1255 case Token::MUL:
1256 case Token::DIV: {
1257 Comment perform_float(masm, "-- Perform float operation on smis");
1258 __ bind(&use_fp_on_smis);
1259 // Restore arguments to edx, eax.
1260 switch (op_) {
1261 case Token::ADD:
1262 // Revert right = right + left.
1263 __ sub(right, Operand(left));
1264 break;
1265 case Token::SUB:
1266 // Revert left = left - right.
1267 __ add(left, Operand(right));
1268 break;
1269 case Token::MUL:
1270 // Right was clobbered but a copy is in ebx.
1271 __ mov(right, ebx);
1272 break;
1273 case Token::DIV:
1274 // Left was clobbered but a copy is in edi. Right is in ebx for
1275 // division.
1276 __ mov(edx, edi);
1277 __ mov(eax, right);
1278 break;
1279 default: UNREACHABLE();
1280 break;
1281 }
1282 __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
Ben Murdoch8b112d22011-06-08 16:22:53 +01001283 if (CpuFeatures::IsSupported(SSE2)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001284 CpuFeatures::Scope use_sse2(SSE2);
1285 FloatingPointHelper::LoadSSE2Smis(masm, ebx);
1286 switch (op_) {
1287 case Token::ADD: __ addsd(xmm0, xmm1); break;
1288 case Token::SUB: __ subsd(xmm0, xmm1); break;
1289 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1290 case Token::DIV: __ divsd(xmm0, xmm1); break;
1291 default: UNREACHABLE();
1292 }
1293 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
1294 } else { // SSE2 not available, use FPU.
1295 FloatingPointHelper::LoadFloatSmis(masm, ebx);
1296 switch (op_) {
1297 case Token::ADD: __ faddp(1); break;
1298 case Token::SUB: __ fsubp(1); break;
1299 case Token::MUL: __ fmulp(1); break;
1300 case Token::DIV: __ fdivp(1); break;
1301 default: UNREACHABLE();
1302 }
1303 __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
1304 }
1305 __ mov(eax, ecx);
1306 __ ret(0);
1307 break;
1308 }
1309
1310 default:
1311 break;
1312 }
1313 }
1314
1315 // 7. Non-smi operands, fall out to the non-smi code with the operands in
1316 // edx and eax.
1317 Comment done_comment(masm, "-- Enter non-smi code");
1318 __ bind(&not_smis);
1319 switch (op_) {
1320 case Token::BIT_OR:
1321 case Token::SHL:
1322 case Token::SAR:
1323 case Token::SHR:
1324 // Right operand is saved in ecx and eax was destroyed by the smi
1325 // check.
1326 __ mov(eax, ecx);
1327 break;
1328
1329 case Token::DIV:
1330 case Token::MOD:
1331 // Operands are in eax, ebx at this point.
1332 __ mov(edx, eax);
1333 __ mov(eax, ebx);
1334 break;
1335
1336 default:
1337 break;
1338 }
1339}
1340
1341
Ben Murdoch257744e2011-11-30 15:57:28 +00001342void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001343 Label call_runtime;
1344
1345 switch (op_) {
1346 case Token::ADD:
1347 case Token::SUB:
1348 case Token::MUL:
1349 case Token::DIV:
1350 break;
1351 case Token::MOD:
1352 case Token::BIT_OR:
1353 case Token::BIT_AND:
1354 case Token::BIT_XOR:
1355 case Token::SAR:
1356 case Token::SHL:
1357 case Token::SHR:
1358 GenerateRegisterArgsPush(masm);
1359 break;
1360 default:
1361 UNREACHABLE();
1362 }
1363
Ben Murdoch257744e2011-11-30 15:57:28 +00001364 if (result_type_ == BinaryOpIC::UNINITIALIZED ||
1365 result_type_ == BinaryOpIC::SMI) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001366 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
1367 } else {
1368 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1369 }
1370 __ bind(&call_runtime);
1371 switch (op_) {
1372 case Token::ADD:
1373 case Token::SUB:
1374 case Token::MUL:
1375 case Token::DIV:
1376 GenerateTypeTransition(masm);
1377 break;
1378 case Token::MOD:
1379 case Token::BIT_OR:
1380 case Token::BIT_AND:
1381 case Token::BIT_XOR:
1382 case Token::SAR:
1383 case Token::SHL:
1384 case Token::SHR:
1385 GenerateTypeTransitionWithSavedArgs(masm);
1386 break;
1387 default:
1388 UNREACHABLE();
1389 }
1390}
1391
1392
Ben Murdoch257744e2011-11-30 15:57:28 +00001393void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1394 ASSERT(operands_type_ == BinaryOpIC::STRING);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001395 ASSERT(op_ == Token::ADD);
Steve Block1e0659c2011-05-24 12:43:12 +01001396 // Try to add arguments as strings, otherwise, transition to the generic
Ben Murdoch257744e2011-11-30 15:57:28 +00001397 // BinaryOpIC type.
Steve Block1e0659c2011-05-24 12:43:12 +01001398 GenerateAddStrings(masm);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001399 GenerateTypeTransition(masm);
1400}
1401
1402
Ben Murdoch257744e2011-11-30 15:57:28 +00001403void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001404 Label call_runtime;
Ben Murdoch257744e2011-11-30 15:57:28 +00001405 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
1406 ASSERT(op_ == Token::ADD);
1407 // If both arguments are strings, call the string add stub.
1408 // Otherwise, do a transition.
1409
1410 // Registers containing left and right operands respectively.
1411 Register left = edx;
1412 Register right = eax;
1413
1414 // Test if left operand is a string.
1415 __ test(left, Immediate(kSmiTagMask));
1416 __ j(zero, &call_runtime);
1417 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
1418 __ j(above_equal, &call_runtime);
1419
1420 // Test if right operand is a string.
1421 __ test(right, Immediate(kSmiTagMask));
1422 __ j(zero, &call_runtime);
1423 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
1424 __ j(above_equal, &call_runtime);
1425
1426 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
1427 GenerateRegisterArgsPush(masm);
1428 __ TailCallStub(&string_add_stub);
1429
1430 __ bind(&call_runtime);
1431 GenerateTypeTransition(masm);
1432}
1433
1434
// Stub body for operands recorded as INT32: numbers known to fit in a
// signed 32-bit integer. Falls back to a type transition if an operand or
// result no longer fits int32, and to the JS builtins if allocation fails
// or SHR/MOD hit a hard case.
void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
  Label call_runtime;
  ASSERT(operands_type_ == BinaryOpIC::INT32);

  // Floating point case.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Label not_floats;
      Label not_int32;
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
        FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Check result type if it is currently Int32.
        if (result_type_ <= BinaryOpIC::INT32) {
          // Round-trip double -> int32 -> double; a mismatch (or NaN, via
          // the carry flag from ucomisd) means the result is not an int32.
          __ cvttsd2si(ecx, Operand(xmm0));
          __ cvtsi2sd(xmm2, Operand(ecx));
          __ ucomisd(xmm0, xmm2);
          __ j(not_zero, &not_int32);
          __ j(carry, &not_int32);
        }
        GenerateHeapResultAllocation(masm, &call_runtime);
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        __ ret(0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            masm,
            ecx,
            FloatingPointHelper::ARGS_IN_REGISTERS);
        FloatingPointHelper::CheckFloatOperandsAreInt32(masm, &not_int32);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        __ ret(0);
        __ bind(&after_alloc_failure);
        // Drop the pending FPU result before going to the runtime.
        __ ffree();
        __ jmp(&call_runtime);
      }

      __ bind(&not_floats);
      __ bind(&not_int32);
      GenerateTypeTransition(masm);
      break;
    }

    case Token::MOD: {
      // For MOD we go directly to runtime in the non-smi case.
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      GenerateRegisterArgsPush(masm);
      Label not_floats;
      Label not_int32;
      Label non_smi_result;
      // NOTE(review): dead commented-out SSE2 probe below — remove or
      // re-enable; the integer load path is used unconditionally.
      /* {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
        FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
      }*/
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
                                                  use_sse3_,
                                                  &not_floats);
      FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_,
                                                        &not_int32);
      switch (op_) {
        case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
        case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
        case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
        default: UNREACHABLE();
      }
      if (op_ == Token::SHR) {
        // Check if result is non-negative and fits in a smi.
        __ test(eax, Immediate(0xc0000000));
        __ j(not_zero, &call_runtime);
      } else {
        // Check if result fits in a smi.
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result);
      }
      // Tag smi result and return.
      __ SmiTag(eax);
      __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.

      // All ops except SHR return a signed int32 that we load in
      // a HeapNumber.
      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        // Allocate a heap number if needed.
        __ mov(ebx, Operand(eax));  // ebx: result
        Label skip_allocation;
        switch (mode_) {
          case OVERWRITE_LEFT:
          case OVERWRITE_RIGHT:
            // If the operand was an object, we skip the
            // allocation of a heap number.
            __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                1 * kPointerSize : 2 * kPointerSize));
            __ test(eax, Immediate(kSmiTagMask))
;
            __ j(not_zero, &skip_allocation, Label::kNear);
            // Fall through!
          case NO_OVERWRITE:
            __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(ebx));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          // No SSE2: convert via the x87 FPU using an argument stack slot.
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.
      }

      __ bind(&not_floats);
      __ bind(&not_int32);
      GenerateTypeTransitionWithSavedArgs(masm);
      break;
    }
    default: UNREACHABLE(); break;
  }

  // If an allocation fails, or SHR or MOD hit a hard case,
  // use the runtime system to get the correct result.
  __ bind(&call_runtime);

  switch (op_) {
    case Token::ADD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    // The bit/shift ops already have their arguments on the stack.
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
1635
1636
Ben Murdoch257744e2011-11-30 15:57:28 +00001637void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
Steve Block44f0eee2011-05-26 01:26:41 +01001638 if (op_ == Token::ADD) {
1639 // Handle string addition here, because it is the only operation
1640 // that does not do a ToNumber conversion on the operands.
1641 GenerateAddStrings(masm);
1642 }
1643
Ben Murdoch257744e2011-11-30 15:57:28 +00001644 Factory* factory = masm->isolate()->factory();
1645
Steve Block44f0eee2011-05-26 01:26:41 +01001646 // Convert odd ball arguments to numbers.
Ben Murdoch257744e2011-11-30 15:57:28 +00001647 Label check, done;
1648 __ cmp(edx, factory->undefined_value());
1649 __ j(not_equal, &check, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001650 if (Token::IsBitOp(op_)) {
1651 __ xor_(edx, Operand(edx));
1652 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00001653 __ mov(edx, Immediate(factory->nan_value()));
Steve Block44f0eee2011-05-26 01:26:41 +01001654 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001655 __ jmp(&done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001656 __ bind(&check);
Ben Murdoch257744e2011-11-30 15:57:28 +00001657 __ cmp(eax, factory->undefined_value());
1658 __ j(not_equal, &done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001659 if (Token::IsBitOp(op_)) {
1660 __ xor_(eax, Operand(eax));
1661 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00001662 __ mov(eax, Immediate(factory->nan_value()));
Steve Block44f0eee2011-05-26 01:26:41 +01001663 }
1664 __ bind(&done);
1665
1666 GenerateHeapNumberStub(masm);
1667}
1668
1669
// Stub entry for the state where operands are known to be heap numbers
// (or smis).  Left operand in edx, right operand in eax.  Emits the fast
// floating-point path (SSE2 if available, x87 FPU otherwise) and falls
// back to GenerateTypeTransition / the runtime on failure.
void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label call_runtime;

  // Floating point case.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Label not_floats;
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        // Loads left into xmm0 and right into xmm1; jumps to not_floats
        // if either operand is not a number.
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Result heap number ends up in eax; store xmm0 into it.
        GenerateHeapResultAllocation(masm, &call_runtime);
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        __ ret(0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            masm,
            ecx,
            FloatingPointHelper::ARGS_IN_REGISTERS);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        __ ret(0);
        // On allocation failure the value is still on the x87 stack;
        // free it before going to the runtime.
        __ bind(&after_alloc_failure);
        __ ffree();
        __ jmp(&call_runtime);
      }

      // Operands were not both numbers: transition the stub to a more
      // general state instead of calling the runtime directly.
      __ bind(&not_floats);
      GenerateTypeTransition(masm);
      break;
    }

    case Token::MOD: {
      // For MOD we go directly to runtime in the non-smi case.
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      // Bit ops push the register args here; the runtime fallback below
      // therefore does not push them again for these ops.
      GenerateRegisterArgsPush(masm);
      Label not_floats;
      Label non_smi_result;
      // Converts edx/eax to untagged int32s in edx(->eax) and ecx.
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
                                                  use_sse3_,
                                                  &not_floats);
      switch (op_) {
        case Token::BIT_OR:  __ or_(eax, Operand(ecx)); break;
        case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
        case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
        default: UNREACHABLE();
      }
      if (op_ == Token::SHR) {
        // Check if result is non-negative and fits in a smi.
        __ test(eax, Immediate(0xc0000000));
        __ j(not_zero, &call_runtime);
      } else {
        // Check if result fits in a smi.
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result);
      }
      // Tag smi result and return.
      __ SmiTag(eax);
      __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.

      // All ops except SHR return a signed int32 that we load in
      // a HeapNumber.
      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        // Allocate a heap number if needed.
        __ mov(ebx, Operand(eax));  // ebx: result
        Label skip_allocation;
        switch (mode_) {
          case OVERWRITE_LEFT:
          case OVERWRITE_RIGHT:
            // If the operand was an object, we skip the
            // allocation of a heap number.
            __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                1 * kPointerSize : 2 * kPointerSize));
            __ test(eax, Immediate(kSmiTagMask));
            __ j(not_zero, &skip_allocation, Label::kNear);
            // Fall through!
          case NO_OVERWRITE:
            __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(ebx));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          // Round-trip the int32 through memory so the FPU can load it.
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.
      }

      // Arguments are still on the stack from GenerateRegisterArgsPush
      // above, so use the saved-args transition.
      __ bind(&not_floats);
      GenerateTypeTransitionWithSavedArgs(masm);
      break;
    }
    default: UNREACHABLE(); break;
  }

  // If an allocation fails, or SHR or MOD hit a hard case,
  // use the runtime system to get the correct result.
  __ bind(&call_runtime);

  switch (op_) {
    case Token::ADD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    // Bit ops already pushed their args at the start of their case above.
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
1849
1850
// Fully generic stub entry: no type assumptions about the operands.
// Left operand in edx, right operand in eax.  Tries smi code first, then
// the floating-point fast paths, and finally falls back to the builtins.
void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime;

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->generic_binary_stub_calls(), 1);

  // MOD and the bit/shift ops push their register args up front; the
  // arithmetic ops push them lazily on the runtime fallback below.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
      break;
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR:
      GenerateRegisterArgsPush(masm);
      break;
    default:
      UNREACHABLE();
  }

  // Fast path for smi operands; may allocate heap numbers for results.
  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  // Floating point case.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Label not_floats;
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        // Loads left into xmm0 and right into xmm1, or jumps to not_floats.
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        GenerateHeapResultAllocation(masm, &call_runtime);
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        __ ret(0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            masm,
            ecx,
            FloatingPointHelper::ARGS_IN_REGISTERS);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        __ ret(0);
        // Free the value left on the x87 stack before the runtime call.
        __ bind(&after_alloc_failure);
        __ ffree();
        __ jmp(&call_runtime);
      }
      // Not numbers: fall through to the runtime call below.
      __ bind(&not_floats);
      break;
    }
    case Token::MOD: {
      // For MOD we go directly to runtime in the non-smi case.
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      Label non_smi_result;
      // Converts edx/eax to untagged int32s; bails straight to runtime
      // on conversion failure (args were already pushed above).
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
                                                  use_sse3_,
                                                  &call_runtime);
      switch (op_) {
        case Token::BIT_OR:  __ or_(eax, Operand(ecx)); break;
        case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
        case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
        default: UNREACHABLE();
      }
      if (op_ == Token::SHR) {
        // Check if result is non-negative and fits in a smi.
        __ test(eax, Immediate(0xc0000000));
        __ j(not_zero, &call_runtime);
      } else {
        // Check if result fits in a smi.
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result);
      }
      // Tag smi result and return.
      __ SmiTag(eax);
      __ ret(2 * kPointerSize);  // Drop the arguments from the stack.

      // All ops except SHR return a signed int32 that we load in
      // a HeapNumber.
      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        // Allocate a heap number if needed.
        __ mov(ebx, Operand(eax));  // ebx: result
        Label skip_allocation;
        switch (mode_) {
          case OVERWRITE_LEFT:
          case OVERWRITE_RIGHT:
            // If the operand was an object, we skip the
            // allocation of a heap number.
            __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                1 * kPointerSize : 2 * kPointerSize));
            __ test(eax, Immediate(kSmiTagMask));
            __ j(not_zero, &skip_allocation, Label::kNear);
            // Fall through!
          case NO_OVERWRITE:
            __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(ebx));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          // Round-trip the int32 through memory so the FPU can load it.
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        __ ret(2 * kPointerSize);
      }
      break;
    }
    default: UNREACHABLE(); break;
  }

  // If all else fails, use the runtime system to get the correct
  // result.  MOD and the bit ops pushed their args at function entry.
  __ bind(&call_runtime);
  switch (op_) {
    case Token::ADD: {
      // Try string concatenation first; falls through if not strings.
      GenerateAddStrings(masm);
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
2046
2047
// Handles '+' when either operand is a string by tail-calling the
// appropriate StringAddStub.  Falls through (no control transfer) when
// neither operand is a string, so callers can continue with numeric code.
void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
  ASSERT(op_ == Token::ADD);
  Label left_not_string, call_runtime;

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;

  // Test if left operand is a string (smis are not strings).
  __ test(left, Immediate(kSmiTagMask));
  __ j(zero, &left_not_string, Label::kNear);
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
  __ j(above_equal, &left_not_string, Label::kNear);

  // Left is a string: the stub only needs to check the right operand.
  StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  // Left operand is not a string, test right.
  __ bind(&left_not_string);
  __ test(right, Immediate(kSmiTagMask));
  __ j(zero, &call_runtime, Label::kNear);
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
  __ j(above_equal, &call_runtime, Label::kNear);

  // Right is a string: the stub only needs to check the left operand.
  StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  // Neither argument is a string: fall through to the caller's code.
  __ bind(&call_runtime);
}
2080
2081
// Produces in eax a HeapNumber to hold the result, honouring the stub's
// overwrite mode: reuse the left (edx) or right (eax) operand's heap
// number when allowed, otherwise allocate a fresh one.  Jumps to
// alloc_failure if allocation fails.  Clobbers ebx and ecx.
void BinaryOpStub::GenerateHeapResultAllocation(
    MacroAssembler* masm,
    Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use object in edx as a result holder
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}
2122
2123
// Pushes the two operand registers (edx = left, then eax = right) onto
// the stack underneath the return address: pop the return address into
// ecx, push the args, then push the return address back on top.
void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ pop(ecx);   // Return address.
  __ push(edx);  // Left operand.
  __ push(eax);  // Right operand.
  __ push(ecx);  // Restore return address on top.
}
2130
2131
// Computes a transcendental function (sin/cos/log, per type_) of a double,
// consulting the per-isolate transcendental cache first.  The cache is
// keyed by the raw 64 bits of the input double (low word in ebx, high
// word in edx) hashed into ecx.
void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     esp[4]: tagged number input argument (should be number).
  //     esp[0]: return address.
  //   Output:
  //     eax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     esp[0]: return address.
  //     xmm1: untagged double input argument
  //   Output:
  //     xmm1: untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    // Test that eax is a number.
    Label input_not_smi;
    Label loaded;
    __ mov(eax, Operand(esp, kPointerSize));
    __ test(eax, Immediate(kSmiTagMask));
    __ j(not_zero, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the low and high words of the double into ebx, edx.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ sar(eax, 1);  // Untag: arithmetic shift drops the smi tag bit.
    __ sub(Operand(esp), Immediate(2 * kPointerSize));
    __ mov(Operand(esp, 0), eax);
    __ fild_s(Operand(esp, 0));   // int32 -> x87.
    __ fst_d(Operand(esp, 0));    // Store as double to read its raw words.
    __ pop(edx);
    __ pop(ebx);
    __ jmp(&loaded, Label::kNear);
    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
    Factory* factory = masm->isolate()->factory();
    __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // low and high words into ebx, edx.
    __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
    __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));

    __ bind(&loaded);
  } else {  // UNTAGGED.
    // Extract the raw words of xmm1 into edx (high) and ebx (low).
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatures::Scope sse4_scope(SSE4_1);
      __ pextrd(Operand(edx), xmm1, 0x1);  // copy xmm1[63..32] to edx.
    } else {
      __ pshufd(xmm0, xmm1, 0x1);
      __ movd(Operand(edx), xmm0);
    }
    __ movd(Operand(ebx), xmm1);
  }

  // ST[0] or xmm1 == double value
  // ebx = low 32 bits of double value
  // edx = high 32 bits of double value
  // Compute hash (the shifts are arithmetic):
  //   h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
  __ mov(ecx, ebx);
  __ xor_(ecx, Operand(edx));
  __ mov(eax, ecx);
  __ sar(eax, 16);
  __ xor_(ecx, Operand(eax));
  __ mov(eax, ecx);
  __ sar(eax, 8);
  __ xor_(ecx, Operand(eax));
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ and_(Operand(ecx),
          Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  // ST[0] or xmm1 == double value.
  // ebx = low 32 bits of double value.
  // edx = high 32 bits of double value.
  // ecx = TranscendentalCache::hash(double value).
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ mov(eax, Immediate(cache_array));
  int cache_array_index =
      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
  __ mov(eax, Operand(eax, cache_array_index));
  // Eax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ test(eax, Operand(eax));
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements match expectations.
  { TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);  // Two uint_32's and a pointer.
    CHECK_EQ(0, elem_in0 - elem_start);
    CHECK_EQ(kIntSize, elem_in1 - elem_start);
    CHECK_EQ(2 * kIntSize, elem_out - elem_start);
  }
#endif
  // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
  __ lea(ecx, Operand(ecx, ecx, times_2, 0));
  __ lea(ecx, Operand(eax, ecx, times_4, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmp(ebx, Operand(ecx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  __ cmp(edx, Operand(ecx, kIntSize));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!  Result HeapNumber is the third word of the element.
  __ mov(eax, Operand(ecx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);  // Discard the input still on the x87 stack.
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  // Update cache with new value.
  // We are short on registers, so use no_reg as scratch.
  // This gives slightly larger code.
  if (tagged) {
    __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    // Move the input from xmm1 onto the x87 stack for GenerateOperation.
    __ sub(Operand(esp), Immediate(kDoubleSize));
    __ movdbl(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    __ add(Operand(esp), Immediate(kDoubleSize));
  }
  GenerateOperation(masm);
  // Fill the cache element: input words and the result HeapNumber.
  __ mov(Operand(ecx, 0), ebx);
  __ mov(Operand(ecx, kIntSize), edx);
  __ mov(Operand(ecx, 2 * kIntSize), eax);
  __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ sub(Operand(esp), Immediate(kDoubleSize));
    __ movdbl(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    GenerateOperation(masm);
    __ fstp_d(Operand(esp, 0));
    __ movdbl(xmm1, Operand(esp, 0));
    __ add(Operand(esp), Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    __ EnterInternalFrame();
    // Allocate an unused object bigger than a HeapNumber.
    __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
    __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    __ LeaveInternalFrame();
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);  // Remove the input from the x87 stack before tail call.
    __ bind(&runtime_call);
    ExternalReference runtime =
        ExternalReference(RuntimeFunction(), masm->isolate());
    __ TailCallExternalReference(runtime, 1, 1);
  } else {  // UNTAGGED.
    // Box xmm1 into a fresh HeapNumber and call the runtime on it.
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
    __ EnterInternalFrame();
    __ push(eax);
    __ CallRuntime(RuntimeFunction(), 1);
    __ LeaveInternalFrame();
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }
}
2320
2321
2322Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
2323 switch (type_) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002324 case TranscendentalCache::SIN: return Runtime::kMath_sin;
2325 case TranscendentalCache::COS: return Runtime::kMath_cos;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002326 case TranscendentalCache::LOG: return Runtime::kMath_log;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002327 default:
2328 UNIMPLEMENTED();
2329 return Runtime::kAbort;
2330 }
2331}
2332
2333
// Emits the actual x87 computation for sin/cos/log.  Expects the input
// on top of the FPU stack and leaves the result there.  For sin/cos the
// argument is range-reduced with fprem1 when |x| >= 2^63; infinities and
// NaN produce NaN.
void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
  // Only free register is edi.
  // Input value is on FP stack, and also in ebx/edx.
  // Input value is possibly in xmm1.
  // Address of result (a newly allocated HeapNumber) may be in eax.
  if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range, done;
    // If argument is outside the range -2^63..2^63, fsin/cos doesn't
    // work. We must reduce it to the appropriate range.
    __ mov(edi, edx);
    __ and_(Operand(edi), Immediate(0x7ff00000));  // Exponent only.
    int supported_exponent_limit =
        (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
    __ cmp(Operand(edi), Immediate(supported_exponent_limit));
    __ j(below, &in_range, Label::kNear);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmp(Operand(edi), Immediate(0x7ff00000));
    Label non_nan_result;
    __ j(not_equal, &non_nan_result, Label::kNear);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);  // Discard the input.
    // NaN is represented by 0x7ff8000000000000.
    __ push(Immediate(0x7ff80000));
    __ push(Immediate(0));
    __ fld_d(Operand(esp, 0));
    __ add(Operand(esp), Immediate(2 * kPointerSize));
    __ jmp(&done, Label::kNear);

    __ bind(&non_nan_result);

    // Use fpmod to restrict argument to the range +/-2*PI.
    __ mov(edi, eax);  // Save eax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);  // st(0) = pi + pi = 2*pi.
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ test(Operand(eax), Immediate(5));
      __ j(zero, &no_exceptions, Label::kNear);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1)
    {
      Label partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ test(Operand(eax), Immediate(0x400 /* C2 */));
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);  // Overwrite original input with the reduced value.
    __ fstp(0);  // Pop the 2*pi constant.
    __ mov(eax, edi);  // Restore eax (allocated HeapNumber pointer).

    // FPU Stack: input % 2*pi
    __ bind(&in_range);
    switch (type_) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    // Natural log: ln(x) = ln(2) * log2(x), computed via fyl2x.
    ASSERT(type_ == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}
2421
2422
// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are left and right integers for a bit op.
// Smis are untagged directly; heap numbers go through IntegerConvert;
// undefined converts to zero; anything else jumps to conversion_failure.
// Clobbers ebx and edx.
void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
                                                 bool use_sse3,
                                                 Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  // Test if arg1 is a Smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &arg1_is_object);

  __ SmiUntag(edx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  Factory* factory = masm->isolate()->factory();
  __ cmp(edx, factory->undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(edx, Immediate(0));
  __ jmp(&load_arg2);

  // arg1 is a heap object: only heap numbers are convertible.
  __ bind(&arg1_is_object);
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ebx, factory->heap_number_map());
  __ j(not_equal, &check_undefined_arg1);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm, edx, use_sse3, conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);

  // Test if arg2 is a Smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &arg2_is_object);

  __ SmiUntag(eax);
  __ mov(ecx, eax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ cmp(eax, factory->undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(ecx, Immediate(0));
  __ jmp(&done);

  // arg2 is a heap object: only heap numbers are convertible.
  __ bind(&arg2_is_object);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(ebx, factory->heap_number_map());
  __ j(not_equal, &check_undefined_arg2);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm, eax, use_sse3, conversion_failure);
  __ bind(&done);
  // Move the left integer into eax (right integer is already in ecx).
  __ mov(eax, edx);
}
2485
2486
// Verifies that the integers produced by LoadUnknownsAsIntegers fit in
// int32.  Intentionally a no-op on ia32 — presumably the conversion path
// (IntegerConvert / |conversion_failure|) already rejects values that are
// not representable, and the signature is kept for symmetry with other
// ports.  NOTE(review): confirm against the other architecture backends.
void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm,
                                                       bool use_sse3,
                                                       Label* not_int32) {
  return;
}
2492
2493
// Loads the value in |number| (a smi or heap number) onto the x87 FPU
// stack as a double.  |number| is preserved: a heap number is read in
// place, and a smi is untagged only temporarily via the stack
// (push / fild_s / pop leaves the original tagged value restored?  No —
// SmiUntag is not re-tagged, so on the smi path |number| is left
// untagged.  NOTE(review): callers must not rely on |number| afterwards
// on the smi path).
void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ test(number, Immediate(kSmiTagMask));
  __ j(zero, &load_smi, Label::kNear);
  // Heap number: load the IEEE double directly.
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  // Smi: untag, then convert the int32 to double via memory (fild_s has
  // no register operand form).
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}
2511
2512
// Loads the two operands edx and eax (each a smi or heap number, assumed
// valid — no type check) into xmm0 and xmm1 respectively as doubles.
// Smis are re-tagged after conversion so edx/eax still hold their
// original tagged values on exit (needed for heap-number overwriting).
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
  Label load_smi_edx, load_eax, load_smi_eax, done;
  // Load operand in edx into xmm0.
  __ test(edx, Immediate(kSmiTagMask));
  // Argument in edx is a smi.
  __ j(zero, &load_smi_edx, Label::kNear);
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  __ bind(&load_eax);
  // Load operand in eax into xmm1.
  __ test(eax, Immediate(kSmiTagMask));
  // Argument in eax is a smi.
  __ j(zero, &load_smi_eax, Label::kNear);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);

  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.

  __ bind(&done);
}
2542
2543
// Checked variant of LoadSSE2Operands: loads edx into xmm0 and eax into
// xmm1 as doubles, branching to |not_numbers| if either operand is
// neither a smi nor a heap number.  As above, smis are re-tagged so the
// registers keep their original tagged values on exit.
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ test(edx, Immediate(kSmiTagMask));
  // Argument in edx is a smi.
  __ j(zero, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ test(eax, Immediate(kSmiTagMask));
  // Argument in eax is a smi.
  __ j(zero, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}
2577
2578
// Loads two operands known to be smis (left in edx, right in eax) into
// xmm0 and xmm1 as doubles.  The operand registers themselves are left
// untouched; |scratch| is clobbered for the untagged intermediate.
void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
                                       Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm0, Operand(scratch));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm1, Operand(scratch));
}
2592
2593
// Branches to |non_int32| unless the doubles in xmm0 and xmm1 are both
// exactly representable as int32.  Each value is truncated to int32 and
// converted back; a round trip that does not compare equal means the
// value had a fractional part or was out of int32 range.  The carry
// branch catches the unordered result of ucomisd (CF is set when the
// comparison is unordered), i.e. NaN inputs.  Clobbers |scratch| and
// xmm2.
void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm,
                                                    Label* non_int32,
                                                    Register scratch) {
  __ cvttsd2si(scratch, Operand(xmm0));
  __ cvtsi2sd(xmm2, Operand(scratch));
  __ ucomisd(xmm0, xmm2);
  __ j(not_zero, non_int32);
  __ j(carry, non_int32);
  __ cvttsd2si(scratch, Operand(xmm1));
  __ cvtsi2sd(xmm2, Operand(scratch));
  __ ucomisd(xmm1, xmm2);
  __ j(not_zero, non_int32);
  __ j(carry, non_int32);
}
2608
2609
// Loads both operands (each a smi or heap number, assumed valid) onto the
// x87 FPU stack as doubles.  Operands come either from registers
// (edx = first, eax = second) or from the stack (esp[2*kPointerSize] and
// esp[1*kPointerSize]) depending on |arg_location|.  The first operand
// ends up below the second on the FPU stack.  Clobbers |scratch|.
void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
                                            Register scratch,
                                            ArgLocation arg_location) {
  Label load_smi_1, load_smi_2, done_load_1, done;
  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, edx);
  } else {
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_1, Label::kNear);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ bind(&done_load_1);

  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, eax);
  } else {
    __ mov(scratch, Operand(esp, 1 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_2, Label::kNear);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  // Smi paths: untag and convert via memory, since fild_s takes a memory
  // operand only.  The original operand registers are not modified —
  // only the scratch copy is untagged.
  __ bind(&load_smi_1);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
  __ jmp(&done_load_1);

  __ bind(&load_smi_2);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);

  __ bind(&done);
}
2649
2650
// Loads two operands known to be smis (left in edx, right in eax) onto
// the x87 FPU stack as doubles, left below right.  The second fild_s
// reuses the stack slot created for the first (mov instead of push/pop
// pair) to save an instruction.  Clobbers |scratch|; edx/eax unchanged.
void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
                                        Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ mov(Operand(esp, 0), scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
}
2667
2668
// Branches to |non_float| unless both operands (edx and eax) are numbers,
// i.e. each is either a smi or a heap number.  Falls through when both
// are numbers.  Clobbers |scratch| (used to hold the operand maps).
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &test_other, Label::kNear);  // argument in edx is OK
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &done, Label::kNear);  // argument in eax is OK
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}
2692
2693
// Verifies that x87-loaded operands are int32-representable.
// Intentionally a no-op on ia32 (mirrors CheckLoadedIntegersWereInt32);
// the signature is kept for interface symmetry with other ports.
// NOTE(review): presumably the int32 check is performed elsewhere on the
// non-SSE2 path — confirm before relying on this.
void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
                                                     Label* non_int32) {
  return;
}
2698
2699
// Stub for Math.pow(base, exponent).
//
// Stack on entry: esp[4] = exponent, esp[8] = base (both smi or heap
// number).  Returns a freshly allocated heap number in eax and pops the
// two arguments.  Fast paths:
//   - smi exponent: exponentiation by squaring (square-and-multiply);
//   - exponent == 0.5 or -0.5: sqrt / reciprocal sqrt.
// Everything else (non-number inputs, NaN exponent, infinite base,
// overflow, allocation failure) tail-calls the runtime.
void MathPowStub::Generate(MacroAssembler* masm) {
  // Registers are used as follows:
  // edx = base
  // eax = exponent
  // ecx = temporary, result

  CpuFeatures::Scope use_sse2(SSE2);
  Label allocate_return, call_runtime;

  // Load input parameters.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Save 1 in xmm3 - we need this several times later on.
  __ mov(ecx, Immediate(1));
  __ cvtsi2sd(xmm3, Operand(ecx));

  Label exponent_nonsmi;
  Label base_nonsmi;
  // If the exponent is a heap number go to that specific case.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &exponent_nonsmi);
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &base_nonsmi);

  // Optimized version when both exponent and base are smis.
  Label powi;
  __ SmiUntag(edx);
  __ cvtsi2sd(xmm0, Operand(edx));
  __ jmp(&powi);
  // exponent is smi and base is a heapnumber.
  __ bind(&base_nonsmi);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         factory->heap_number_map());
  __ j(not_equal, &call_runtime);

  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  // Optimized version of pow if exponent is a smi.
  // xmm0 contains the base.
  __ bind(&powi);
  __ SmiUntag(eax);

  // Save exponent in base as we need to check if exponent is negative later.
  // We know that base and exponent are in different registers.
  __ mov(edx, eax);

  // Get absolute value of exponent.
  Label no_neg;
  __ cmp(eax, 0);
  __ j(greater_equal, &no_neg, Label::kNear);
  __ neg(eax);
  __ bind(&no_neg);

  // Load xmm1 with 1.
  __ movsd(xmm1, xmm3);
  Label while_true;
  Label no_multiply;

  // Exponentiation by squaring: consume the exponent bit by bit (lowest
  // first); xmm1 accumulates the result, xmm0 holds base^(2^i).  The
  // loop exits when shr leaves eax zero (ZF set by shr).
  __ bind(&while_true);
  __ shr(eax, 1);
  __ j(not_carry, &no_multiply, Label::kNear);
  __ mulsd(xmm1, xmm0);
  __ bind(&no_multiply);
  __ mulsd(xmm0, xmm0);
  __ j(not_zero, &while_true);

  // base has the original value of the exponent - if the exponent is
  // negative return 1/result.
  __ test(edx, Operand(edx));
  __ j(positive, &allocate_return);
  // Special case if xmm1 has reached infinity.
  // NOTE(review): 0x7FB00000 has an all-ones exponent field but a
  // NONZERO mantissa, which is a single-precision NaN bit pattern, not
  // +Infinity (0x7F800000) — and ucomisd against a NaN sets ZF, so the
  // following j(equal) is taken for unordered comparisons too.  Confirm
  // this constant/branch combination is intended.
  __ mov(ecx, Immediate(0x7FB00000));
  __ movd(xmm0, Operand(ecx));
  __ cvtss2sd(xmm0, xmm0);
  __ ucomisd(xmm0, xmm1);
  __ j(equal, &call_runtime);
  // Negative exponent: result = 1 / base^|exponent| (xmm3 holds 1.0).
  __ divsd(xmm3, xmm1);
  __ movsd(xmm1, xmm3);
  __ jmp(&allocate_return);

  // exponent (or both) is a heapnumber - no matter what we should now work
  // on doubles.
  __ bind(&exponent_nonsmi);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         factory->heap_number_map());
  __ j(not_equal, &call_runtime);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  // Test if exponent is nan.
  __ ucomisd(xmm1, xmm1);
  __ j(parity_even, &call_runtime);

  Label base_not_smi;
  Label handle_special_cases;
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &base_not_smi, Label::kNear);
  __ SmiUntag(edx);
  __ cvtsi2sd(xmm0, Operand(edx));
  __ jmp(&handle_special_cases, Label::kNear);

  __ bind(&base_not_smi);
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         factory->heap_number_map());
  __ j(not_equal, &call_runtime);
  // Reject NaN/Infinity bases by checking for an all-ones biased
  // exponent field in the heap number.
  __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
  __ and_(ecx, HeapNumber::kExponentMask);
  __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask));
  // base is NaN or +/-Infinity
  __ j(greater_equal, &call_runtime);
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  // base is in xmm0 and exponent is in xmm1.
  __ bind(&handle_special_cases);
  Label not_minus_half;
  // Test for -0.5.
  // Load xmm2 with -0.5.
  __ mov(ecx, Immediate(0xBF000000));
  __ movd(xmm2, Operand(ecx));
  __ cvtss2sd(xmm2, xmm2);
  // xmm2 now has -0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &not_minus_half, Label::kNear);

  // Calculates reciprocal of square root.
  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
  // Adding +0 first (xorps then addsd) turns -0 into +0.
  __ xorps(xmm1, xmm1);
  __ addsd(xmm1, xmm0);
  __ sqrtsd(xmm1, xmm1);
  __ divsd(xmm3, xmm1);
  __ movsd(xmm1, xmm3);
  __ jmp(&allocate_return);

  // Test for 0.5.
  __ bind(&not_minus_half);
  // Load xmm2 with 0.5.
  // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
  __ addsd(xmm2, xmm3);
  // xmm2 now has 0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &call_runtime);
  // Calculates square root.
  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
  __ xorps(xmm1, xmm1);
  __ addsd(xmm1, xmm0);
  __ sqrtsd(xmm1, xmm1);

  // Box the double result in xmm1 into a new heap number and return it.
  __ bind(&allocate_return);
  __ AllocateHeapNumber(ecx, eax, edx, &call_runtime);
  __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1);
  __ mov(eax, ecx);
  __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
}
2856
2857
// Stub for reading arguments[i] without materializing the arguments
// object.  Handles both ordinary frames and arguments-adaptor frames;
// non-smi keys and out-of-bounds indices fall back to the runtime.
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &slow);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, Operand(eax));
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  // Parameters are addressed downward from ebp; both the count (eax) and
  // the key (edx) are smis, i.e. already value * 2, so times_2 scaling
  // yields value * kPointerSize.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, Operand(ecx));
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
2916
2917
// Stub that allocates and initializes a JS arguments object (plus its
// elements FixedArray) in new space, copying the actual arguments from
// the stack.  Falls back to Runtime::kNewArgumentsFast when allocation
// fails.  Returns the arguments object in eax and pops the three
// on-stack parameters.
void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function
  // (NOTE(review): the original comment said esp[16], but the callee is
  // read at Operand(esp, 3 * kPointerSize) below, i.e. esp[12].)

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  // ecx is the argument count as a smi; times_2 on a smi gives
  // count * kPointerSize.  Zero arguments means no elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, Operand(ecx));
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(Operand(ecx), Immediate(GetArgumentsObjectSize()));

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
  __ mov(edi, Operand(edi,
                      Context::SlotOffset(GetArgumentsBoilerplateIndex())));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  if (type_ == NEW_NON_STRICT) {
    // Setup the callee in-object property.
    STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
    __ mov(ebx, Operand(esp, 3 * kPointerSize));
    __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                             Heap::kArgumentsCalleeIndex * kPointerSize),
           ebx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                           Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, Operand(ecx));
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  // The elements array lives immediately after the arguments object in
  // the single allocation made above.
  __ lea(edi, Operand(eax, GetArgumentsObjectSize()));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));

  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  // edx walks the stack parameters downward, edi walks the elements
  // array upward; ecx counts down the remaining slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(Operand(edi), Immediate(kPointerSize));
  __ sub(Operand(edx), Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
3025
3026
3027void RegExpExecStub::Generate(MacroAssembler* masm) {
3028 // Just jump directly to runtime if native RegExp is not selected at compile
3029 // time or if regexp entry in generated code is turned off runtime switch or
3030 // at compilation.
3031#ifdef V8_INTERPRETED_REGEXP
3032 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3033#else // V8_INTERPRETED_REGEXP
3034 if (!FLAG_regexp_entry_native) {
3035 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3036 return;
3037 }
3038
3039 // Stack frame on entry.
3040 // esp[0]: return address
3041 // esp[4]: last_match_info (expected JSArray)
3042 // esp[8]: previous index
3043 // esp[12]: subject string
3044 // esp[16]: JSRegExp object
3045
3046 static const int kLastMatchInfoOffset = 1 * kPointerSize;
3047 static const int kPreviousIndexOffset = 2 * kPointerSize;
3048 static const int kSubjectOffset = 3 * kPointerSize;
3049 static const int kJSRegExpOffset = 4 * kPointerSize;
3050
3051 Label runtime, invoke_regexp;
3052
3053 // Ensure that a RegExp stack is allocated.
3054 ExternalReference address_of_regexp_stack_memory_address =
Steve Block44f0eee2011-05-26 01:26:41 +01003055 ExternalReference::address_of_regexp_stack_memory_address(
3056 masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003057 ExternalReference address_of_regexp_stack_memory_size =
Steve Block44f0eee2011-05-26 01:26:41 +01003058 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003059 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3060 __ test(ebx, Operand(ebx));
Ben Murdoch257744e2011-11-30 15:57:28 +00003061 __ j(zero, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003062
3063 // Check that the first argument is a JSRegExp object.
3064 __ mov(eax, Operand(esp, kJSRegExpOffset));
3065 STATIC_ASSERT(kSmiTag == 0);
3066 __ test(eax, Immediate(kSmiTagMask));
3067 __ j(zero, &runtime);
3068 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
3069 __ j(not_equal, &runtime);
3070 // Check that the RegExp has been compiled (data contains a fixed array).
3071 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3072 if (FLAG_debug_code) {
3073 __ test(ecx, Immediate(kSmiTagMask));
3074 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
3075 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
3076 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
3077 }
3078
3079 // ecx: RegExp data (FixedArray)
3080 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
3081 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
3082 __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
3083 __ j(not_equal, &runtime);
3084
3085 // ecx: RegExp data (FixedArray)
3086 // Check that the number of captures fit in the static offsets vector buffer.
3087 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
3088 // Calculate number of capture registers (number_of_captures + 1) * 2. This
3089 // uses the asumption that smis are 2 * their untagged value.
3090 STATIC_ASSERT(kSmiTag == 0);
3091 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3092 __ add(Operand(edx), Immediate(2)); // edx was a smi.
3093 // Check that the static offsets vector buffer is large enough.
3094 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
3095 __ j(above, &runtime);
3096
3097 // ecx: RegExp data (FixedArray)
3098 // edx: Number of capture registers
3099 // Check that the second argument is a string.
3100 __ mov(eax, Operand(esp, kSubjectOffset));
3101 __ test(eax, Immediate(kSmiTagMask));
3102 __ j(zero, &runtime);
3103 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
3104 __ j(NegateCondition(is_string), &runtime);
3105 // Get the length of the string to ebx.
3106 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
3107
3108 // ebx: Length of subject string as a smi
3109 // ecx: RegExp data (FixedArray)
3110 // edx: Number of capture registers
3111 // Check that the third argument is a positive smi less than the subject
3112 // string length. A negative value will be greater (unsigned comparison).
3113 __ mov(eax, Operand(esp, kPreviousIndexOffset));
3114 __ test(eax, Immediate(kSmiTagMask));
3115 __ j(not_zero, &runtime);
3116 __ cmp(eax, Operand(ebx));
3117 __ j(above_equal, &runtime);
3118
3119 // ecx: RegExp data (FixedArray)
3120 // edx: Number of capture registers
3121 // Check that the fourth object is a JSArray object.
3122 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3123 __ test(eax, Immediate(kSmiTagMask));
3124 __ j(zero, &runtime);
3125 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3126 __ j(not_equal, &runtime);
3127 // Check that the JSArray is in fast case.
3128 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3129 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01003130 Factory* factory = masm->isolate()->factory();
3131 __ cmp(eax, factory->fixed_array_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003132 __ j(not_equal, &runtime);
3133 // Check that the last match info has space for the capture registers and the
3134 // additional information.
3135 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
3136 __ SmiUntag(eax);
3137 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
3138 __ cmp(edx, Operand(eax));
3139 __ j(greater, &runtime);
3140
3141 // ecx: RegExp data (FixedArray)
3142 // Check the representation and encoding of the subject string.
3143 Label seq_ascii_string, seq_two_byte_string, check_code;
3144 __ mov(eax, Operand(esp, kSubjectOffset));
3145 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3146 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3147 // First check for flat two byte string.
3148 __ and_(ebx,
3149 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
3150 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
3151 __ j(zero, &seq_two_byte_string);
3152 // Any other flat string must be a flat ascii string.
3153 __ test(Operand(ebx),
3154 Immediate(kIsNotStringMask | kStringRepresentationMask));
3155 __ j(zero, &seq_ascii_string);
3156
3157 // Check for flat cons string.
3158 // A flat cons string is a cons string where the second part is the empty
3159 // string. In that case the subject string is just the first part of the cons
3160 // string. Also in this case the first part of the cons string is known to be
3161 // a sequential string or an external string.
3162 STATIC_ASSERT(kExternalStringTag != 0);
3163 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
3164 __ test(Operand(ebx),
3165 Immediate(kIsNotStringMask | kExternalStringTag));
3166 __ j(not_zero, &runtime);
3167 // String is a cons string.
3168 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01003169 __ cmp(Operand(edx), factory->empty_string());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003170 __ j(not_equal, &runtime);
3171 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
3172 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3173 // String is a cons string with empty second part.
3174 // eax: first part of cons string.
3175 // ebx: map of first part of cons string.
3176 // Is first part a flat two byte string?
3177 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3178 kStringRepresentationMask | kStringEncodingMask);
3179 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
3180 __ j(zero, &seq_two_byte_string);
3181 // Any other flat string must be ascii.
3182 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3183 kStringRepresentationMask);
3184 __ j(not_zero, &runtime);
3185
3186 __ bind(&seq_ascii_string);
3187 // eax: subject string (flat ascii)
3188 // ecx: RegExp data (FixedArray)
3189 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
3190 __ Set(edi, Immediate(1)); // Type is ascii.
3191 __ jmp(&check_code);
3192
3193 __ bind(&seq_two_byte_string);
3194 // eax: subject string (flat two byte)
3195 // ecx: RegExp data (FixedArray)
3196 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
3197 __ Set(edi, Immediate(0)); // Type is two byte.
3198
3199 __ bind(&check_code);
3200 // Check that the irregexp code has been generated for the actual string
3201 // encoding. If it has, the field contains a code object otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +00003202 // a smi (code flushing support).
3203 __ JumpIfSmi(edx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003204
3205 // eax: subject string
3206 // edx: code
3207 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
3208 // Load used arguments before starting to push arguments for call to native
3209 // RegExp code to avoid handling changing stack height.
3210 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
3211 __ SmiUntag(ebx); // Previous index from smi.
3212
3213 // eax: subject string
3214 // ebx: previous index
3215 // edx: code
3216 // edi: encoding of subject string (1 if ascii 0 if two_byte);
3217 // All checks done. Now push arguments for native regexp code.
Steve Block44f0eee2011-05-26 01:26:41 +01003218 Counters* counters = masm->isolate()->counters();
3219 __ IncrementCounter(counters->regexp_entry_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003220
Steve Block44f0eee2011-05-26 01:26:41 +01003221 // Isolates: note we add an additional parameter here (isolate pointer).
3222 static const int kRegExpExecuteArguments = 8;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003223 __ EnterApiExitFrame(kRegExpExecuteArguments);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003224
Steve Block44f0eee2011-05-26 01:26:41 +01003225 // Argument 8: Pass current isolate address.
3226 __ mov(Operand(esp, 7 * kPointerSize),
3227 Immediate(ExternalReference::isolate_address()));
3228
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003229 // Argument 7: Indicate that this is a direct call from JavaScript.
3230 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
3231
3232 // Argument 6: Start (high end) of backtracking stack memory area.
3233 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3234 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3235 __ mov(Operand(esp, 5 * kPointerSize), ecx);
3236
3237 // Argument 5: static offsets vector buffer.
3238 __ mov(Operand(esp, 4 * kPointerSize),
Steve Block44f0eee2011-05-26 01:26:41 +01003239 Immediate(ExternalReference::address_of_static_offsets_vector(
3240 masm->isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003241
3242 // Argument 4: End of string data
3243 // Argument 3: Start of string data
Ben Murdoch257744e2011-11-30 15:57:28 +00003244 Label setup_two_byte, setup_rest;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003245 __ test(edi, Operand(edi));
3246 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003247 __ j(zero, &setup_two_byte, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003248 __ SmiUntag(edi);
3249 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
3250 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3251 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
3252 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
Ben Murdoch257744e2011-11-30 15:57:28 +00003253 __ jmp(&setup_rest, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003254
3255 __ bind(&setup_two_byte);
3256 STATIC_ASSERT(kSmiTag == 0);
3257 STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2).
3258 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
3259 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3260 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
3261 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3262
3263 __ bind(&setup_rest);
3264
3265 // Argument 2: Previous index.
3266 __ mov(Operand(esp, 1 * kPointerSize), ebx);
3267
3268 // Argument 1: Subject string.
3269 __ mov(Operand(esp, 0 * kPointerSize), eax);
3270
3271 // Locate the code entry and call it.
3272 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003273 __ call(Operand(edx));
3274
3275 // Drop arguments and come back to JS mode.
3276 __ LeaveApiExitFrame();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003277
3278 // Check the result.
3279 Label success;
3280 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
Ben Murdoch257744e2011-11-30 15:57:28 +00003281 __ j(equal, &success);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003282 Label failure;
3283 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
Ben Murdoch257744e2011-11-30 15:57:28 +00003284 __ j(equal, &failure);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003285 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
3286 // If not exception it can only be retry. Handle that in the runtime system.
3287 __ j(not_equal, &runtime);
3288 // Result must now be exception. If there is no pending exception already a
3289 // stack overflow (on the backtrack stack) was detected in RegExp code but
3290 // haven't created the exception yet. Handle that in the runtime system.
3291 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Steve Block44f0eee2011-05-26 01:26:41 +01003292 ExternalReference pending_exception(Isolate::k_pending_exception_address,
3293 masm->isolate());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003294 __ mov(edx,
Steve Block44f0eee2011-05-26 01:26:41 +01003295 Operand::StaticVariable(ExternalReference::the_hole_value_location(
3296 masm->isolate())));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003297 __ mov(eax, Operand::StaticVariable(pending_exception));
3298 __ cmp(edx, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003299 __ j(equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003300 // For exception, throw the exception again.
3301
3302 // Clear the pending exception variable.
3303 __ mov(Operand::StaticVariable(pending_exception), edx);
3304
3305 // Special handling of termination exceptions which are uncatchable
3306 // by javascript code.
Steve Block44f0eee2011-05-26 01:26:41 +01003307 __ cmp(eax, factory->termination_exception());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003308 Label throw_termination_exception;
3309 __ j(equal, &throw_termination_exception);
3310
3311 // Handle normal exception by following handler chain.
3312 __ Throw(eax);
3313
3314 __ bind(&throw_termination_exception);
3315 __ ThrowUncatchable(TERMINATION, eax);
3316
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003317 __ bind(&failure);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003318 // For failure to match, return null.
Steve Block44f0eee2011-05-26 01:26:41 +01003319 __ mov(Operand(eax), factory->null_value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003320 __ ret(4 * kPointerSize);
3321
3322 // Load RegExp data.
3323 __ bind(&success);
3324 __ mov(eax, Operand(esp, kJSRegExpOffset));
3325 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3326 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
3327 // Calculate number of capture registers (number_of_captures + 1) * 2.
3328 STATIC_ASSERT(kSmiTag == 0);
3329 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3330 __ add(Operand(edx), Immediate(2)); // edx was a smi.
3331
3332 // edx: Number of capture registers
3333 // Load last_match_info which is still known to be a fast case JSArray.
3334 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3335 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3336
3337 // ebx: last_match_info backing store (FixedArray)
3338 // edx: number of capture registers
3339 // Store the capture count.
3340 __ SmiTag(edx); // Number of capture registers to smi.
3341 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
3342 __ SmiUntag(edx); // Number of capture registers back from smi.
3343 // Store last subject and last input.
3344 __ mov(eax, Operand(esp, kSubjectOffset));
3345 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
3346 __ mov(ecx, ebx);
3347 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
3348 __ mov(eax, Operand(esp, kSubjectOffset));
3349 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
3350 __ mov(ecx, ebx);
3351 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
3352
3353 // Get the static offsets vector filled by the native regexp code.
3354 ExternalReference address_of_static_offsets_vector =
Steve Block44f0eee2011-05-26 01:26:41 +01003355 ExternalReference::address_of_static_offsets_vector(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003356 __ mov(ecx, Immediate(address_of_static_offsets_vector));
3357
3358 // ebx: last_match_info backing store (FixedArray)
3359 // ecx: offsets vector
3360 // edx: number of capture registers
Ben Murdoch257744e2011-11-30 15:57:28 +00003361 Label next_capture, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003362 // Capture register counter starts from number of capture registers and
3363 // counts down until wraping after zero.
3364 __ bind(&next_capture);
3365 __ sub(Operand(edx), Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +00003366 __ j(negative, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003367 // Read the value from the static offsets vector buffer.
3368 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
3369 __ SmiTag(edi);
3370 // Store the smi value in the last match info.
3371 __ mov(FieldOperand(ebx,
3372 edx,
3373 times_pointer_size,
3374 RegExpImpl::kFirstCaptureOffset),
3375 edi);
3376 __ jmp(&next_capture);
3377 __ bind(&done);
3378
3379 // Return last match info.
3380 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3381 __ ret(4 * kPointerSize);
3382
3383 // Do the runtime call to execute the regexp.
3384 __ bind(&runtime);
3385 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3386#endif // V8_INTERPRETED_REGEXP
3387}
3388
3389
// Builds the result object of a successful regexp match: a JSRegExpResult
// (JSArray layout plus "index" and "input" fields) with a FixedArray
// backing store pre-filled with the-hole.
//
// Stack arguments (above the return address):
//   esp[3 * kPointerSize]: number of elements, as a smi.
//   esp[2 * kPointerSize]: match index.
//   esp[1 * kPointerSize]: input string.
//
// If the length is a smi no larger than kMaxInlineLength, the JSRegExpResult
// and its elements FixedArray are allocated inline in new space and the
// object is returned in eax (the three arguments are popped). Otherwise the
// stub tail-calls Runtime::kRegExpConstructResult.
void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  // Load the length argument; go slow if it is not a smi or exceeds the
  // inline-allocation limit.
  __ mov(ebx, Operand(esp, kPointerSize * 3));
  __ test(ebx, Immediate(kSmiTagMask));
  __ j(not_zero, &slowcase);
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
  __ j(above, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2, so the smi in ebx can be
  // used directly as "number of elements times 2" below.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // Allocate RegExpResult followed by FixedArray with size in ebx.
  // JSArray:  [Map][empty properties][Elements][Length-smi][index][input]
  // Elements: [Map][Length][..elements..]
  __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                        times_half_pointer_size,
                        ebx,  // In: Number of elements (times 2, being a smi)
                        eax,  // Out: Start of allocation (tagged).
                        ecx,  // Out: End of allocation.
                        edx,  // Scratch register
                        &slowcase,
                        TAG_OBJECT);
  // eax: Start of allocated area, object-tagged.

  // Set JSArray map to global.regexp_result_map().
  // Set empty properties FixedArray.
  // Set elements to point to FixedArray allocated right after the JSArray.
  // Interleave operations for better latency.
  __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
  Factory* factory = masm->isolate()->factory();
  __ mov(ecx, Immediate(factory->empty_fixed_array()));
  __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
  __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
  __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);

  // Set input, index and length fields from arguments.
  __ mov(ecx, Operand(esp, kPointerSize * 1));
  __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 2));
  __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 3));
  __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);

  // Fill out the elements FixedArray.
  // eax: JSArray.
  // ebx: FixedArray.
  // ecx: Number of elements in array, as smi.

  // Set map.
  __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(factory->fixed_array_map()));
  // Set length.
  __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
  // Fill contents of fixed-array with the-hole.
  __ SmiUntag(ecx);
  __ mov(edx, Immediate(factory->the_hole_value()));
  __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
  // Fill fixed array elements with hole.
  // eax: JSArray.
  // ecx: Number of elements to fill.
  // ebx: Start of elements in FixedArray.
  // edx: the hole.
  Label loop;
  // Set flags for the first loop-exit test; later iterations use the flags
  // left by the sub below.
  __ test(ecx, Operand(ecx));
  __ bind(&loop);
  __ j(less_equal, &done, Label::kNear);  // Jump if ecx is negative or zero.
  __ sub(Operand(ecx), Immediate(1));
  __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
  __ jmp(&loop);

  __ bind(&done);
  __ ret(3 * kPointerSize);

  __ bind(&slowcase);
  // Length check failed or inline allocation failed: build the result object
  // in the runtime system instead.
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}
3470
3471
// Looks up |object| in the heap's number-string cache.
//
// On a hit, loads the cached string into |result| and increments the
// number_to_string_native counter. On a miss — or when |object| is neither a
// smi nor a heap number (only checked if |object_is_smi| is false) — jumps
// to |not_found|.
//
// |scratch1| and |scratch2| are clobbered (they hold the hash mask and the
// cache index); |result| doubles as a temporary for the cache pointer before
// the final load. |object| is left untouched.
void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache from the roots array.
  ExternalReference roots_address =
      ExternalReference::roots_address(masm->isolate());
  __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
  __ mov(number_string_cache,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  __ shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
  __ sub(Operand(mask), Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  if (object_is_smi) {
    __ mov(scratch, object);
    __ SmiUntag(scratch);
  } else {
    Label not_smi;
    STATIC_ASSERT(kSmiTag == 0);
    __ test(object, Immediate(kSmiTagMask));
    __ j(not_zero, &not_smi, Label::kNear);
    __ mov(scratch, object);
    __ SmiUntag(scratch);
    __ jmp(&smi_hash_calculated, Label::kNear);
    __ bind(&not_smi);
    // Bail out unless the non-smi object is a heap number.
    __ cmp(FieldOperand(object, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, not_found);
    STATIC_ASSERT(8 == kDoubleSize);
    // Hash = xor of the two 32-bit halves of the double value.
    __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
    __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
    // Object is heap number and hash is now in scratch. Calculate cache index.
    __ and_(scratch, Operand(mask));
    Register index = scratch;
    Register probe = mask;
    // Load the cached key; a smi key cannot match a heap number.
    __ mov(probe,
           FieldOperand(number_string_cache,
                        index,
                        times_twice_pointer_size,
                        FixedArray::kHeaderSize));
    __ test(probe, Immediate(kSmiTagMask));
    __ j(zero, not_found);
    // Compare the two heap numbers by value (SSE2 if available, x87
    // otherwise).
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
      __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
      __ ucomisd(xmm0, xmm1);
    } else {
      __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
      __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
      __ FCmp();
    }
    __ j(parity_even, not_found);  // Bail out if NaN is involved.
    __ j(not_equal, not_found);  // The cache did not contain this value.
    __ jmp(&load_result_from_cache, Label::kNear);
  }

  __ bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  __ and_(scratch, Operand(mask));
  Register index = scratch;
  // Check if the entry is the smi we are looking for.
  __ cmp(object,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize));
  __ j(not_equal, not_found);

  // Get the result from the cache: the string sits one pointer after its key.
  __ bind(&load_result_from_cache);
  __ mov(result,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->number_to_string_native(), 1);
}
3568
3569
// Converts the number argument on the stack (one slot above the return
// address) to a string. Fast path: look it up in the number-string cache and
// return the cached string in eax, popping the argument. On a cache miss,
// fall back to the runtime.
void NumberToStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Load the number argument.
  __ mov(ebx, Operand(esp, kPointerSize));

  // Generate code to lookup number in the number string cache.
  // The argument has not been checked, so object_is_smi is false here.
  GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
  __ ret(1 * kPointerSize);  // Cache hit: result string is in eax.

  __ bind(&runtime);
  // Handle number to string in the runtime system if not found in the cache.
  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
}
3583
3584
3585static int NegativeComparisonResult(Condition cc) {
3586 ASSERT(cc != equal);
3587 ASSERT((cc == less) || (cc == less_equal)
3588 || (cc == greater) || (cc == greater_equal));
3589 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
3590}
3591
3592void CompareStub::Generate(MacroAssembler* masm) {
3593 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3594
3595 Label check_unequal_objects, done;
3596
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003597 // Compare two smis if required.
3598 if (include_smi_compare_) {
3599 Label non_smi, smi_done;
3600 __ mov(ecx, Operand(edx));
3601 __ or_(ecx, Operand(eax));
3602 __ test(ecx, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00003603 __ j(not_zero, &non_smi);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003604 __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
3605 __ j(no_overflow, &smi_done);
Ben Murdochf87a2032010-10-22 12:50:53 +01003606 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003607 __ bind(&smi_done);
3608 __ mov(eax, edx);
3609 __ ret(0);
3610 __ bind(&non_smi);
3611 } else if (FLAG_debug_code) {
3612 __ mov(ecx, Operand(edx));
3613 __ or_(ecx, Operand(eax));
3614 __ test(ecx, Immediate(kSmiTagMask));
3615 __ Assert(not_zero, "Unexpected smi operands.");
3616 }
3617
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003618 // NOTICE! This code is only reached after a smi-fast-case check, so
3619 // it is certain that at least one operand isn't a smi.
3620
3621 // Identical objects can be compared fast, but there are some tricky cases
3622 // for NaN and undefined.
3623 {
3624 Label not_identical;
3625 __ cmp(eax, Operand(edx));
3626 __ j(not_equal, &not_identical);
3627
3628 if (cc_ != equal) {
3629 // Check for undefined. undefined OP undefined is false even though
3630 // undefined == undefined.
Ben Murdoch257744e2011-11-30 15:57:28 +00003631 Label check_for_nan;
Steve Block44f0eee2011-05-26 01:26:41 +01003632 __ cmp(edx, masm->isolate()->factory()->undefined_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00003633 __ j(not_equal, &check_for_nan, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003634 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3635 __ ret(0);
3636 __ bind(&check_for_nan);
3637 }
3638
Steve Block44f0eee2011-05-26 01:26:41 +01003639 // Test for NaN. Sadly, we can't just compare to factory->nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003640 // so we do the second best thing - test it ourselves.
3641 // Note: if cc_ != equal, never_nan_nan_ is not used.
3642 if (never_nan_nan_ && (cc_ == equal)) {
3643 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3644 __ ret(0);
3645 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003646 Label heap_number;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003647 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003648 Immediate(masm->isolate()->factory()->heap_number_map()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003649 __ j(equal, &heap_number, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003650 if (cc_ != equal) {
3651 // Call runtime on identical JSObjects. Otherwise return equal.
3652 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3653 __ j(above_equal, &not_identical);
3654 }
3655 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3656 __ ret(0);
3657
3658 __ bind(&heap_number);
3659 // It is a heap number, so return non-equal if it's NaN and equal if
3660 // it's not NaN.
3661 // The representation of NaN values has all exponent bits (52..62) set,
3662 // and not all mantissa bits (0..51) clear.
3663 // We only accept QNaNs, which have bit 51 set.
3664 // Read top bits of double representation (second word of value).
3665
3666 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
3667 // all bits in the mask are set. We only need to check the word
3668 // that contains the exponent and high bit of the mantissa.
3669 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
3670 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
Steve Block9fac8402011-05-12 15:51:54 +01003671 __ Set(eax, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003672 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
3673 // bits.
3674 __ add(edx, Operand(edx));
3675 __ cmp(edx, kQuietNaNHighBitsMask << 1);
3676 if (cc_ == equal) {
3677 STATIC_ASSERT(EQUAL != 1);
3678 __ setcc(above_equal, eax);
3679 __ ret(0);
3680 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003681 Label nan;
3682 __ j(above_equal, &nan, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003683 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3684 __ ret(0);
3685 __ bind(&nan);
3686 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3687 __ ret(0);
3688 }
3689 }
3690
3691 __ bind(&not_identical);
3692 }
3693
3694 // Strict equality can quickly decide whether objects are equal.
3695 // Non-strict object equality is slower, so it is handled later in the stub.
3696 if (cc_ == equal && strict_) {
3697 Label slow; // Fallthrough label.
Ben Murdoch257744e2011-11-30 15:57:28 +00003698 Label not_smis;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003699 // If we're doing a strict equality comparison, we don't have to do
3700 // type conversion, so we generate code to do fast comparison for objects
3701 // and oddballs. Non-smi numbers and strings still go through the usual
3702 // slow-case code.
3703 // If either is a Smi (we know that not both are), then they can only
3704 // be equal if the other is a HeapNumber. If so, use the slow case.
3705 STATIC_ASSERT(kSmiTag == 0);
3706 ASSERT_EQ(0, Smi::FromInt(0));
3707 __ mov(ecx, Immediate(kSmiTagMask));
3708 __ and_(ecx, Operand(eax));
3709 __ test(ecx, Operand(edx));
Ben Murdoch257744e2011-11-30 15:57:28 +00003710 __ j(not_zero, &not_smis, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003711 // One operand is a smi.
3712
3713 // Check whether the non-smi is a heap number.
3714 STATIC_ASSERT(kSmiTagMask == 1);
3715 // ecx still holds eax & kSmiTag, which is either zero or one.
3716 __ sub(Operand(ecx), Immediate(0x01));
3717 __ mov(ebx, edx);
3718 __ xor_(ebx, Operand(eax));
3719 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
3720 __ xor_(ebx, Operand(eax));
3721 // if eax was smi, ebx is now edx, else eax.
3722
3723 // Check if the non-smi operand is a heap number.
3724 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003725 Immediate(masm->isolate()->factory()->heap_number_map()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003726 // If heap number, handle it in the slow case.
3727 __ j(equal, &slow);
3728 // Return non-equal (ebx is not zero)
3729 __ mov(eax, ebx);
3730 __ ret(0);
3731
3732 __ bind(&not_smis);
3733 // If either operand is a JSObject or an oddball value, then they are not
3734 // equal since their pointers are different
3735 // There is no test for undetectability in strict equality.
3736
3737 // Get the type of the first operand.
3738 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch257744e2011-11-30 15:57:28 +00003739 Label first_non_object;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003740 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3741 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00003742 __ j(below, &first_non_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003743
3744 // Return non-zero (eax is not zero)
Ben Murdoch257744e2011-11-30 15:57:28 +00003745 Label return_not_equal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003746 STATIC_ASSERT(kHeapObjectTag != 0);
3747 __ bind(&return_not_equal);
3748 __ ret(0);
3749
3750 __ bind(&first_non_object);
3751 // Check for oddballs: true, false, null, undefined.
3752 __ CmpInstanceType(ecx, ODDBALL_TYPE);
3753 __ j(equal, &return_not_equal);
3754
3755 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx);
3756 __ j(above_equal, &return_not_equal);
3757
3758 // Check for oddballs: true, false, null, undefined.
3759 __ CmpInstanceType(ecx, ODDBALL_TYPE);
3760 __ j(equal, &return_not_equal);
3761
3762 // Fall through to the general case.
3763 __ bind(&slow);
3764 }
3765
3766 // Generate the number comparison code.
3767 if (include_number_compare_) {
3768 Label non_number_comparison;
3769 Label unordered;
Ben Murdoch8b112d22011-06-08 16:22:53 +01003770 if (CpuFeatures::IsSupported(SSE2)) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003771 CpuFeatures::Scope use_sse2(SSE2);
3772 CpuFeatures::Scope use_cmov(CMOV);
3773
3774 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
3775 __ ucomisd(xmm0, xmm1);
3776
3777 // Don't base result on EFLAGS when a NaN is involved.
Ben Murdoch257744e2011-11-30 15:57:28 +00003778 __ j(parity_even, &unordered);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003779 // Return a result of -1, 0, or 1, based on EFLAGS.
3780 __ mov(eax, 0); // equal
3781 __ mov(ecx, Immediate(Smi::FromInt(1)));
3782 __ cmov(above, eax, Operand(ecx));
3783 __ mov(ecx, Immediate(Smi::FromInt(-1)));
3784 __ cmov(below, eax, Operand(ecx));
3785 __ ret(0);
3786 } else {
3787 FloatingPointHelper::CheckFloatOperands(
3788 masm, &non_number_comparison, ebx);
3789 FloatingPointHelper::LoadFloatOperand(masm, eax);
3790 FloatingPointHelper::LoadFloatOperand(masm, edx);
3791 __ FCmp();
3792
3793 // Don't base result on EFLAGS when a NaN is involved.
Ben Murdoch257744e2011-11-30 15:57:28 +00003794 __ j(parity_even, &unordered);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003795
Ben Murdoch257744e2011-11-30 15:57:28 +00003796 Label below_label, above_label;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003797 // Return a result of -1, 0, or 1, based on EFLAGS.
Ben Murdoch257744e2011-11-30 15:57:28 +00003798 __ j(below, &below_label);
3799 __ j(above, &above_label);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003800
Steve Block9fac8402011-05-12 15:51:54 +01003801 __ Set(eax, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003802 __ ret(0);
3803
3804 __ bind(&below_label);
3805 __ mov(eax, Immediate(Smi::FromInt(-1)));
3806 __ ret(0);
3807
3808 __ bind(&above_label);
3809 __ mov(eax, Immediate(Smi::FromInt(1)));
3810 __ ret(0);
3811 }
3812
3813 // If one of the numbers was NaN, then the result is always false.
3814 // The cc is never not-equal.
3815 __ bind(&unordered);
3816 ASSERT(cc_ != not_equal);
3817 if (cc_ == less || cc_ == less_equal) {
3818 __ mov(eax, Immediate(Smi::FromInt(1)));
3819 } else {
3820 __ mov(eax, Immediate(Smi::FromInt(-1)));
3821 }
3822 __ ret(0);
3823
3824 // The number comparison code did not provide a valid result.
3825 __ bind(&non_number_comparison);
3826 }
3827
3828 // Fast negative check for symbol-to-symbol equality.
3829 Label check_for_strings;
3830 if (cc_ == equal) {
3831 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
3832 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
3833
3834 // We've already checked for object identity, so if both operands
3835 // are symbols they aren't equal. Register eax already holds a
3836 // non-zero value, which indicates not equal, so just return.
3837 __ ret(0);
3838 }
3839
3840 __ bind(&check_for_strings);
3841
3842 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
3843 &check_unequal_objects);
3844
3845 // Inline comparison of ascii strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00003846 if (cc_ == equal) {
3847 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003848 edx,
3849 eax,
3850 ecx,
Ben Murdoch257744e2011-11-30 15:57:28 +00003851 ebx);
3852 } else {
3853 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
3854 edx,
3855 eax,
3856 ecx,
3857 ebx,
3858 edi);
3859 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003860#ifdef DEBUG
3861 __ Abort("Unexpected fall-through from string comparison");
3862#endif
3863
3864 __ bind(&check_unequal_objects);
3865 if (cc_ == equal && !strict_) {
3866 // Non-strict equality. Objects are unequal if
3867 // they are both JSObjects and not undetectable,
3868 // and their pointers are different.
Ben Murdoch257744e2011-11-30 15:57:28 +00003869 Label not_both_objects;
3870 Label return_unequal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003871 // At most one is a smi, so we can test for smi by adding the two.
3872 // A smi plus a heap object has the low bit set, a heap object plus
3873 // a heap object has the low bit clear.
3874 STATIC_ASSERT(kSmiTag == 0);
3875 STATIC_ASSERT(kSmiTagMask == 1);
3876 __ lea(ecx, Operand(eax, edx, times_1, 0));
3877 __ test(ecx, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00003878 __ j(not_zero, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003879 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00003880 __ j(below, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003881 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx);
Ben Murdoch257744e2011-11-30 15:57:28 +00003882 __ j(below, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003883 // We do not bail out after this point. Both are JSObjects, and
3884 // they are equal if and only if both are undetectable.
3885 // The and of the undetectable flags is 1 if and only if they are equal.
3886 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
3887 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00003888 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003889 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
3890 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00003891 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003892 // The objects are both undetectable, so they both compare as the value
3893 // undefined, and are equal.
3894 __ Set(eax, Immediate(EQUAL));
3895 __ bind(&return_unequal);
3896 // Return non-equal by returning the non-zero object pointer in eax,
3897 // or return equal if we fell through to here.
3898 __ ret(0); // rax, rdx were pushed
3899 __ bind(&not_both_objects);
3900 }
3901
3902 // Push arguments below the return address.
3903 __ pop(ecx);
3904 __ push(edx);
3905 __ push(eax);
3906
3907 // Figure out which native to call and setup the arguments.
3908 Builtins::JavaScript builtin;
3909 if (cc_ == equal) {
3910 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
3911 } else {
3912 builtin = Builtins::COMPARE;
3913 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3914 }
3915
3916 // Restore return address on the stack.
3917 __ push(ecx);
3918
3919 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
3920 // tagged as a small integer.
3921 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
3922}
3923
3924
// Emits code that jumps to |label| unless |object| is a symbol.
// Used by the comparison stub as a fast negative check: two distinct
// symbols are never equal, so the caller can report "not equal" without
// doing a full comparison. |scratch| is clobbered.
void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
                                    Label* label,
                                    Register object,
                                    Register scratch) {
  // Smis are not symbols.
  __ test(object, Immediate(kSmiTagMask));
  __ j(zero, label);
  // Load the instance type from the object's map.
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  // A symbol is a string (kStringTag) with the symbol bit set; mask out
  // everything else and compare against the expected bit pattern.
  __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
  __ cmp(scratch, kSymbolTag | kStringTag);
  __ j(not_equal, label);
}
3937
3938
// Stack-guard check stub: simply tail-calls the runtime, which handles
// interrupts and stack overflow. Takes 0 arguments and produces 1 result.
void StackCheckStub::Generate(MacroAssembler* masm) {
  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
}
3942
3943
// Generates the call-function stub. On entry the stack holds the
// receiver at esp[(argc_ + 1) * kPointerSize] and the callee at
// esp[(argc_ + 2) * kPointerSize]. Fast path invokes a real JSFunction
// (as method or as function depending on a hole-marker receiver); the
// slow path dispatches to the CALL_NON_FUNCTION builtin.
void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;

  // The receiver might implicitly be the global object. This is
  // indicated by passing the hole as the receiver to the call
  // function stub.
  if (ReceiverMightBeImplicit()) {
    Label call;
    // Get the receiver from the stack.
    // +1 ~ return address
    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
    // Call as function is indicated with the hole.
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
    __ j(not_equal, &call, Label::kNear);
    // Patch the receiver on the stack with the global receiver object.
    __ mov(ebx, GlobalObjectOperand());
    __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx);
    __ bind(&call);
  }

  // Get the function to call from the stack.
  // +2 ~ receiver, return address
  __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));

  // Check that the function really is a JavaScript function.
  // A smi cannot be a function.
  __ test(edi, Immediate(kSmiTagMask));
  __ j(zero, &slow);
  // Goto slow case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow);

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc_);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    // eax still holds the receiver loaded above; the hole marker selects
    // the call-as-function invocation path.
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &call_as_function);
    __ InvokeFunction(edi,
                      actual,
                      JUMP_FUNCTION,
                      NullCallWrapper(),
                      CALL_AS_METHOD);
    __ bind(&call_as_function);
  }
  __ InvokeFunction(edi,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    CALL_AS_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
  __ Set(eax, Immediate(argc_));
  __ Set(ebx, Immediate(0));
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
  // Jump (not call) through the arguments adaptor so the builtin returns
  // directly to our caller.
  Handle<Code> adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
}
4008
4009
// The ia32 CEntry stub never requires placement in immovable code space.
bool CEntryStub::NeedsImmovableCode() {
  return false;
}
4013
4014
// Throws the pending (normal, catchable) exception; the exception value
// is passed to the macro assembler in eax.
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  __ Throw(eax);
}
4018
4019
// Emits one attempt at calling the C/runtime function in ebx. On a
// non-failure result it leaves the exit frame and returns to the JS
// caller. On failure it dispatches: RETRY_AFTER_GC falls through (so the
// caller of GenerateCore can emit a retry), out-of-memory and termination
// jump to the corresponding labels, anything else jumps to
// |throw_normal_exception|. When |do_gc| is set, Runtime::PerformGC is
// called first with the previous failure (in eax) as argument; when
// |always_allocate_scope| is set, the heap's always-allocate scope depth
// is incremented around the call.
void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate_scope) {
  // eax: result parameter for PerformGC, if any
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result_size_ is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  if (do_gc) {
    // Pass failure code returned from last attempt as first argument to
    // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
    // stack alignment is known to be correct. This function takes one argument
    // which is passed on the stack, and we know that the stack has been
    // prepared to pass at least one argument.
    __ mov(Operand(esp, 0 * kPointerSize), eax);  // Result.
    __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
    __ inc(Operand::StaticVariable(scope_depth));
  }

  // Call C function. Arguments are passed on the stack:
  // esp[0] = argc, esp[4] = argv, esp[8] = isolate.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));
  __ call(Operand(ebx));
  // Result is in eax or edx:eax - do not destroy these registers!

  if (always_allocate_scope) {
    __ dec(Operand::StaticVariable(scope_depth));
  }

  // Make sure we're not trying to return 'the hole' from the runtime
  // call as this may lead to crashes in the IC code later.
  if (FLAG_debug_code) {
    Label okay;
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
    __ j(not_equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Check for failure result. Failure objects are tagged such that
  // (value + 1) has its low kFailureTagMask bits clear.
  Label failure_returned;
  STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  __ lea(ecx, Operand(eax, 1));
  // Lower 2 bits of ecx are 0 iff eax has failure tag.
  __ test(ecx, Immediate(kFailureTagMask));
  __ j(zero, &failure_returned);

  ExternalReference pending_exception_address(
      Isolate::k_pending_exception_address, masm->isolate());

  // Check that there is no pending exception, otherwise we
  // should have returned some failure value.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Operand::StaticVariable(
        ExternalReference::the_hole_value_location(masm->isolate())));
    Label okay;
    // Pending-exception slot should hold the hole when nothing is pending.
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles_);
  __ ret(0);

  // Handling of failure.
  __ bind(&failure_returned);

  Label retry;
  // If the returned exception is RETRY_AFTER_GC continue at retry label
  STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ j(zero, &retry);

  // Special handling of out of memory exceptions.
  __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
  __ j(equal, throw_out_of_memory_exception);

  // Retrieve the pending exception and clear the variable
  // (by storing the hole back into it).
  ExternalReference the_hole_location =
      ExternalReference::the_hole_value_location(masm->isolate());
  __ mov(eax, Operand::StaticVariable(pending_exception_address));
  __ mov(edx, Operand::StaticVariable(the_hole_location));
  __ mov(Operand::StaticVariable(pending_exception_address), edx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ cmp(eax, masm->isolate()->factory()->termination_exception());
  __ j(equal, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  // Retry: fall through so the caller can emit the next attempt.
  __ bind(&retry);
}
4139
4140
// Throws an exception that JavaScript code cannot catch (out-of-memory or
// termination, selected by |type|); eax is used as the value register.
void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                          UncatchableExceptionType type) {
  __ ThrowUncatchable(type, eax);
}
4145
4146
// Generates the C entry stub: enters an exit frame (JS -> C++ transition)
// and attempts the runtime call up to three times via GenerateCore —
// first plain, then after a GC, then after a GC with the always-allocate
// scope active — before throwing the resulting exception.
void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)

  // NOTE: Invocations of builtins may return failure objects instead
  // of a proper result. The builtin entry handles this by performing
  // a garbage collection and retrying the builtin (twice).

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(save_doubles_);

  // eax: result parameter for PerformGC, if any (setup below)
  // ebx: pointer to builtin function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: argv pointer (C callee-saved)

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Call into the runtime system (attempt 1: no GC, no always-allocate).
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false);

  // Do space-specific GC and retry runtime call (attempt 2).
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false);

  // Do full GC and retry runtime call one final time (attempt 3).
  // Pass an internal-error failure to PerformGC to request a full GC.
  Failure* failure = Failure::InternalError();
  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowUncatchable(masm, OUT_OF_MEMORY);

  __ bind(&throw_termination_exception);
  GenerateThrowUncatchable(masm, TERMINATION);

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}
4208
4209
// Generates the JS entry stub body: the C++ -> JavaScript transition.
// Builds an entry frame (saving C callee-saved registers and the previous
// c_entry_fp), links a JS_ENTRY try handler, and calls the JS entry (or
// JS construct entry) trampoline builtin. Exceptions caught here are
// stored in the pending-exception slot and a failure sentinel is returned
// in eax. |is_construct| selects the construct-entry trampoline.
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, exit;
#ifdef ENABLE_LOGGING_AND_PROFILING
  Label not_outermost_js, not_outermost_js_2;
#endif

  // Setup frame.
  __ push(ebp);
  __ mov(ebp, Operand(esp));

  // Push marker in two places.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

#ifdef ENABLE_LOGGING_AND_PROFILING
  // If this is the outermost JS call, set js_entry_sp value and push an
  // OUTERMOST marker; otherwise push an INNER marker. The marker is
  // popped and checked on exit below.
  ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
                                masm->isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  Label cont;
  __ jmp(&cont);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
  __ bind(&cont);
#endif

  // Call a faked try-block that does the invoke.
  __ call(&invoke);

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                      masm->isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);

  // Clear any pending exceptions (by storing the hole into the slot).
  ExternalReference the_hole_location =
      ExternalReference::the_hole_value_location(masm->isolate());
  __ mov(edx, Operand::StaticVariable(the_hole_location));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline
  // builtin and pop the faked function when we return. Notice that we
  // cannot store a reference to the trampoline code directly in this
  // stub, because the builtin stubs may not have been generated yet.
  if (is_construct) {
    ExternalReference construct_entry(
        Builtins::kJSConstructEntryTrampoline,
        masm->isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline,
                            masm->isolate());
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(Operand(edx));

  // Unlink this frame from the handler chain.
  __ PopTryHandler();

  __ bind(&exit);
#ifdef ENABLE_LOGGING_AND_PROFILING
  // Check if the current stack frame is marked as the outermost JS frame;
  // if so, clear js_entry_sp again.
  __ pop(ebx);
  __ cmp(Operand(ebx),
         Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);
#endif

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::k_c_entry_fp_address,
      masm->isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(Operand(esp), Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}
4319
4320
Ben Murdoch086aeea2011-05-13 15:57:08 +01004321// Generate stub code for instanceof.
4322// This code can patch a call-site inlined cache for the instanceof check,
4323// which looks like this.
4324//
4325// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
4326// 75 0a jne <some near label>
4327// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
4328//
4329// If call site patching is requested the stack will have the delta from the
4330// return address to the cmp instruction just below the return address. This
4331// also means that call site patching can only take place with arguments in
4332// registers. TOS looks like this when call site patching is requested
4333//
4334// esp[0] : return address
4335// esp[4] : delta from return address to cmp instruction
4336//
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004337void InstanceofStub::Generate(MacroAssembler* masm) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01004338 // Call site inlining and patching implies arguments in registers.
4339 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
4340
Ben Murdochb0fe1622011-05-05 13:52:32 +01004341 // Fixed register usage throughout the stub.
4342 Register object = eax; // Object (lhs).
4343 Register map = ebx; // Map of the object.
4344 Register function = edx; // Function (rhs).
4345 Register prototype = edi; // Prototype of the function.
4346 Register scratch = ecx;
4347
Ben Murdoch086aeea2011-05-13 15:57:08 +01004348 // Constants describing the call site code to patch.
4349 static const int kDeltaToCmpImmediate = 2;
4350 static const int kDeltaToMov = 8;
4351 static const int kDeltaToMovImmediate = 9;
4352 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
4353 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
4354 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
4355
Steve Block44f0eee2011-05-26 01:26:41 +01004356 ExternalReference roots_address =
4357 ExternalReference::roots_address(masm->isolate());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004358
4359 ASSERT_EQ(object.code(), InstanceofStub::left().code());
4360 ASSERT_EQ(function.code(), InstanceofStub::right().code());
4361
Ben Murdochb0fe1622011-05-05 13:52:32 +01004362 // Get the object and function - they are always both needed.
4363 Label slow, not_js_object;
Ben Murdoch086aeea2011-05-13 15:57:08 +01004364 if (!HasArgsInRegisters()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004365 __ mov(object, Operand(esp, 2 * kPointerSize));
4366 __ mov(function, Operand(esp, 1 * kPointerSize));
4367 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004368
4369 // Check that the left hand is a JS object.
Ben Murdochb0fe1622011-05-05 13:52:32 +01004370 __ test(object, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00004371 __ j(zero, &not_js_object);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004372 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004373
Ben Murdoch086aeea2011-05-13 15:57:08 +01004374 // If there is a call site cache don't look in the global cache, but do the
4375 // real lookup and update the call site cache.
4376 if (!HasCallSiteInlineCheck()) {
4377 // Look up the function and the map in the instanceof cache.
Ben Murdoch257744e2011-11-30 15:57:28 +00004378 Label miss;
Ben Murdoch086aeea2011-05-13 15:57:08 +01004379 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
4380 __ cmp(function,
4381 Operand::StaticArray(scratch, times_pointer_size, roots_address));
Ben Murdoch257744e2011-11-30 15:57:28 +00004382 __ j(not_equal, &miss, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004383 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
4384 __ cmp(map, Operand::StaticArray(
4385 scratch, times_pointer_size, roots_address));
Ben Murdoch257744e2011-11-30 15:57:28 +00004386 __ j(not_equal, &miss, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004387 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4388 __ mov(eax, Operand::StaticArray(
4389 scratch, times_pointer_size, roots_address));
4390 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4391 __ bind(&miss);
4392 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004393
Ben Murdochb0fe1622011-05-05 13:52:32 +01004394 // Get the prototype of the function.
4395 __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004396
4397 // Check that the function prototype is a JS object.
Ben Murdochb0fe1622011-05-05 13:52:32 +01004398 __ test(prototype, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00004399 __ j(zero, &slow);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004400 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004401
Ben Murdoch086aeea2011-05-13 15:57:08 +01004402 // Update the global instanceof or call site inlined cache with the current
4403 // map and function. The cached answer will be set when it is known below.
4404 if (!HasCallSiteInlineCheck()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004405 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
4406 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
4407 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
4408 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
4409 function);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004410 } else {
4411 // The constants for the code patching are based on no push instructions
4412 // at the call site.
4413 ASSERT(HasArgsInRegisters());
4414 // Get return address and delta to inlined map check.
4415 __ mov(scratch, Operand(esp, 0 * kPointerSize));
4416 __ sub(scratch, Operand(esp, 1 * kPointerSize));
4417 if (FLAG_debug_code) {
4418 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
4419 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
4420 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
4421 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
4422 }
4423 __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
4424 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004425
Ben Murdochb0fe1622011-05-05 13:52:32 +01004426 // Loop through the prototype chain of the object looking for the function
4427 // prototype.
4428 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00004429 Label loop, is_instance, is_not_instance;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004430 __ bind(&loop);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004431 __ cmp(scratch, Operand(prototype));
Ben Murdoch257744e2011-11-30 15:57:28 +00004432 __ j(equal, &is_instance, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01004433 Factory* factory = masm->isolate()->factory();
4434 __ cmp(Operand(scratch), Immediate(factory->null_value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00004435 __ j(equal, &is_not_instance, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004436 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
4437 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004438 __ jmp(&loop);
4439
4440 __ bind(&is_instance);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004441 if (!HasCallSiteInlineCheck()) {
4442 __ Set(eax, Immediate(0));
4443 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4444 __ mov(Operand::StaticArray(scratch,
4445 times_pointer_size, roots_address), eax);
4446 } else {
4447 // Get return address and delta to inlined map check.
Steve Block44f0eee2011-05-26 01:26:41 +01004448 __ mov(eax, factory->true_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004449 __ mov(scratch, Operand(esp, 0 * kPointerSize));
4450 __ sub(scratch, Operand(esp, 1 * kPointerSize));
4451 if (FLAG_debug_code) {
4452 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
4453 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
4454 }
4455 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
4456 if (!ReturnTrueFalseObject()) {
4457 __ Set(eax, Immediate(0));
4458 }
4459 }
4460 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004461
4462 __ bind(&is_not_instance);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004463 if (!HasCallSiteInlineCheck()) {
4464 __ Set(eax, Immediate(Smi::FromInt(1)));
4465 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4466 __ mov(Operand::StaticArray(
4467 scratch, times_pointer_size, roots_address), eax);
4468 } else {
4469 // Get return address and delta to inlined map check.
Steve Block44f0eee2011-05-26 01:26:41 +01004470 __ mov(eax, factory->false_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004471 __ mov(scratch, Operand(esp, 0 * kPointerSize));
4472 __ sub(scratch, Operand(esp, 1 * kPointerSize));
4473 if (FLAG_debug_code) {
4474 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
4475 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
4476 }
4477 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
4478 if (!ReturnTrueFalseObject()) {
4479 __ Set(eax, Immediate(Smi::FromInt(1)));
4480 }
4481 }
4482 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004483
4484 Label object_not_null, object_not_null_or_smi;
4485 __ bind(&not_js_object);
4486 // Before null, smi and string value checks, check that the rhs is a function
4487 // as for a non-function rhs an exception needs to be thrown.
4488 __ test(function, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00004489 __ j(zero, &slow);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004490 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
Ben Murdoch257744e2011-11-30 15:57:28 +00004491 __ j(not_equal, &slow);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004492
4493 // Null is not instance of anything.
Steve Block44f0eee2011-05-26 01:26:41 +01004494 __ cmp(object, factory->null_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004495 __ j(not_equal, &object_not_null);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004496 __ Set(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004497 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004498
4499 __ bind(&object_not_null);
4500 // Smi values is not instance of anything.
4501 __ test(object, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00004502 __ j(not_zero, &object_not_null_or_smi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004503 __ Set(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004504 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004505
4506 __ bind(&object_not_null_or_smi);
4507 // String values is not instance of anything.
4508 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
4509 __ j(NegateCondition(is_string), &slow);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004510 __ Set(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004511 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004512
4513 // Slow-case: Go through the JavaScript implementation.
4514 __ bind(&slow);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004515 if (!ReturnTrueFalseObject()) {
4516 // Tail call the builtin which returns 0 or 1.
4517 if (HasArgsInRegisters()) {
4518 // Push arguments below return address.
4519 __ pop(scratch);
4520 __ push(object);
4521 __ push(function);
4522 __ push(scratch);
4523 }
4524 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
4525 } else {
4526 // Call the builtin and convert 0/1 to true/false.
4527 __ EnterInternalFrame();
Ben Murdochb0fe1622011-05-05 13:52:32 +01004528 __ push(object);
4529 __ push(function);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004530 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
4531 __ LeaveInternalFrame();
Ben Murdoch257744e2011-11-30 15:57:28 +00004532 Label true_value, done;
Ben Murdoch086aeea2011-05-13 15:57:08 +01004533 __ test(eax, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00004534 __ j(zero, &true_value, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01004535 __ mov(eax, factory->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00004536 __ jmp(&done, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004537 __ bind(&true_value);
Steve Block44f0eee2011-05-26 01:26:41 +01004538 __ mov(eax, factory->true_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004539 __ bind(&done);
4540 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004541 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004542}
4543
4544
Ben Murdoch086aeea2011-05-13 15:57:08 +01004545Register InstanceofStub::left() { return eax; }
4546
4547
4548Register InstanceofStub::right() { return edx; }
4549
4550
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004551int CompareStub::MinorKey() {
4552 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
4553 // stubs the never NaN NaN condition is only taken into account if the
4554 // condition is equals.
4555 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
4556 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4557 return ConditionField::encode(static_cast<unsigned>(cc_))
4558 | RegisterField::encode(false) // lhs_ and rhs_ are not used
4559 | StrictField::encode(strict_)
4560 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004561 | IncludeNumberCompareField::encode(include_number_compare_)
4562 | IncludeSmiCompareField::encode(include_smi_compare_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004563}
4564
4565
4566// Unfortunately you have to run without snapshots to see most of these
4567// names in the profile since most compare stubs end up in the snapshot.
4568const char* CompareStub::GetName() {
4569 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4570
4571 if (name_ != NULL) return name_;
4572 const int kMaxNameLength = 100;
Steve Block44f0eee2011-05-26 01:26:41 +01004573 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
4574 kMaxNameLength);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004575 if (name_ == NULL) return "OOM";
4576
4577 const char* cc_name;
4578 switch (cc_) {
4579 case less: cc_name = "LT"; break;
4580 case greater: cc_name = "GT"; break;
4581 case less_equal: cc_name = "LE"; break;
4582 case greater_equal: cc_name = "GE"; break;
4583 case equal: cc_name = "EQ"; break;
4584 case not_equal: cc_name = "NE"; break;
4585 default: cc_name = "UnknownCondition"; break;
4586 }
4587
4588 const char* strict_name = "";
4589 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
4590 strict_name = "_STRICT";
4591 }
4592
4593 const char* never_nan_nan_name = "";
4594 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
4595 never_nan_nan_name = "_NO_NAN";
4596 }
4597
4598 const char* include_number_compare_name = "";
4599 if (!include_number_compare_) {
4600 include_number_compare_name = "_NO_NUMBER";
4601 }
4602
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004603 const char* include_smi_compare_name = "";
4604 if (!include_smi_compare_) {
4605 include_smi_compare_name = "_NO_SMI";
4606 }
4607
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004608 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004609 "CompareStub_%s%s%s%s%s",
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004610 cc_name,
4611 strict_name,
4612 never_nan_nan_name,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004613 include_number_compare_name,
4614 include_smi_compare_name);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004615 return name_;
4616}
4617
4618
4619// -------------------------------------------------------------------------
4620// StringCharCodeAtGenerator
4621
// Emits the fast path that loads the character code at index_ of the string
// in object_ into result_ as a smi. On success control falls through past
// exit_; otherwise it branches to one of the deferred labels
// (receiver_not_string_, index_not_smi_, index_out_of_range_, call_runtime_)
// handled by GenerateSlow.
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label flat_string;
  Label ascii_string;
  Label got_char_code;

  // If the receiver is a smi trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  __ test(object_, Immediate(kSmiTagMask));
  __ j(zero, receiver_not_string_);

  // Fetch the instance type of the receiver into result register.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the receiver is not a string trigger the non-string case.
  __ test(result_, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string_);

  // If the index is non-smi trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ test(index_, Immediate(kSmiTagMask));
  __ j(not_zero, &index_not_smi_);

  // Put smi-tagged index into scratch register. The slow case re-enters at
  // got_smi_index_ after converting a heap-number index into scratch_.
  __ mov(scratch_, index_);
  __ bind(&got_smi_index_);

  // Check for index out of range (both sides are smi-tagged, so an
  // unsigned comparison of the tagged values is valid).
  __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result_, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle non-flat strings: anything that is neither sequential nor a
  // cons string (e.g. external) goes to the runtime.
  __ test(result_, Immediate(kIsConsStringMask));
  __ j(zero, &call_runtime_);

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
         Immediate(masm->isolate()->factory()->empty_string()));
  __ j(not_equal, &call_runtime_);
  // Get the first of the two strings and load its instance type.
  __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result_, Immediate(kStringRepresentationMask));
  __ j(not_zero, &call_runtime_);

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ test(result_, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register. The smi tag
  // doubles the index, which matches the 2-byte element size, so times_1
  // scaling is correct here.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movzx_w(result_, FieldOperand(object_,
                                   scratch_, times_1,  // Scratch is smi-tagged.
                                   SeqTwoByteString::kHeaderSize));
  __ jmp(&got_char_code);

  // ASCII string.
  // Load the byte into the result register; the index must be untagged
  // first because elements are one byte wide.
  __ bind(&ascii_string);
  __ SmiUntag(scratch_);
  __ movzx_b(result_, FieldOperand(object_,
                                   scratch_, times_1,
                                   SeqAsciiString::kHeaderSize));
  __ bind(&got_char_code);
  // Return the character code as a smi.
  __ SmiTag(result_);
  __ bind(&exit_);
}
4703
4704
// Emits the deferred code for the cases GenerateFast could not handle:
// a non-smi (heap number) index, and strings that need flattening. On
// success it jumps back into the fast path (got_smi_index_) or to exit_.
// This code must only be reached via the labels bound here, never by
// fall-through — hence the Abort guards at both ends (debug-mode checks).
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  // Save object_ and index_ across the runtime call so they can be
  // restored afterwards; push index_ a second time as the call argument.
  __ push(object_);
  __ push(index_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!scratch_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(scratch_, eax);
  }
  __ pop(index_);
  __ pop(object_);
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ test(scratch_, Immediate(kSmiTagMask));
  __ j(not_zero, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}
4761
4762
4763// -------------------------------------------------------------------------
4764// StringCharFromCodeGenerator
4765
// Emits the fast path that converts the smi character code in code_ into a
// single-character string in result_ by looking it up in the heap's
// single-character string cache. Branches to slow_case_ when the code is
// not a smi, exceeds kMaxAsciiCharCode, or the cache has no entry.
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
  // One combined test checks both "is a smi" and "fits in the ascii range":
  // any tag bit or any bit above the max char code sends us to the slow case.
  __ test(code_,
          Immediate(kSmiTagMask |
                    ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Set(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point code register contains smi tagged ascii char code.
  // The smi tag doubles the value, so times_half_pointer_size yields the
  // correct pointer-sized element offset into the cache array.
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  // An undefined cache entry means the string has not been materialized yet.
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
4789
4790
// Emits the deferred code for char-from-code: calls the CharFromCode runtime
// function and moves its result into result_, then jumps back to exit_.
// Must only be entered through slow_case_ — the Abort guards (debug-mode
// checks) document that fall-through in or out is a bug.
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(eax)) {
    // The runtime call returns in eax; relocate if the caller asked for a
    // different result register.
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");
}
4807
4808
4809// -------------------------------------------------------------------------
4810// StringCharAtGenerator
4811
// Emits the fast path for String.prototype.charAt as the composition of the
// two sub-generators: first load the character code, then convert it to a
// single-character string. The order matters — the char-code generator's
// exit falls through into the char-from-code fast path.
void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
  char_code_at_generator_.GenerateFast(masm);
  char_from_code_generator_.GenerateFast(masm);
}
4816
4817
// Emits the deferred code for both sub-generators, in the same order as
// GenerateFast so each slow case can resume its corresponding fast path.
void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
}
4823
4824
// Emits the string concatenation stub. Expects the first operand at
// esp[2 * kPointerSize] and the second at esp[1 * kPointerSize]; returns the
// result in eax, popping both arguments. Fast paths (in order): either
// operand empty, two-character symbol-table lookup, cons-string allocation,
// and flat sequential copy; everything else falls back to the runtime or,
// for non-string operands under conversion flags, to an ADD builtin.
void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime, call_builtin;
  Builtins::JavaScript builtin_id = Builtins::ADD;

  // Load the two arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (flags_ == NO_STRING_ADD_FLAGS) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);

    // First argument is a string, test second.
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);
  } else {
    // Here at least one of the arguments is definitely a string.
    // We convert the one that is not known to be a string.
    if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_RIGHT;
    } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_LEFT;
    }
  }

  // Both arguments are strings.
  // eax: first string
  // edx: second string
  // Check if either of the strings are empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  // Smi zero is machine zero, so a plain test works on the tagged length.
  __ test(ecx, Operand(ecx));
  __ j(not_zero, &second_not_zero_length, Label::kNear);
  // Second string is empty, result is first string which is already in eax.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(ebx, Operand(ebx));
  __ j(not_zero, &both_not_zero_length, Label::kNear);
  // First string is empty, result is second string which is in edx.
  __ mov(eax, edx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // eax: first string
  // ebx: length of first string as a smi
  // ecx: length of second string as a smi
  // edx: second string
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);
  __ add(ebx, Operand(ecx));
  STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
  // Handle exceptionally long strings in the runtime system.
  __ j(overflow, &string_add_runtime);
  // Use the symbol table when adding two one character strings, as it
  // helps later optimizations to return a symbol here.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
                                         &string_add_runtime);

  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));

  // Try to lookup two character string in symbol table. If it is not found
  // just allocate a new one.
  Label make_two_character_string, make_two_character_string_no_reload;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi,
      &make_two_character_string_no_reload, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Allocate a two character string.
  __ bind(&make_two_character_string);
  // Reload the arguments (the probe above clobbered eax/edx).
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.
  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
  __ bind(&make_two_character_string_no_reload);
  __ IncrementCounter(counters->string_add_make_two_char(), 1);
  __ AllocateAsciiString(eax,  // Result.
                         2,  // Length.
                         edi,  // Scratch 1.
                         edx,  // Scratch 2.
                         &string_add_runtime);
  // Pack both characters in ebx (first char low byte, second char next byte).
  __ shl(ecx, kBitsPerByte);
  __ or_(ebx, Operand(ecx));
  // Set the characters in the new string with a single 16-bit store.
  __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
  __ j(below, &string_add_flat_result);

  // If result is not supposed to be flat allocate a cons string object. If both
  // strings are ascii the result is an ascii cons string.
  Label non_ascii, allocated, ascii_data;
  __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
  // AND the instance types: the ascii bit survives only if both are ascii.
  __ and_(ecx, Operand(edi));
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
  __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
  __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
         Immediate(String::kEmptyHashField));
  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
  __ mov(eax, ecx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ascii characters.
  // ecx: first instance type AND second instance type.
  // edi: second instance type.
  __ test(ecx, Immediate(kAsciiDataHintMask));
  __ j(not_zero, &ascii_data);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  // XOR detects the mixed case: one ascii string plus one two-byte string
  // that is hinted to hold only ascii data can still make an ascii cons.
  __ xor_(edi, Operand(ecx));
  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
  __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
  __ j(equal, &ascii_data);
  // Allocate a two byte cons string.
  __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&string_add_flat_result);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  // Now check if both strings are ascii strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  Label non_ascii_string_add_flat_result;
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(zero, &non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(zero, &string_add_runtime);

  // Both strings are ascii strings. As they are short they are both flat.
  // ebx: length of resulting flat string as a smi
  __ SmiUntag(ebx);
  __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // eax: first string - known to be two byte
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ SmiUntag(ebx);
  __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  // Load second argument and locate first character.
  // NOTE(review): this uses SeqAsciiString::kHeaderSize in the two-byte
  // path; presumably it relies on SeqAsciiString and SeqTwoByteString
  // sharing the same header size — confirm against objects.h.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  // Only emit the builtin tail call if a conversion path above branched to
  // call_builtin.
  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}
5099
5100
// Converts the stack argument at esp[stack_offset] (also in register arg)
// into a string, if possible, without calling out to JavaScript:
// - already a string: leave it untouched;
// - a number found in the number-to-string cache: substitute the cached
//   string (in register and on the stack);
// - a "safe" JSValue string wrapper: unwrap to the underlying string.
// Anything else branches to slow. scratch1-3 are clobbered.
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ test(arg, Immediate(kSmiTagMask));
  __ j(zero, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  Label not_cached;
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  NumberToStringStub::GenerateLookupNumberStringCache(masm,
                                                      arg,
                                                      scratch1,
                                                      scratch2,
                                                      scratch3,
                                                      false,
                                                      &not_cached);
  __ mov(arg, scratch1);
  // Also replace the argument on the stack so the caller's later reloads
  // from esp see the converted string.
  __ mov(Operand(esp, stack_offset), arg);
  __ jmp(&done);

  // Check if the argument is a safe string wrapper.
  __ bind(&not_cached);
  __ test(arg, Immediate(kSmiTagMask));
  __ j(zero, slow);
  __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1);  // map -> scratch1.
  __ j(not_equal, slow);
  // Only unwrap when the map is marked safe for the default valueOf, i.e.
  // no user code could observe the shortcut.
  __ test_b(FieldOperand(scratch1, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(zero, slow);
  __ mov(arg, FieldOperand(arg, JSValue::kValueOffset));
  __ mov(Operand(esp, stack_offset), arg);

  __ bind(&done);
}
5144
5145
// Emits a simple character-copy loop from src to dest for count characters
// (one byte each if ascii, two bytes otherwise). Advances src and dest past
// the copied data and leaves count at zero; scratch is clobbered.
// count must be non-zero — the loop condition is tested at the bottom.
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for very
  // short strings.
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(1));
    __ add(Operand(dest), Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(2));
    __ add(Operand(dest), Immediate(2));
  }
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);
}
5170
5171
// Emits a bulk character copy using `rep movs` on doubleword units, with a
// byte-wise tail loop for the remaining 0-3 bytes. Register assignments are
// fixed by the rep movs instruction (edi/esi/ecx); count may be zero.
// scratch is clobbered; src/dest end up past the copied data.
void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords.
  // The destination is aligned on a 4 byte boundary because we are
  // copying to the beginning of a newly allocated string.
  ASSERT(dest.is(edi));  // rep movs destination
  ASSERT(src.is(esi));  // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Make count the number of bytes to copy (two-byte chars double it).
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are less than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes, Label::kNear);

  // Copy from edi to esi using rep movs instruction.
  __ mov(scratch, count);
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();
  __ rep_movs();

  // Find number of bytes left (byte count mod 4).
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Copy remaining characters one byte at a time.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(Operand(src), Immediate(1));
  __ add(Operand(dest), Immediate(1));
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}
5230
5231
// Looks up the two-character string formed by |c1| (first char) and |c2|
// (second char) in the symbol table.
//   - Jumps to |not_probed| if both characters are ASCII digits: such
//     strings use the array-index hash and are not probed here.
//   - Jumps to |not_found| if the string is provably absent.
//   - Falls through on success with the symbol in eax.
// All of c1, c2 and the scratch registers are clobbered.
void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_probed,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  // Unsigned "above" also catches characters below '0' (wrapped negative).
  __ j(above, &not_array_index, Label::kNear);
  __ mov(scratch, c2);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_probed);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, kBitsPerByte);
  __ or_(chars, Operand(c2));

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;  // c2 is free now; chars holds both characters.
  ExternalReference roots_address =
      ExternalReference::roots_address(masm->isolate());
  __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
  __ mov(symbol_table,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(Operand(mask), Immediate(1));  // Capacity is a power of two.

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string
  // symbol_table: symbol table
  // mask: capacity mask
  // scratch: -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ mov(scratch, hash);
    if (i > 0) {
      // Quadratic probing offset for probe i.
      __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(mask));

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    STATIC_ASSERT(SymbolTable::kEntrySize == 1);
    __ mov(candidate,
           FieldOperand(symbol_table,
                        scratch,
                        times_pointer_size,
                        SymbolTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    Factory* factory = masm->isolate()->factory();
    __ cmp(candidate, factory->undefined_value());
    __ j(equal, not_found);
    // Null marks a deleted entry; keep probing.
    __ cmp(candidate, factory->null_value());
    __ j(equal, &next_probe[i]);

    // If length is not 2 the string is not a candidate.
    __ cmp(FieldOperand(candidate, String::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &next_probe[i]);

    // As we are out of registers save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ascii string.
    __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ and_(temp, 0x0000ffff);  // Only the first two characters matter.
    __ cmp(chars, Operand(temp));
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);  // Restore mask clobbered as |temp| above.
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  __ pop(mask);  // Pop saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}
5356
5357
// Emits the first step of the string hash for the initial |character|:
//   hash = character + (character << 10);
//   hash ^= hash >> 6;
// Must agree with the runtime's string hash computation.  |scratch| is
// clobbered; |character| is preserved.
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ mov(hash, character);
  __ shl(hash, 10);
  __ add(hash, Operand(character));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}
5371
5372
// Emits the per-character mixing step of the string hash:
//   hash += character;
//   hash += hash << 10;
//   hash ^= hash >> 6;
// |scratch| is clobbered; |character| is preserved.
void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, Operand(character));
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}
5388
5389
// Emits the finalization step of the string hash:
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   if (hash == 0) hash = 27;   // Zero is reserved / not a valid hash.
// |scratch| is clobbered.
void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ sar(scratch, 11);
  __ xor_(hash, Operand(scratch));
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, Operand(scratch));

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ test(hash, Operand(hash));
  __ j(not_zero, &hash_not_zero, Label::kNear);
  __ mov(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
5413
5414
// Implements String.prototype.substring for flat sequential strings.
// Fast paths:
//   - substring == whole string: the original string is returned as-is.
//   - length 2: probed in the symbol table before allocating.
//   - flat ascii / flat two-byte: allocate and copy with rep movs.
// Everything else (length 1, cons/external strings, non-smi indices) is
// punted to the Runtime::kSubString runtime function.
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: to
  // esp[8]: from
  // esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type

  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);  // Bail out on non-smi "to".
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);  // Bail out on non-smi "from".
  __ sub(ecx, Operand(edx));  // ecx = to - from = result length (smi).
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
  Label return_eax;
  // Result covers the whole string: return the input string unchanged.
  __ j(equal, &return_eax);
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked for in the symbol cache.
  __ SmiUntag(ecx);  // Result length is no longer smi.
  __ cmp(ecx, 2);
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);  // Length 1 (or 0) goes to the runtime.

  // Sub string of length 2 requested.
  // eax: string
  // ebx: instance type
  // ecx: sub string length (value is 2)
  // edx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiUntag(edx);  // From index is no longer smi.
  __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx,
             FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to lookup two character string in symbol table.  On success the
  // probe falls through with the symbol in eax and we return directly.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi,
      &make_two_character_string, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string: reload the
  // string and its instance type (the probe clobbered them) and fall into
  // the generic flat-string path with length 2.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ Set(ecx, Immediate(2));

  __ bind(&result_longer_than_two);
  // eax: string
  // ebx: instance type
  // ecx: result string length
  // Check for flat ascii string
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  __ SmiUntag(ebx);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
  __ mov(esi, edx);  // Restore esi.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  __ bind(&non_ascii_flat);
  // eax: string
  // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
  // ecx: result string length
  // Check for flat two byte string
  __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  // As from is a smi it is 2 times the value which matches the size of a two
  // byte character.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
  __ mov(esi, edx);  // Restore esi.

  __ bind(&return_eax);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}
5562
5563
// Emits an equality-only comparison of two flat ascii strings.  Returns
// (in eax, as a smi) EQUAL or NOT_EQUAL and pops no arguments (ret 0).
// Compares lengths first, short-circuits on zero length, then compares
// characters.  |left|, |right| and both scratch registers are clobbered.
void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ mov(length, FieldOperand(left, String::kLengthOffset));
  __ cmp(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);
  __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);  // A zero smi length tests as zero.
  __ test(length, Operand(length));
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);
}
5598
5599
// Emits a three-way (LESS/EQUAL/GREATER) comparison of two flat ascii
// strings.  Compares up to the shorter length; if those characters are
// equal, the length difference decides the result.  Returns the result in
// eax as a smi and pops no arguments.  All scratch registers clobbered.
void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  // NOTE: the branch below consumes the flags from the sub above; no
  // flag-clobbering instruction may be emitted in between.
  __ j(less_equal, &left_shorter, Label::kNear);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, Operand(length_delta));
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  Label compare_lengths;
  __ test(min_length, Operand(min_length));
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare characters.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  // Compare lengths -  strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, Operand(length_delta));
  __ j(not_zero, &result_not_equal, Label::kNear);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // result_not_equal is reached either from the char-compare loop (flags
  // from cmpb) or from the length-delta test; "greater" is valid for both.
  Label result_greater;
  __ bind(&result_not_equal);
  __ j(greater, &result_greater, Label::kNear);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}
5658
5659
// Emits a byte-wise comparison loop over |length| (smi) characters of two
// flat ascii strings.  Jumps to |chars_not_equal| on the first mismatch
// (with the cmpb flags still set, so callers may branch on greater/less);
// falls through when all characters match.  |left|, |right|, |length| and
// |scratch| are clobbered.
void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiUntag(length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
  __ add(Operand(index), Immediate(1));
  __ j(not_zero, &loop);  // Index reached zero: all characters compared.
}
5688
5689
// Entry point of the string compare stub.  Takes two strings on the stack,
// returns -1/0/1 (as a smi in eax) for left </==/> right.  Identical
// objects and flat ascii pairs are handled inline; everything else goes to
// the Runtime::kStringCompare runtime function.
void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: right string
  // esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  // Identical objects compare equal without looking at contents.
  Label not_same;
  __ cmp(edx, Operand(eax));
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ascii strings.
  // Drop arguments from the stack.
  __ pop(ecx);  // Return address.
  __ add(Operand(esp), Immediate(2 * kPointerSize));
  __ push(ecx);
  // Does not return; emits its own ret instructions.
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}
5727
Ben Murdochb0fe1622011-05-05 13:52:32 +01005728
// Compare IC fast path for two smis (operands in edx and eax).  Returns
// the comparison result in eax: for equality just the difference; for
// relational comparisons a value with the correct sign.  Falls back to
// GenerateMiss if either operand is not a smi.
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  Label miss;
  // If either operand is a heap object the or'ed tag bits are non-zero.
  __ mov(ecx, Operand(edx));
  __ or_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, Operand(edx));
  } else {
    Label done;
    __ sub(edx, Operand(eax));
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow: flipping all bits flips
    // the sign bit, giving a result with the right sign for comparison.
    __ not_(edx);
    __ bind(&done);
    __ mov(eax, edx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
5754
5755
// Compare IC fast path for two heap numbers (operands in edx and eax).
// Compares with SSE2 ucomisd when SSE2 and CMOV are available, returning
// -1/0/1 in eax; otherwise (or for NaN operands) defers to the generic
// CompareStub.  Misses if either operand is a smi or not a heap number.
void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  Label generic_stub;
  Label unordered;
  Label miss;
  // Smi check: if either operand is a smi the and'ed tag bits are zero.
  __ mov(ecx, Operand(edx));
  __ and_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(zero, &generic_stub, Label::kNear);

  __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);

  // Inlining the double comparison and falling back to the general compare
  // stub if NaN is involved or SSE2 or CMOV is unsupported.
  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
    CpuFeatures::Scope scope1(SSE2);
    CpuFeatures::Scope scope2(CMOV);

    // Load left and right operand
    __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));

    // Compare operands
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);

    // Return a result of -1, 0, or 1, based on EFLAGS.
    // Performing mov, because xor would destroy the flag register.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, Operand(ecx));
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, Operand(ecx));
    __ ret(0);

    __ bind(&unordered);
  }
  // Without SSE2/CMOV support control falls through to the generic stub.

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}
5807
5808
// Compare IC fast path for two symbols (operands in edx and eax).  Symbols
// are interned, so they can be compared by identity alone.  Only generated
// for equality comparisons.  Misses if either operand is a smi or not a
// symbol.
void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SYMBOLS);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, Operand(left));
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, Operand(right));
  __ test(tmp1, Immediate(kSmiTagMask));
  __ j(zero, &miss, Label::kNear);  // At least one operand is a smi.

  // Check that both operands are symbols.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kSymbolTag != 0);
  // The symbol bit must be set in both instance types.
  __ and_(tmp1, Operand(tmp2));
  __ test(tmp1, Immediate(kIsSymbolMask));
  __ j(zero, &miss, Label::kNear);

  // Symbols are compared by identity.
  Label done;
  __ cmp(left, Operand(right));
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
5853
5854
// Compare IC fast path for two strings (operands in edx and eax).  Only
// generated for equality.  Handles identity, symbol-pair inequality, and
// flat ascii content comparison inline; everything else goes to the
// Runtime::kStringEquals runtime function.
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRINGS);
  ASSERT(GetCondition() == equal);
  Label miss;

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects.
  __ mov(tmp1, Operand(left));
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, Operand(right));
  __ test(tmp1, Immediate(kSmiTagMask));
  __ j(zero, &miss);  // At least one operand is a smi.

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ mov(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ or_(tmp3, Operand(tmp2));
  __ test(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, Operand(right));
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are symbols. If they are, we're done
  // because we already know they are not identical.
  Label do_compare;
  STATIC_ASSERT(kSymbolTag != 0);
  __ and_(tmp1, Operand(tmp2));
  __ test(tmp1, Immediate(kIsSymbolMask));
  __ j(zero, &do_compare, Label::kNear);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ ret(0);  // Non-zero eax signals NOT_EQUAL.

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ bind(&do_compare);
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  StringCompareStub::GenerateFlatAsciiStringEquals(
      masm, left, right, tmp1, tmp2);

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ pop(tmp1);  // Return address.
  __ push(left);
  __ push(right);
  __ push(tmp1);
  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);

  __ bind(&miss);
  GenerateMiss(masm);
}
5930
5931
// Compare IC fast path for two JS objects (operands in edx and eax).
// Objects are compared by identity: eax - edx is zero iff they are the
// same object.  Only generated for equality.  Misses if either operand is
// a smi or not a JS object.
void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECTS);
  Label miss;
  // If either operand is a smi the and'ed tag bits are zero.
  __ mov(ecx, Operand(edx));
  __ and_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(zero, &miss, Label::kNear);

  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);

  ASSERT(GetCondition() == equal);
  __ sub(eax, Operand(edx));  // Zero iff identical objects.
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
5952
5953
// Compare IC miss handler.  Calls the kCompareIC_Miss runtime entry (which
// returns the code object of a rewritten, more specialized stub), restores
// the original operands in edx/eax, and tail-calls the new stub's body.
void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Save the registers.
  __ pop(ecx);   // Return address.
  __ push(edx);
  __ push(eax);
  __ push(ecx);  // Re-push return address above the saved operands.

  // Call the runtime system in a fresh internal frame.
  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
                                             masm->isolate());
  __ EnterInternalFrame();
  __ push(edx);
  __ push(eax);
  __ push(Immediate(Smi::FromInt(op_)));  // The comparison operation.
  __ CallExternalReference(miss, 3);
  __ LeaveInternalFrame();

  // Compute the entry point of the rewritten stub.
  __ lea(edi, FieldOperand(eax, Code::kHeaderSize));

  // Restore registers.
  __ pop(ecx);   // Return address.
  __ pop(eax);
  __ pop(edx);
  __ push(ecx);
  // NOTE(review): edi is clobbered here; presumably no caller relies on it
  // across a compare IC — confirm against the IC calling convention.

  // Do a tail call to the rewritten stub.
  __ jmp(Operand(edi));
}
5983
5984
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so the |miss|
// label must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
// Emits |kInlinedProbes| inline probes; if still inconclusive, calls the
// StringDictionaryLookupStub (whose code object allocation can fail, hence
// the MaybeObject* return).  Jumps to |done| when the name is proven
// absent and to |miss| when it may be present.  |r0| is clobbered.
MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
    MacroAssembler* masm,
    Label* miss,
    Label* done,
    Register properties,
    String* name,
    Register r0) {
  ASSERT(name->IsSymbol());

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    // The hash and probe offset are known at compile time, so the masked
    // index is computed with a single and against an immediate.
    __ and_(Operand(index),
            Immediate(Smi::FromInt(name->Hash() +
                                   StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    // index is a smi, so scale by half a pointer to get a pointer offset.
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ cmp(entity_name, Handle<String>(name));
    __ j(equal, miss);

    // Check if the entry name is not a symbol.
    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
              kIsSymbolMask);
    __ j(zero, miss);  // Non-symbol key: inline probing is inconclusive.
  }

  // Inline probes were inconclusive; fall back to the full lookup stub.
  StringDictionaryLookupStub stub(properties,
                                  r0,
                                  r0,
                                  StringDictionaryLookupStub::NEGATIVE_LOOKUP);
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  MaybeObject* result = masm->TryCallStub(&stub);
  if (result->IsFailure()) return result;  // Stub code allocation failed.
  // The stub returns its result in r0: non-zero means "may be present".
  __ test(r0, Operand(r0));
  __ j(not_zero, miss);
  __ jmp(done);
  return result;
}
6049
6050
// Probe the string dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise.
//
// Unlike GenerateNegativeLookup, the name is only known at run time, so the
// hash is loaded from the string's hash field.  |r0| and |r1| are clobbered
// as scratch.
void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register elements,
                                                        Register name,
                                                        Register r0,
                                                        Register r1) {
  // Assert that name contains a string.
  if (FLAG_debug_code) __ AbortIfNotString(name);

  // r1 = capacity - 1, the probe mask (capacity is a power-of-two smi).
  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
    __ shr(r0, String::kHashShift);
    if (i > 0) {
      __ add(Operand(r0), Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, Operand(r1));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.  times_4 is pointer size
    // here because r0 holds an untagged entry index.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // Inlined probes missed: fall back to the full lookup stub with the
  // result in r1 and the found index left in r0.
  StringDictionaryLookupStub stub(elements,
                                  r1,
                                  r0,
                                  POSITIVE_LOOKUP);
  // The stub expects the key and its (untagged) hash on the stack.
  __ push(name);
  __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
  __ shr(r0, String::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  // Stub returns zero in r1 when the name was not found.
  __ test(r1, Operand(r1));
  __ j(zero, miss);
  __ jmp(done);
}
6107
6108
// Generates the out-of-line dictionary lookup stub body: runs the remaining
// probes (kInlinedProbes..kTotalProbes-1) of the quadratic probe sequence
// over the whole dictionary.
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: StringDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  // Compute the probe mask (capacity - 1) and save it on the stack.
  // Note: dec on the smi then SmiUntag yields capacity - 1 untagged.
  __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);
  // Stack layout from here on:
  //  esp[0 * kPointerSize]: probe mask (capacity - 1).
  //  esp[1 * kPointerSize]: return address.
  //  esp[2 * kPointerSize]: key's hash.
  //  esp[3 * kPointerSize]: key.

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    if (i > 0) {
      __ add(Operand(scratch),
             Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ mov(scratch, Operand(dictionary_,
                            index_,
                            times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a non symbol key during negative lookup
      // we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a symbol.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ test_b(FieldOperand(scratch, Map::kInstanceTypeOffset),
                kIsSymbolMask);
      __ j(zero, &maybe_in_dictionary);
    }
  }
  // All probes exhausted without a conclusive answer: treat as
  // maybe_in_dictionary (falls through).

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ mov(result_, Immediate(0));
    __ Drop(1);  // Pop the saved probe mask.
    __ ret(2 * kPointerSize);  // Also pops the hash and key arguments.
  }
  // For NEGATIVE_LOOKUP, fall through into in_dictionary: reporting
  // "found" makes the caller take the conservative miss path.

  __ bind(&in_dictionary);
  __ mov(result_, Immediate(1));
  __ Drop(1);  // Pop the saved probe mask.
  __ ret(2 * kPointerSize);  // Also pops the hash and key arguments.

  __ bind(&not_in_dictionary);
  __ mov(result_, Immediate(0));
  __ Drop(1);  // Pop the saved probe mask.
  __ ret(2 * kPointerSize);  // Also pops the hash and key arguments.
}
6195
6196
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006197#undef __
6198
6199} } // namespace v8::internal
6200
6201#endif // V8_TARGET_ARCH_IA32