// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_IA32_CODEGEN_IA32_H_
#define V8_IA32_CODEGEN_IA32_H_

#include "ast.h"
#include "ic-inl.h"
#include "jump-target-heavy.h"

namespace v8 {
namespace internal {

// Forward declarations
class CompilationInfo;
class DeferredCode;
class FrameRegisterState;
class RegisterAllocator;
class RegisterFile;
class RuntimeCallHelper;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };


// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that puts a
// reference on the virtual frame.  The reference may be consumed
// by GetValue, TakeValue and SetValue.
// When the lifetime (scope) of a valid reference ends, it must have
// been consumed, and be in state UNLOADED.
class Reference BASE_EMBEDDED {
 public:
  // The values of the types are important, see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }
  // The size the reference takes up on the stack.
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name.  Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack.  The reference is expected to be already on top of
  // the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Like GetValue except that the slot is expected to be written to before
  // being read from again.  The value of the reference may be invalidated,
  // causing subsequent attempts to read it to fail.
  void TakeValue();

  // Generate code to store the value on top of the expression stack in the
  // reference.  The reference is expected to be immediately below the value
  // on the expression stack.  The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state);

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  // Keep the reference on the stack after get, so it can be used by set later.
  bool persist_after_get_;
};
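
// Illustrative usage sketch (added for clarity; not part of the original
// header).  Based on the contract above, a plain assignment inside the code
// generator would be driven roughly like this, with `node` standing in for
// an Assignment AST node:
//
//   { Reference target(this, node->target());
//     Load(node->value());              // Value ends up on top of the frame,
//                                       // directly above the reference.
//     target.SetValue(NOT_CONST_INIT);  // Store and unload the reference.
//   }  // A valid reference must be UNLOADED when it goes out of scope.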


// -------------------------------------------------------------------------
// Control destinations.

// A control destination encapsulates a pair of jump targets and a
// flag indicating which one is the preferred fall-through.  The
// preferred fall-through must be unbound, the other may be already
// bound (ie, a backward target).
//
// The true and false targets may be jumped to unconditionally or
// control may split conditionally.  Unconditional jumping and
// splitting should be emitted in tail position (as the last thing
// when compiling an expression) because they can cause either label
// to be bound or the non-fall through to be jumped to, leaving an
// invalid virtual frame.
//
// The labels in the control destination can be extracted and
// manipulated normally without affecting the state of the
// destination.

class ControlDestination BASE_EMBEDDED {
 public:
  ControlDestination(JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool true_is_fall_through)
      : true_target_(true_target),
        false_target_(false_target),
        true_is_fall_through_(true_is_fall_through),
        is_used_(false) {
    ASSERT(true_is_fall_through ? !true_target->is_bound()
                                : !false_target->is_bound());
  }

  // Accessors for the jump targets.  Directly jumping or branching to
  // or binding the targets will not update the destination's state.
  JumpTarget* true_target() const { return true_target_; }
  JumpTarget* false_target() const { return false_target_; }

  // True if the destination has been jumped to unconditionally or
  // control has been split to both targets.  This predicate does not
  // test whether the targets have been extracted and manipulated as
  // raw jump targets.
  bool is_used() const { return is_used_; }

  // True if the destination is used and the true target (respectively
  // false target) was the fall through.  If the target is backward,
  // "fall through" includes jumping unconditionally to it.
  bool true_was_fall_through() const {
    return is_used_ && true_is_fall_through_;
  }

  bool false_was_fall_through() const {
    return is_used_ && !true_is_fall_through_;
  }

  // Emit a branch to one of the true or false targets, and bind the
  // other target.  Because this binds the fall-through target, it
  // should be emitted in tail position (as the last thing when
  // compiling an expression).
  void Split(Condition cc) {
    ASSERT(!is_used_);
    if (true_is_fall_through_) {
      false_target_->Branch(NegateCondition(cc));
      true_target_->Bind();
    } else {
      true_target_->Branch(cc);
      false_target_->Bind();
    }
    is_used_ = true;
  }

  // Emit an unconditional jump in tail position, to the true target
  // (if the argument is true) or the false target.  The "jump" will
  // actually bind the jump target if it is forward, and jump to it if
  // it is backward.
  void Goto(bool where) {
    ASSERT(!is_used_);
    JumpTarget* target = where ? true_target_ : false_target_;
    if (target->is_bound()) {
      target->Jump();
    } else {
      target->Bind();
    }
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Mark this jump target as used as if Goto had been called, but
  // without generating a jump or binding a label (the control effect
  // should have already happened).  This is used when the left
  // subexpression of a short-circuit boolean operator is compiled.
  void Use(bool where) {
    ASSERT(!is_used_);
    ASSERT((where ? true_target_ : false_target_)->is_bound());
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Swap the true and false targets but keep the same actual label as
  // the fall through.  This is used when compiling negated
  // expressions, where we want to swap the targets but preserve the
  // state.
  void Invert() {
    JumpTarget* temp_target = true_target_;
    true_target_ = false_target_;
    false_target_ = temp_target;

    true_is_fall_through_ = !true_is_fall_through_;
  }

 private:
  // True and false jump targets.
  JumpTarget* true_target_;
  JumpTarget* false_target_;

  // Before using the destination: true if the true target is the
  // preferred fall through, false if the false target is.  After
  // using the destination: true if the true target was actually used
  // as the fall through, false if the false target was.
  bool true_is_fall_through_;

  // True if the Split or Goto functions have been called.
  bool is_used_;
};


// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the jump target pair).  It is threaded through
// the call stack.  Constructing a state implicitly pushes it on the owning
// code generator's stack of states, and destroying one implicitly pops it.
//
// The code generator state is only used for expressions, so statements have
// the initial state.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state.  Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Create a code generator state based on a code generator's current
  // state.  The new state has its own control destination.
  CodeGenState(CodeGenerator* owner, ControlDestination* destination);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  ~CodeGenState();

  // Accessors for the state.
  ControlDestination* destination() const { return destination_; }

 private:
  // The owning code generator.
  CodeGenerator* owner_;

  // A control destination in case the expression has a control-flow
  // effect.
  ControlDestination* destination_;

  // The previous state of the owning code generator, restored when
  // this state is destroyed.
  CodeGenState* previous_;
};
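
// Illustrative sketch (added for clarity; not part of the original header):
// because the constructor pushes the new state and the destructor pops it,
// a nested control destination is installed for exactly the extent of a
// block inside a CodeGenerator method, e.g.:
//
//   { CodeGenState new_state(this, destination);
//     Visit(expr);  // Compiled with the new state's destination.
//   }               // The previous state is restored here.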


// -------------------------------------------------------------------------
// Arguments allocation mode.

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};


// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  // Takes a function literal, generates code for it.  This function should
  // only be called by compiler.cc.
  static Handle<Code> MakeCode(CompilationInfo* info);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(CompilationInfo* info);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
                                       Code::Flags flags,
                                       CompilationInfo* info);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static bool RecordPositions(MacroAssembler* masm,
                              int pos,
                              bool right_here = false);

  // Accessors
  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  inline Handle<Script> script();

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers.  The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  void DeleteFrame();

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  bool in_spilled_code() const { return in_spilled_code_; }
  void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }

  // If the name is an inline runtime function call, return the number of
  // expected arguments.  Otherwise return -1.
  static int InlineRuntimeCallArgumentsCount(Handle<String> name);

  // Return the operand for the element at |index_as_smi| + |additional_offset|
  // in the FixedArray pointed to by |array|.  |index_as_smi| must be a Smi.
  static Operand FixedArrayElementOperand(Register array,
                                          Register index_as_smi,
                                          int additional_offset = 0) {
    int offset = FixedArray::kHeaderSize + additional_offset * kPointerSize;
    return FieldOperand(array, index_as_smi, times_half_pointer_size, offset);
  }
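
  // Worked example (added for clarity; not in the original source): on ia32 a
  // Smi is the integer value shifted left by one, so scaling the Smi index by
  // times_half_pointer_size (x2) yields index * kPointerSize (x4).  With
  // additional_offset == 0 the resulting operand is
  //   [array + index_as_smi * 2 + FixedArray::kHeaderSize - kHeapObjectTag],
  // where FieldOperand supplies the -kHeapObjectTag adjustment for the
  // untagged heap-object pointer.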

 private:
  // Construction/Destruction
  explicit CodeGenerator(MacroAssembler* masm);

  // Accessors
  inline bool is_eval();
  inline Scope* scope();

  // Generating deferred code.
  void ProcessDeferred();

  // State
  ControlDestination* destination() const { return state_->destination(); }

  // Control of side-effect-free int32 expression compilation.
  bool in_safe_int32_mode() { return in_safe_int32_mode_; }
  void set_in_safe_int32_mode(bool value) { in_safe_int32_mode_ = value; }
  bool safe_int32_mode_enabled() {
    return FLAG_safe_int32_compiler && safe_int32_mode_enabled_;
  }
  void set_safe_int32_mode_enabled(bool value) {
    safe_int32_mode_enabled_ = value;
  }
  void set_unsafe_bailout(BreakTarget* unsafe_bailout) {
    unsafe_bailout_ = unsafe_bailout;
  }

  // Take the Result that is an untagged int32, and convert it to a tagged
  // Smi or HeapNumber.  Remove the untagged_int32 flag from the result.
  void ConvertInt32ResultToNumber(Result* value);
  void ConvertInt32ResultToSmi(Result* value);

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }

  // Node visitors.
  void VisitStatements(ZoneList<Statement*>* statements);

#define DEF_VISIT(type) \
  void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Visit a statement and then spill the virtual frame if control flow can
  // reach the end of the statement (ie, it does not exit via break,
  // continue, return, or throw).  This function is used temporarily while
  // the code generator is being transformed.
  void VisitAndSpill(Statement* statement);

  // Visit a list of statements and then spill the virtual frame if control
  // flow can reach the end of the list.
  void VisitStatementsAndSpill(ZoneList<Statement*>* statements);

  // Main code generation function
  void Generate(CompilationInfo* info);

  // Generate the return sequence code.  Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once).
  void GenerateReturnSequence(Result* return_value);

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode();

  // Store the arguments object and allocate it if necessary.
  Result StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);

  static Operand ContextOperand(Register context, int index) {
    return Operand(context, Context::SlotOffset(index));
  }

  Operand SlotOperand(Slot* slot, Register tmp);

  Operand ContextSlotOperandCheckExtensions(Slot* slot,
                                            Result tmp,
                                            JumpTarget* slow);

  // Expressions
  static Operand GlobalObject() {
    return ContextOperand(esi, Context::GLOBAL_INDEX);
  }

  void LoadCondition(Expression* expr,
                     ControlDestination* destination,
                     bool force_control);
  void Load(Expression* expr);
  void LoadGlobal();
  void LoadGlobalReceiver();

  // Generate code to push the value of an expression on top of the frame
  // and then spill the frame fully to memory.  This function is used
  // temporarily while the code generator is being transformed.
  void LoadAndSpill(Expression* expression);

  // Evaluate an expression and place its value on top of the frame,
  // using, or not using, the side-effect-free expression compiler.
  void LoadInSafeInt32Mode(Expression* expr, BreakTarget* unsafe_bailout);
  void LoadWithSafeInt32ModeDisabled(Expression* expr);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState typeof_state);
  Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                           TypeofState typeof_state,
                                           JumpTarget* slow);

  // Support for loading from local/global variables and arguments
  // whose location is known unless they are shadowed by
  // eval-introduced bindings.  Generates no code for unsupported slot
  // types and therefore expects to fall through to the slow jump target.
  void EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                       TypeofState typeof_state,
                                       Result* result,
                                       JumpTarget* slow,
                                       JumpTarget* done);

  // Store the value on top of the expression stack into a slot, leaving the
  // value in place.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Support for compiling assignment expressions.
  void EmitSlotAssignment(Assignment* node);
  void EmitNamedPropertyAssignment(Assignment* node);
  void EmitKeyedPropertyAssignment(Assignment* node);

  // Receiver is passed on the frame and consumed.
  Result EmitNamedLoad(Handle<String> name, bool is_contextual);

  // If the store is contextual, value is passed on the frame and consumed.
  // Otherwise, receiver and value are passed on the frame and consumed.
  Result EmitNamedStore(Handle<String> name, bool is_contextual);

  // Receiver and key are passed on the frame and consumed.
  Result EmitKeyedLoad();

  // Receiver, key, and value are passed on the frame and consumed.
  Result EmitKeyedStore(StaticType* key_type);

  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions.  We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);

  // Translate the value on top of the frame into control flow to the
  // control destination.
  void ToBoolean(ControlDestination* destination);

  // Generate code that computes a shortcutting logical operation.
  void GenerateLogicalBooleanOperation(BinaryOperation* node);

  void GenericBinaryOperation(BinaryOperation* expr,
                              OverwriteMode overwrite_mode);

  // Emits a code sequence that jumps to a JumpTarget if the inputs
  // are both smis.  Cannot be in MacroAssembler because it takes
  // advantage of TypeInfo to skip unneeded checks.
  // Allocates a temporary register, possibly spilling from the frame,
  // if it needs to check both left and right.
  void JumpIfBothSmiUsingTypeInfo(Result* left,
                                  Result* right,
                                  JumpTarget* both_smi);

  // Emits a code sequence that jumps to deferred code if the inputs
  // are not both smis.  Cannot be in MacroAssembler because it takes
  // advantage of TypeInfo to skip unneeded checks.
  void JumpIfNotBothSmiUsingTypeInfo(Register left,
                                     Register right,
                                     Register scratch,
                                     TypeInfo left_info,
                                     TypeInfo right_info,
                                     DeferredCode* deferred);

  // If possible, combine two constant smi values using op to produce
  // a smi result, and push it on the virtual frame, all at compile time.
  // Returns true if it succeeds.  Otherwise it has no effect.
  bool FoldConstantSmis(Token::Value op, int left, int right);

  // Emit code to perform a binary operation on a constant
  // smi and a likely smi.  Consumes the Result operand.
  Result ConstantSmiBinaryOperation(BinaryOperation* expr,
                                    Result* operand,
                                    Handle<Object> constant_operand,
                                    bool reversed,
                                    OverwriteMode overwrite_mode);

  // Emit code to perform a binary operation on two likely smis.
  // The code to handle smi arguments is produced inline.
  // Consumes the Results left and right.
  Result LikelySmiBinaryOperation(BinaryOperation* expr,
                                  Result* left,
                                  Result* right,
                                  OverwriteMode overwrite_mode);


  // Emit code to perform a binary operation on two untagged int32 values.
  // The values are on top of the frame, and the result is pushed on the frame.
  void Int32BinaryOperation(BinaryOperation* node);


  void Comparison(AstNode* node,
                  Condition cc,
                  bool strict,
                  ControlDestination* destination);

  // If at least one of the sides is a constant smi, generate optimized code.
  void ConstantSmiComparison(Condition cc,
                             bool strict,
                             ControlDestination* destination,
                             Result* left_side,
                             Result* right_side,
                             bool left_side_constant_smi,
                             bool right_side_constant_smi,
                             bool is_loop_condition);

  void GenerateInlineNumberComparison(Result* left_side,
                                      Result* right_side,
                                      Condition cc,
                                      ControlDestination* dest);

  // To prevent long attacker-controlled byte sequences, integer constants
  // from the JavaScript source are loaded in two parts if they are larger
  // than 17 bits.
  static const int kMaxSmiInlinedBits = 17;
  bool IsUnsafeSmi(Handle<Object> value);
  // Load an integer constant x into a register target or into the stack using
  // at most 16 bits of user-controlled data per assembly operation.
  void MoveUnsafeSmi(Register target, Handle<Object> value);
  void StoreUnsafeSmiToLocal(int offset, Handle<Object> value);
  void PushUnsafeSmi(Handle<Object> value);

  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);

  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).  We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);

  void CheckStack();

  struct InlineRuntimeLUT {
    void (CodeGenerator::*method)(ZoneList<Expression*>*);
    const char* name;
    int nargs;
  };

  static InlineRuntimeLUT* FindInlineRuntimeLUT(Handle<String> name);
  bool CheckForInlineRuntimeCall(CallRuntime* node);
  static bool PatchInlineRuntimeEntry(Handle<String> name,
                                      const InlineRuntimeLUT& new_entry,
                                      InlineRuntimeLUT* old_entry);

  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);

  static Handle<Code> ComputeKeyedCallInitialize(int argc, InLoopFlag in_loop);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function based on the shared function info.
  Result InstantiateFunction(Handle<SharedFunctionInfo> function_info);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsRegExp(ZoneList<Expression*>* args);
  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsSpecObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArguments(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateStringCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for String.fromCharCode.
  void GenerateStringCharFromCode(ZoneList<Expression*>* args);

  // Fast support for string.charAt(n) and string[n].
  void GenerateStringCharAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  void GenerateGetFramePointer(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomHeapNumber(ZoneList<Expression*>* args);

  // Fast support for StringAdd.
  void GenerateStringAdd(ZoneList<Expression*>* args);

  // Fast support for SubString.
  void GenerateSubString(ZoneList<Expression*>* args);

  // Fast support for StringCompare.
  void GenerateStringCompare(ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  void GenerateRegExpConstructResult(ZoneList<Expression*>* args);

  // Support for fast native caches.
  void GenerateGetFromCache(ZoneList<Expression*>* args);

  // Fast support for number to string.
  void GenerateNumberToString(ZoneList<Expression*>* args);

  // Fast swapping of elements.  Takes three expressions, the object and two
  // indices.  This should only be used if the indices are known to be
  // non-negative and within bounds of the elements array at the call site.
  void GenerateSwapElements(ZoneList<Expression*>* args);

  // Fast call for custom callbacks.
  void GenerateCallFunction(ZoneList<Expression*>* args);

  // Fast call to math functions.
  void GenerateMathPow(ZoneList<Expression*>* args);
  void GenerateMathSin(ZoneList<Expression*>* args);
  void GenerateMathCos(ZoneList<Expression*>* args);
  void GenerateMathSqrt(ZoneList<Expression*>* args);

  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);

  // Methods used to indicate which source code is generated for.  Source
  // positions are collected by the assembler and emitted with the relocation
  // information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* stmt);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);

  void SetTypeForStackSlot(Slot* slot, TypeInfo info);

#ifdef DEBUG
  // True if the registers are valid for entry to a block.  There should
  // be no frame-external references to (non-reserved) registers.
  bool HasValidEntryRegisters();
#endif

  ZoneList<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  CompilationInfo* info_;

  // Code generation state
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  CodeGenState* state_;
  int loop_nesting_;
  bool in_safe_int32_mode_;
  bool safe_int32_mode_enabled_;

  // Jump targets.
  // The target of the return from the function.
  BreakTarget function_return_;
  // The target of the bailout from a side-effect-free int32 subexpression.
  BreakTarget* unsafe_bailout_;

  // True if the function return is shadowed (ie, jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // True when we are in code that expects the virtual frame to be fully
  // spilled.  Some virtual frame functions are disabled in DEBUG builds when
  // called from spilled code, because they do not leave the virtual frame
  // in a spilled state.
  bool in_spilled_code_;

  static InlineRuntimeLUT kInlineRuntimeLUT[];

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class Result;
  friend class FastCodeGenerator;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;

  friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public CodeStub {
 public:
  explicit TranscendentalCacheStub(TranscendentalCache::Type type)
      : type_(type) {}
  void Generate(MacroAssembler* masm);
 private:
  TranscendentalCache::Type type_;
  Major MajorKey() { return TranscendentalCache; }
  int MinorKey() { return type_; }
  Runtime::FunctionId RuntimeFunction();
  void GenerateOperation(MacroAssembler* masm);
};
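
// Illustrative sketch (added for clarity; not part of the original header):
// the code generator would typically instantiate this stub with the cache
// type for the function being inlined and call it through the virtual
// frame.  This assumes a VirtualFrame::CallStub(CodeStub*, int arg_count)
// helper, which is not declared in this header:
//
//   TranscendentalCacheStub stub(TranscendentalCache::SIN);
//   Result result = frame_->CallStub(&stub, 1);  // One argument on the frame.
//   frame_->Push(&result);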


// Flag that indicates how to generate code for the stub GenericBinaryOpStub.
enum GenericBinaryFlags {
  NO_GENERIC_BINARY_FLAGS = 0,
  NO_SMI_CODE_IN_STUB = 1 << 0  // Omit smi code in stub.
};


class GenericBinaryOpStub: public CodeStub {
 public:
  GenericBinaryOpStub(Token::Value op,
                      OverwriteMode mode,
                      GenericBinaryFlags flags,
                      TypeInfo operands_type)
      : op_(op),
        mode_(mode),
        flags_(flags),
        args_in_registers_(false),
        args_reversed_(false),
        static_operands_type_(operands_type),
        runtime_operands_type_(BinaryOpIC::DEFAULT),
        name_(NULL) {
    if (static_operands_type_.IsSmi()) {
      mode_ = NO_OVERWRITE;
    }
    use_sse3_ = CpuFeatures::IsSupported(SSE3);
    ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
  }

  GenericBinaryOpStub(int key, BinaryOpIC::TypeInfo runtime_operands_type)
      : op_(OpBits::decode(key)),
        mode_(ModeBits::decode(key)),
        flags_(FlagBits::decode(key)),
        args_in_registers_(ArgsInRegistersBits::decode(key)),
        args_reversed_(ArgsReversedBits::decode(key)),
        use_sse3_(SSE3Bits::decode(key)),
        static_operands_type_(TypeInfo::ExpandedRepresentation(
            StaticTypeInfoBits::decode(key))),
        runtime_operands_type_(runtime_operands_type),
        name_(NULL) {
  }

  // Generate code to call the stub with the supplied arguments.  This will add
  // code at the call site to prepare arguments either in registers or on the
  // stack together with the actual call.
  void GenerateCall(MacroAssembler* masm, Register left, Register right);
  void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
  void GenerateCall(MacroAssembler* masm, Smi* left, Register right);

  Result GenerateCall(MacroAssembler* masm,
                      VirtualFrame* frame,
                      Result* left,
                      Result* right);

 private:
  Token::Value op_;
  OverwriteMode mode_;
  GenericBinaryFlags flags_;
  bool args_in_registers_;  // Arguments passed in registers not on the stack.
  bool args_reversed_;  // Left and right argument are swapped.
  bool use_sse3_;

  // Number type information of operands, determined by code generator.
  TypeInfo static_operands_type_;

  // Operand type information determined at runtime.
  BinaryOpIC::TypeInfo runtime_operands_type_;

  char* name_;

  const char* GetName();

#ifdef DEBUG
  void Print() {
    PrintF("GenericBinaryOpStub %d (op %s), "
           "(mode %d, flags %d, registers %d, reversed %d, type_info %s)\n",
           MinorKey(),
           Token::String(op_),
           static_cast<int>(mode_),
           static_cast<int>(flags_),
           static_cast<int>(args_in_registers_),
           static_cast<int>(args_reversed_),
           static_operands_type_.ToString());
  }
#endif

  // Minor key encoding in 18 bits RRNNNFRASOOOOOOOMM.
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 7> {};
  class SSE3Bits: public BitField<bool, 9, 1> {};
  class ArgsInRegistersBits: public BitField<bool, 10, 1> {};
  class ArgsReversedBits: public BitField<bool, 11, 1> {};
  class FlagBits: public BitField<GenericBinaryFlags, 12, 1> {};
  class StaticTypeInfoBits: public BitField<int, 13, 3> {};
  class RuntimeTypeInfoBits: public BitField<BinaryOpIC::TypeInfo, 16, 2> {};

  Major MajorKey() { return GenericBinaryOp; }
  int MinorKey() {
    // Encode the parameters in a unique 18 bit value.
    return OpBits::encode(op_)
           | ModeBits::encode(mode_)
           | FlagBits::encode(flags_)
           | SSE3Bits::encode(use_sse3_)
           | ArgsInRegistersBits::encode(args_in_registers_)
           | ArgsReversedBits::encode(args_reversed_)
           | StaticTypeInfoBits::encode(
                 static_operands_type_.ThreeBitRepresentation())
           | RuntimeTypeInfoBits::encode(runtime_operands_type_);
  }
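
  // Layout of the minor key (added for clarity; not in the original source),
  // reading the BitField declarations above from bit 0 upwards and matching
  // the RRNNNFRASOOOOOOOMM mnemonic:
  //
  //   bits  0- 1  MM       ModeBits             (OverwriteMode)
  //   bits  2- 8  OOOOOOO  OpBits               (Token::Value)
  //   bit      9  S        SSE3Bits
  //   bit     10  A        ArgsInRegistersBits
  //   bit     11  R        ArgsReversedBits
  //   bit     12  F        FlagBits             (GenericBinaryFlags)
  //   bits 13-15  NNN      StaticTypeInfoBits
  //   bits 16-17  RR       RuntimeTypeInfoBits  (BinaryOpIC::TypeInfo)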

  void Generate(MacroAssembler* masm);
  void GenerateSmiCode(MacroAssembler* masm, Label* slow);
  void GenerateLoadArguments(MacroAssembler* masm);
  void GenerateReturn(MacroAssembler* masm);
  void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);
  void GenerateRegisterArgsPush(MacroAssembler* masm);
  void GenerateTypeTransition(MacroAssembler* masm);

  bool ArgsInRegistersSupported() {
    return op_ == Token::ADD || op_ == Token::SUB
        || op_ == Token::MUL || op_ == Token::DIV;
  }
  bool IsOperationCommutative() {
    return (op_ == Token::ADD) || (op_ == Token::MUL);
  }

  void SetArgsInRegisters() { args_in_registers_ = true; }
  void SetArgsReversed() { args_reversed_ = true; }
  bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; }
  bool HasArgsInRegisters() { return args_in_registers_; }
  bool HasArgsReversed() { return args_reversed_; }

  bool ShouldGenerateSmiCode() {
    return HasSmiCodeInStub() &&
           runtime_operands_type_ != BinaryOpIC::HEAP_NUMBERS &&
           runtime_operands_type_ != BinaryOpIC::STRINGS;
  }

  bool ShouldGenerateFPCode() {
    return runtime_operands_type_ != BinaryOpIC::STRINGS;
  }

  virtual int GetCodeKind() { return Code::BINARY_OP_IC; }

  virtual InlineCacheState GetICState() {
    return BinaryOpIC::ToState(runtime_operands_type_);
  }
};


class StringHelper : public AllStatic {
 public:
  // Generate code for copying characters using a simple loop.  This should
  // only be used in places where the number of characters is small and the
  // additional setup and checking in GenerateCopyCharactersREP adds too much
  // overhead.  Copying of overlapping regions is not supported.
  static void GenerateCopyCharacters(MacroAssembler* masm,
                                     Register dest,
                                     Register src,
                                     Register count,
                                     Register scratch,
                                     bool ascii);

  // Generate code for copying characters using the rep movs instruction.
  // Copies ecx characters from esi to edi.  Copying of overlapping regions
  // is not supported.
  static void GenerateCopyCharactersREP(MacroAssembler* masm,
                                        Register dest,     // Must be edi.
                                        Register src,      // Must be esi.
                                        Register count,    // Must be ecx.
                                        Register scratch,  // Neither of above.
                                        bool ascii);

  // Probe the symbol table for a two character string.  If the string is not
  // found by probing, a jump to the label not_found is performed.  This jump
  // does not guarantee that the string is not in the symbol table.  If the
  // string is found, the code falls through with the string in register eax.
  static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                   Register c1,
                                                   Register c2,
                                                   Register scratch1,
                                                   Register scratch2,
                                                   Register scratch3,
                                                   Label* not_found);

  // Generate string hash.
  static void GenerateHashInit(MacroAssembler* masm,
                               Register hash,
                               Register character,
                               Register scratch);
  static void GenerateHashAddCharacter(MacroAssembler* masm,
                                       Register hash,
                                       Register character,
                                       Register scratch);
  static void GenerateHashGetHash(MacroAssembler* masm,
                                  Register hash,
                                  Register scratch);

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
};
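
// Illustrative sketch (added for clarity; not part of the original header)
// of the intended call order for the string-hash helpers above, emitting
// code into a MacroAssembler `masm` with caller-chosen `hash`, `character`
// and `scratch` registers:
//
//   StringHelper::GenerateHashInit(masm, hash, character, scratch);
//   // ... load the next character, then for each remaining character:
//   StringHelper::GenerateHashAddCharacter(masm, hash, character, scratch);
//   StringHelper::GenerateHashGetHash(masm, hash, scratch);  // Finalize.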


// Flag that indicates how to generate code for the stub StringAddStub.
enum StringAddFlags {
  NO_STRING_ADD_FLAGS = 0,
  NO_STRING_CHECK_IN_STUB = 1 << 0  // Omit string check in stub.
};


class StringAddStub: public CodeStub {
 public:
  explicit StringAddStub(StringAddFlags flags) {
    string_check_ = ((flags & NO_STRING_CHECK_IN_STUB) == 0);
  }

 private:
  Major MajorKey() { return StringAdd; }
  int MinorKey() { return string_check_ ? 0 : 1; }

  void Generate(MacroAssembler* masm);

  // Should the stub check whether arguments are strings?
  bool string_check_;
};


class SubStringStub: public CodeStub {
 public:
  SubStringStub() {}

 private:
  Major MajorKey() { return SubString; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


class StringCompareStub: public CodeStub {
 public:
  explicit StringCompareStub() {
  }

  // Compare two flat ASCII strings and return the result in eax after
  // popping two arguments from the stack.
  static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                              Register left,
                                              Register right,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3);

 private:
  Major MajorKey() { return StringCompare; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


class NumberToStringStub: public CodeStub {
 public:
  NumberToStringStub() { }

  // Generate code to do a lookup in the number string cache.  If the number
  // in the register object is found in the cache, the generated code falls
  // through with the result in the result register.  The object and the
  // result register can be the same.  If the number is not found in the
  // cache, the code jumps to the label not_found with only the content of
  // register object unchanged.
  static void GenerateLookupNumberStringCache(MacroAssembler* masm,
                                              Register object,
                                              Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              bool object_is_smi,
                                              Label* not_found);

 private:
  Major MajorKey() { return NumberToString; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);

  const char* GetName() { return "NumberToStringStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("NumberToStringStub\n");
  }
#endif
};


} }  // namespace v8::internal

#endif  // V8_IA32_CODEGEN_IA32_H_