// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_IA32_CODEGEN_IA32_H_
#define V8_IA32_CODEGEN_IA32_H_

namespace v8 {
namespace internal {

// Forward declarations
class DeferredCode;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };


// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that puts a
// reference on the virtual frame. The reference may be consumed
// by GetValue, TakeValue, SetValue, and Codegen::UnloadReference.
// When the lifetime (scope) of a valid reference ends, it must have
// been consumed, and be in state UNLOADED.
class Reference BASE_EMBEDDED {
 public:
  // The values of the types are important, see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }
  // The size the reference takes up on the stack.
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack. The reference is expected to be already on top of
  // the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Like GetValue except that the slot is expected to be written to before
  // being read from again. The value of the reference may be invalidated,
  // causing subsequent attempts to read it to fail.
  void TakeValue();

  // Generate code to store the value on top of the expression stack in the
  // reference. The reference is expected to be immediately below the value
  // on the expression stack. The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state);

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  // Keep the reference on the stack after get, so it can be used by set later.
  bool persist_after_get_;
};
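
// Illustrative usage sketch (not part of the original header; names such as
// 'node' are hypothetical). Based only on the declarations above, a store
// through a reference inside the code generator might look like:
//
//   { Reference target(this, node->target());
//     if (target.is_illegal()) return;
//     Load(node->value());              // push the value to store
//     target.SetValue(NOT_CONST_INIT);  // store it and unload the reference
//   }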


// -------------------------------------------------------------------------
// Control destinations.

// A control destination encapsulates a pair of jump targets and a
// flag indicating which one is the preferred fall-through. The
// preferred fall-through must be unbound, the other may be already
// bound (i.e., a backward target).
//
// The true and false targets may be jumped to unconditionally or
// control may split conditionally. Unconditional jumping and
// splitting should be emitted in tail position (as the last thing
// when compiling an expression) because they can cause either label
// to be bound or the non-fall-through target to be jumped to, leaving
// an invalid virtual frame.
//
// The labels in the control destination can be extracted and
// manipulated normally without affecting the state of the
// destination.

class ControlDestination BASE_EMBEDDED {
 public:
  ControlDestination(JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool true_is_fall_through)
      : true_target_(true_target),
        false_target_(false_target),
        true_is_fall_through_(true_is_fall_through),
        is_used_(false) {
    ASSERT(true_is_fall_through ? !true_target->is_bound()
                                : !false_target->is_bound());
  }

  // Accessors for the jump targets. Directly jumping or branching to
  // or binding the targets will not update the destination's state.
  JumpTarget* true_target() const { return true_target_; }
  JumpTarget* false_target() const { return false_target_; }

  // True if the destination has been jumped to unconditionally or
  // control has been split to both targets. This predicate does not
  // test whether the targets have been extracted and manipulated as
  // raw jump targets.
  bool is_used() const { return is_used_; }

  // True if the destination is used and the true target (respectively
  // false target) was the fall through. If the target is backward,
  // "fall through" includes jumping unconditionally to it.
  bool true_was_fall_through() const {
    return is_used_ && true_is_fall_through_;
  }

  bool false_was_fall_through() const {
    return is_used_ && !true_is_fall_through_;
  }

  // Emit a branch to one of the true or false targets, and bind the
  // other target. Because this binds the fall-through target, it
  // should be emitted in tail position (as the last thing when
  // compiling an expression).
  void Split(Condition cc) {
    ASSERT(!is_used_);
    if (true_is_fall_through_) {
      false_target_->Branch(NegateCondition(cc));
      true_target_->Bind();
    } else {
      true_target_->Branch(cc);
      false_target_->Bind();
    }
    is_used_ = true;
  }

  // Emit an unconditional jump in tail position, to the true target
  // (if the argument is true) or the false target. The "jump" will
  // actually bind the jump target if it is forward, and jump to it if
  // it is backward.
  void Goto(bool where) {
    ASSERT(!is_used_);
    JumpTarget* target = where ? true_target_ : false_target_;
    if (target->is_bound()) {
      target->Jump();
    } else {
      target->Bind();
    }
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Mark this jump target as used as if Goto had been called, but
  // without generating a jump or binding a label (the control effect
  // should have already happened). This is used when the left
  // subexpression of a short-circuit boolean operator is compiled.
  void Use(bool where) {
    ASSERT(!is_used_);
    ASSERT((where ? true_target_ : false_target_)->is_bound());
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Swap the true and false targets but keep the same actual label as
  // the fall through. This is used when compiling negated
  // expressions, where we want to swap the targets but preserve the
  // state.
  void Invert() {
    JumpTarget* temp_target = true_target_;
    true_target_ = false_target_;
    false_target_ = temp_target;

    true_is_fall_through_ = !true_is_fall_through_;
  }

 private:
  // True and false jump targets.
  JumpTarget* true_target_;
  JumpTarget* false_target_;

  // Before using the destination: true if the true target is the
  // preferred fall through, false if the false target is. After
  // using the destination: true if the true target was actually used
  // as the fall through, false if the false target was.
  bool true_is_fall_through_;

  // True if the Split or Goto functions have been called.
  bool is_used_;
};
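
// Illustrative sketch (assumed usage, based only on the declarations above):
// compiling a comparison in tail position could split control like this:
//
//   JumpTarget if_true, if_false;
//   ControlDestination dest(&if_true, &if_false, true);  // fall into if_true
//   // ... emit a compare that sets the condition codes ...
//   dest.Split(equal);  // branches to if_false on the negated condition,
//                       // then binds if_true as the fall-through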


// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the jump target pair). It is threaded through
// the call stack. Constructing a state implicitly pushes it on the owning
// code generator's stack of states, and destroying one implicitly pops it.
//
// The code generator state is only used for expressions, so statements have
// the initial state.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state. Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Create a code generator state based on a code generator's current
  // state. The new state has its own control destination.
  CodeGenState(CodeGenerator* owner, ControlDestination* destination);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  ~CodeGenState();

  // Accessors for the state.
  ControlDestination* destination() const { return destination_; }

 private:
  // The owning code generator.
  CodeGenerator* owner_;

  // A control destination in case the expression has a control-flow
  // effect.
  ControlDestination* destination_;

  // The previous state of the owning code generator, restored when
  // this state is destroyed.
  CodeGenState* previous_;
};
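
// Illustrative sketch (assumed usage, not part of the original header):
// because construction pushes and destruction pops the state, a nested
// expression with a control-flow effect is typically compiled inside a
// scoped state:
//
//   { CodeGenState new_state(this, &dest);  // becomes the current state
//     Visit(expr);                          // expression reads destination()
//   }                                       // previous state restored here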


// -------------------------------------------------------------------------
// Arguments allocation mode.

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};


// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  // Takes a function literal and generates code for it. This function should
  // only be called by compiler.cc.
  static Handle<Code> MakeCode(FunctionLiteral* fun,
                               Handle<Script> script,
                               bool is_eval);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(FunctionLiteral* fun);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(FunctionLiteral* fun,
                                       MacroAssembler* masm,
                                       Code::Flags flags,
                                       Handle<Script> script);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void RecordPositions(MacroAssembler* masm, int pos);

  // Accessors
  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  Handle<Script> script() { return script_; }

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers. The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  void DeleteFrame();

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  bool in_spilled_code() const { return in_spilled_code_; }
  void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }

 private:
  // Construction/Destruction
  CodeGenerator(int buffer_size, Handle<Script> script, bool is_eval);
  virtual ~CodeGenerator() { delete masm_; }

  // Accessors
  Scope* scope() const { return scope_; }
  bool is_eval() { return is_eval_; }

  // Generating deferred code.
  void ProcessDeferred();

  // State
  ControlDestination* destination() const { return state_->destination(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }

  // Node visitors.
  void VisitStatements(ZoneList<Statement*>* statements);

#define DEF_VISIT(type) \
  void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Visit a statement and then spill the virtual frame if control flow can
  // reach the end of the statement (i.e., it does not exit via break,
  // continue, return, or throw). This function is used temporarily while
  // the code generator is being transformed.
  void VisitAndSpill(Statement* statement);

  // Visit a list of statements and then spill the virtual frame if control
  // flow can reach the end of the list.
  void VisitStatementsAndSpill(ZoneList<Statement*>* statements);

  // Main code generation function
  void GenCode(FunctionLiteral* fun);

  // Generate the return sequence code. Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once).
  void GenerateReturnSequence(Result* return_value);

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode() const;

  // Store the arguments object and allocate it if necessary.
  Result StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  static Operand ContextOperand(Register context, int index) {
    return Operand(context, Context::SlotOffset(index));
  }

  Operand SlotOperand(Slot* slot, Register tmp);

  Operand ContextSlotOperandCheckExtensions(Slot* slot,
                                            Result tmp,
                                            JumpTarget* slow);

  // Expressions
  static Operand GlobalObject() {
    return ContextOperand(esi, Context::GLOBAL_INDEX);
  }

  void LoadCondition(Expression* x,
                     ControlDestination* destination,
                     bool force_control);
  void Load(Expression* expr);
  void LoadGlobal();
  void LoadGlobalReceiver();

  // Generate code to push the value of an expression on top of the frame
  // and then spill the frame fully to memory. This function is used
  // temporarily while the code generator is being transformed.
  void LoadAndSpill(Expression* expression);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState typeof_state);
  Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                           TypeofState typeof_state,
                                           JumpTarget* slow);

  // Store the value on top of the expression stack into a slot, leaving the
  // value in place.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Load a property of an object, returning it in a Result.
  // The object and the property name are passed on the stack, and
  // not changed.
  Result EmitKeyedLoad(bool is_global);

  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions. We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);

  // Translate the value on top of the frame into control flow to the
  // control destination.
  void ToBoolean(ControlDestination* destination);

  void GenericBinaryOperation(
      Token::Value op,
      StaticType* type,
      OverwriteMode overwrite_mode);

  // If possible, combine two constant smi values using op to produce
  // a smi result, and push it on the virtual frame, all at compile time.
  // Returns true if it succeeds. Otherwise it has no effect.
  bool FoldConstantSmis(Token::Value op, int left, int right);

  // Emit code to perform a binary operation on a constant
  // smi and a likely smi. Consumes the Result *operand.
  Result ConstantSmiBinaryOperation(Token::Value op,
                                    Result* operand,
                                    Handle<Object> constant_operand,
                                    StaticType* type,
                                    bool reversed,
                                    OverwriteMode overwrite_mode);

  // Emit code to perform a binary operation on two likely smis.
  // The code to handle smi arguments is produced inline.
  // Consumes the Results *left and *right.
  Result LikelySmiBinaryOperation(Token::Value op,
                                  Result* left,
                                  Result* right,
                                  OverwriteMode overwrite_mode);

  void Comparison(AstNode* node,
                  Condition cc,
                  bool strict,
                  ControlDestination* destination);

  // To prevent long attacker-controlled byte sequences, integer constants
  // from the JavaScript source are loaded in two parts if they are larger
  // than 16 bits.
  static const int kMaxSmiInlinedBits = 16;
  bool IsUnsafeSmi(Handle<Object> value);
  // Load an integer constant x into a register target or into the stack using
  // at most 16 bits of user-controlled data per assembly operation.
  void MoveUnsafeSmi(Register target, Handle<Object> value);
  void StoreUnsafeSmiToLocal(int offset, Handle<Object> value);
  void PushUnsafeSmi(Handle<Object> value);

  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);

  // An optimized implementation of expressions of the form
  // x.apply(y, arguments). We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);

  void CheckStack();

  struct InlineRuntimeLUT {
    void (CodeGenerator::*method)(ZoneList<Expression*>*);
    const char* name;
  };

  static InlineRuntimeLUT* FindInlineRuntimeLUT(Handle<String> name);
  bool CheckForInlineRuntimeCall(CallRuntime* node);
  static bool PatchInlineRuntimeEntry(Handle<String> name,
                                      const InlineRuntimeLUT& new_entry,
                                      InlineRuntimeLUT* old_entry);

  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function boilerplate.
  void InstantiateBoilerplate(Handle<JSFunction> boilerplate);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArgumentsAccess(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateFastCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  void GenerateGetFramePointer(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomPositiveSmi(ZoneList<Expression*>* args);

  // Fast support for StringAdd.
  void GenerateStringAdd(ZoneList<Expression*>* args);

  // Fast support for SubString.
  void GenerateSubString(ZoneList<Expression*>* args);

  // Fast support for StringCompare.
  void GenerateStringCompare(ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);

  // Methods used to indicate which source code the generated code
  // corresponds to. Source positions are collected by the assembler and
  // emitted with the relocation information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* stmt);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);

#ifdef DEBUG
  // True if the registers are valid for entry to a block. There should
  // be no frame-external references to (non-reserved) registers.
  bool HasValidEntryRegisters();
#endif

  bool is_eval_;  // Tells whether code is generated for eval.
  Handle<Script> script_;
  ZoneList<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  // Code generation state
  Scope* scope_;
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  CodeGenState* state_;
  int loop_nesting_;

  // Jump targets.
  // The target of the return from the function.
  BreakTarget function_return_;

  // True if the function return is shadowed (i.e., jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // True when we are in code that expects the virtual frame to be fully
  // spilled. Some virtual frame functions are disabled in DEBUG builds when
  // called from spilled code, because they do not leave the virtual frame
  // in a spilled state.
  bool in_spilled_code_;

  static InlineRuntimeLUT kInlineRuntimeLUT[];

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class Result;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;

  friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};
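
// Illustrative sketch (assumed call pattern; the real driver lives in
// compiler.cc and is not shown here): code generation for a function
// literal is entered through the static MakeCode entry point:
//
//   Handle<Code> code =
//       CodeGenerator::MakeCode(fun, script, false /* is_eval */);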


// Flag that indicates how to generate code for the stub GenericBinaryOpStub.
enum GenericBinaryFlags {
  NO_GENERIC_BINARY_FLAGS = 0,
  NO_SMI_CODE_IN_STUB = 1 << 0  // Omit smi code in stub.
};


class GenericBinaryOpStub: public CodeStub {
 public:
  GenericBinaryOpStub(Token::Value op,
                      OverwriteMode mode,
                      GenericBinaryFlags flags)
      : op_(op),
        mode_(mode),
        flags_(flags),
        args_in_registers_(false),
        args_reversed_(false),
        name_(NULL) {
    use_sse3_ = CpuFeatures::IsSupported(SSE3);
    ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
  }

  // Generate code to call the stub with the supplied arguments. This will add
  // code at the call site to prepare arguments either in registers or on the
  // stack together with the actual call.
  void GenerateCall(MacroAssembler* masm, Register left, Register right);
  void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
  void GenerateCall(MacroAssembler* masm, Smi* left, Register right);

  Result GenerateCall(MacroAssembler* masm,
                      VirtualFrame* frame,
                      Result* left,
                      Result* right);

 private:
  Token::Value op_;
  OverwriteMode mode_;
  GenericBinaryFlags flags_;
  bool args_in_registers_;  // Arguments passed in registers, not on the stack.
  bool args_reversed_;  // Left and right arguments are swapped.
  bool use_sse3_;
  char* name_;

  const char* GetName();

#ifdef DEBUG
  void Print() {
    PrintF("GenericBinaryOpStub (op %s), "
           "(mode %d, flags %d, registers %d, reversed %d)\n",
           Token::String(op_),
           static_cast<int>(mode_),
           static_cast<int>(flags_),
           static_cast<int>(args_in_registers_),
           static_cast<int>(args_reversed_));
  }
#endif

  // Minor key encoding in 16 bits FRASOOOOOOOOOOMM.
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 10> {};
  class SSE3Bits: public BitField<bool, 12, 1> {};
  class ArgsInRegistersBits: public BitField<bool, 13, 1> {};
  class ArgsReversedBits: public BitField<bool, 14, 1> {};
  class FlagBits: public BitField<GenericBinaryFlags, 15, 1> {};

  Major MajorKey() { return GenericBinaryOp; }
  int MinorKey() {
    // Encode the parameters in a unique 16 bit value.
    return OpBits::encode(op_)
           | ModeBits::encode(mode_)
           | FlagBits::encode(flags_)
           | SSE3Bits::encode(use_sse3_)
           | ArgsInRegistersBits::encode(args_in_registers_)
           | ArgsReversedBits::encode(args_reversed_);
  }
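
  // Illustrative layout of the 16-bit minor key, read off the BitField
  // declarations above (most significant bit first); this matches the
  // FRASOOOOOOOOOOMM mnemonic:
  //
  //   bit 15     : FlagBits            (F)
  //   bit 14     : ArgsReversedBits    (R)
  //   bit 13     : ArgsInRegistersBits (A)
  //   bit 12     : SSE3Bits            (S)
  //   bits 2..11 : OpBits              (OOOOOOOOOO)
  //   bits 0..1  : ModeBits            (MM)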

  void Generate(MacroAssembler* masm);
  void GenerateSmiCode(MacroAssembler* masm, Label* slow);
  void GenerateLoadArguments(MacroAssembler* masm);
  void GenerateReturn(MacroAssembler* masm);
  void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);

  bool ArgsInRegistersSupported() {
    return op_ == Token::ADD || op_ == Token::SUB
        || op_ == Token::MUL || op_ == Token::DIV;
  }
  bool IsOperationCommutative() {
    return (op_ == Token::ADD) || (op_ == Token::MUL);
  }

  void SetArgsInRegisters() { args_in_registers_ = true; }
  void SetArgsReversed() { args_reversed_ = true; }
  bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; }
  bool HasArgsInRegisters() { return args_in_registers_; }
  bool HasArgsReversed() { return args_reversed_; }
};


// Flag that indicates how to generate code for the stub StringAddStub.
enum StringAddFlags {
  NO_STRING_ADD_FLAGS = 0,
  NO_STRING_CHECK_IN_STUB = 1 << 0  // Omit string check in stub.
};


class StringStubBase: public CodeStub {
 public:
  // Generate code for copying characters using a simple loop. This should only
  // be used in places where the number of characters is small and the
  // additional setup and checking in GenerateCopyCharactersREP adds too much
  // overhead. Copying of overlapping regions is not supported.
  void GenerateCopyCharacters(MacroAssembler* masm,
                              Register dest,
                              Register src,
                              Register count,
                              Register scratch,
                              bool ascii);

  // Generate code for copying characters using the rep movs instruction.
  // Copies ecx characters from esi to edi. Copying of overlapping regions is
  // not supported.
  void GenerateCopyCharactersREP(MacroAssembler* masm,
                                 Register dest,     // Must be edi.
                                 Register src,      // Must be esi.
                                 Register count,    // Must be ecx.
                                 Register scratch,  // Neither of the above.
                                 bool ascii);
};


class StringAddStub: public StringStubBase {
 public:
  explicit StringAddStub(StringAddFlags flags) {
    string_check_ = ((flags & NO_STRING_CHECK_IN_STUB) == 0);
  }

 private:
  Major MajorKey() { return StringAdd; }
  int MinorKey() { return string_check_ ? 0 : 1; }

  void Generate(MacroAssembler* masm);

  // Should the stub check whether arguments are strings?
  bool string_check_;
};


class SubStringStub: public StringStubBase {
 public:
  SubStringStub() {}

 private:
  Major MajorKey() { return SubString; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


class StringCompareStub: public StringStubBase {
 public:
  explicit StringCompareStub() {
  }

  // Compares two flat ASCII strings and returns the result in eax after
  // popping two arguments from the stack.
  static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                              Register left,
                                              Register right,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3);

 private:
  Major MajorKey() { return StringCompare; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


} }  // namespace v8::internal

#endif  // V8_IA32_CODEGEN_IA32_H_