// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
#define V8_IA32_MACRO_ASSEMBLER_IA32_H_

#include "assembler.h"
#include "frames.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};
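
// The flags form a bit mask and can be combined. An illustrative sketch (not
// part of the original header) of requesting both behaviors at once:
//   static_cast<AllocationFlags>(TAG_OBJECT | RESULT_CONTAINS_TOP)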


// Convenience for platform-independent signatures.  We do not normally
// distinguish memory operands from other operands on ia32.
typedef Operand MemOperand;

enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };


bool AreAliased(Register r1, Register r2, Register r3, Register r4);


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // The isolate parameter can be NULL if the macro assembler should
  // not use isolate-dependent functionality. In this case, it's the
  // responsibility of the caller never to invoke such a function on the
  // macro assembler.
  MacroAssembler(Isolate* isolate, void* buffer, int size);

  // ---------------------------------------------------------------------------
  // GC Support
  enum RememberedSetFinalAction {
    kReturnAtEnd,
    kFallThroughAtEnd
  };

  // Record in the remembered set the fact that we have a pointer to new space
  // at the address pointed to by the addr register.  Only works if addr is not
  // in new space.
  void RememberedSetHelper(Register object,  // Used for debug code.
                           Register addr,
                           Register scratch,
                           SaveFPRegsMode save_fp,
                           RememberedSetFinalAction and_then);

  void CheckPageFlag(Register object,
                     Register scratch,
                     int mask,
                     Condition cc,
                     Label* condition_met,
                     Label::Distance condition_met_distance = Label::kFar);

  // Check if object is in new space.  Jumps if the object is not in new space.
  // The register scratch can be object itself, but scratch will be clobbered.
  void JumpIfNotInNewSpace(Register object,
                           Register scratch,
                           Label* branch,
                           Label::Distance distance = Label::kFar) {
    InNewSpace(object, scratch, zero, branch, distance);
  }

  // Check if object is in new space.  Jumps if the object is in new space.
  // The register scratch can be object itself, but it will be clobbered.
  void JumpIfInNewSpace(Register object,
                        Register scratch,
                        Label* branch,
                        Label::Distance distance = Label::kFar) {
    InNewSpace(object, scratch, not_zero, branch, distance);
  }

  // Check if an object has a given incremental marking color.  Also uses ecx!
  void HasColor(Register object,
                Register scratch0,
                Register scratch1,
                Label* has_color,
                Label::Distance has_color_distance,
                int first_bit,
                int second_bit);

  void JumpIfBlack(Register object,
                   Register scratch0,
                   Register scratch1,
                   Label* on_black,
                   Label::Distance on_black_distance = Label::kFar);

  // Checks the color of an object.  If the object is already grey or black
  // then we just fall through, since it is already live.  If it is white and
  // we can determine that it doesn't need to be scanned, then we just mark it
  // black and fall through.  For the rest we jump to the label so the
  // incremental marker can fix its assumptions.
  void EnsureNotWhite(Register object,
                      Register scratch1,
                      Register scratch2,
                      Label* object_is_white_and_not_data,
                      Label::Distance distance);

  // Notify the garbage collector that we wrote a pointer into an object.
  // |object| is the object being stored into, |value| is the object being
  // stored.  The value and scratch registers are clobbered by the operation.
  // The offset is the offset from the start of the object, not the offset from
  // the tagged HeapObject pointer.  For use with FieldOperand(reg, off).
  void RecordWriteField(
      Register object,
      int offset,
      Register value,
      Register scratch,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // As above, but the offset has the tag presubtracted.  For use with
  // Operand(reg, off).
  void RecordWriteContextSlot(
      Register context,
      int offset,
      Register value,
      Register scratch,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK) {
    RecordWriteField(context,
                     offset + kHeapObjectTag,
                     value,
                     scratch,
                     save_fp,
                     remembered_set_action,
                     smi_check);
  }

  // Notify the garbage collector that we wrote a pointer into a fixed array.
  // |array| is the array being stored into, |value| is the object being
  // stored.  |index| is the array index represented as a Smi.  All registers
  // are clobbered by the operation.  RecordWriteArray filters out smis, so it
  // does not update the write barrier if the value is a smi.
  void RecordWriteArray(
      Register array,
      Register value,
      Register index,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // For the page containing |object|, mark the region covering |address| as
  // dirty.  |object| is the object being stored into, |value| is the object
  // being stored.  The address and value registers are clobbered by the
  // operation.  RecordWrite filters out smis, so it does not update the
  // write barrier if the value is a smi.
  void RecordWrite(
      Register object,
      Register address,
      Register value,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);
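
  // An illustrative usage sketch (not part of the original header): store a
  // pointer into a field of |object| and then record it so the generational
  // and incremental-marking write barriers stay consistent. The offset and
  // registers here are hypothetical.
  //   mov(FieldOperand(object, JSObject::kPropertiesOffset), value);
  //   RecordWriteField(object, JSObject::kPropertiesOffset, value, scratch,
  //                    kDontSaveFPRegs);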

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // Enter specific kind of exit frame. Expects the number of
  // arguments in register eax and sets up the number of arguments in
  // register edi and the pointer to the first argument in register
  // esi.
  void EnterExitFrame(bool save_doubles);

  void EnterApiExitFrame(int argc);

  // Leave the current exit frame. Expects the return value in
  // register eax:edx (untouched) and the pointer to the first
  // argument in register esi.
  void LeaveExitFrame(bool save_doubles);

  // Leave the current exit frame. Expects the return value in
  // register eax (untouched).
  void LeaveApiExitFrame();

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // Load the global function with the given index.
  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same.
  void LoadGlobalFunctionInitialMap(Register function, Register map);

  // Push and pop the registers that can hold pointers.
  void PushSafepointRegisters() { pushad(); }
  void PopSafepointRegisters() { popad(); }
  // Store the value in register/immediate src in the safepoint
  // register stack slot for register dst.
  void StoreToSafepointRegisterSlot(Register dst, Register src);
  void StoreToSafepointRegisterSlot(Register dst, Immediate src);
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  void LoadHeapObject(Register result, Handle<HeapObject> object);
  void PushHeapObject(Handle<HeapObject> object);

  void LoadObject(Register result, Handle<Object> object) {
    if (object->IsHeapObject()) {
      LoadHeapObject(result, Handle<HeapObject>::cast(object));
    } else {
      Set(result, Immediate(object));
    }
  }

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Set up call kind marking in ecx. The method takes ecx as an
  // explicit first parameter to make the code more readable at the
  // call sites.
  void SetCallKind(Register dst, CallKind kind);

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind) {
    InvokeCode(Operand(code), expected, actual, flag, call_wrapper, call_kind);
  }

  void InvokeCode(const Operand& code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void InvokeFunction(Handle<JSFunction> function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);
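
  // An illustrative sketch (not part of the original header): invoking a
  // JSFunction held in edi with two arguments already on the stack. The
  // register choice and argument count are hypothetical.
  //   ParameterCount actual(2);
  //   InvokeFunction(edi, actual, CALL_FUNCTION,
  //                  NullCallWrapper(), CALL_AS_METHOD);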

  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     const CallWrapper& call_wrapper = NullCallWrapper());

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Expression support
  void Set(Register dst, const Immediate& x);
  void Set(const Operand& dst, const Immediate& x);

  // Support for constant splitting.
  bool IsUnsafeImmediate(const Immediate& x);
  void SafeSet(Register dst, const Immediate& x);
  void SafePush(const Immediate& x);

  // Compare against a known root, e.g. undefined, null, true, ...
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(const Operand& with, Heap::RootListIndex index);

  // Compare object type for heap object.
  // Incoming register is heap_object and outgoing register is map.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  void CmpInstanceType(Register map, InstanceType type);

  // Check if a map for a JSObject indicates that the object has fast elements.
  // Jump to the specified label if it does not.
  void CheckFastElements(Register map,
                         Label* fail,
                         Label::Distance distance = Label::kFar);

  // Check if a map for a JSObject indicates that the object can have both smi
  // and HeapObject elements.  Jump to the specified label if it does not.
  void CheckFastObjectElements(Register map,
                               Label* fail,
                               Label::Distance distance = Label::kFar);

  // Check if a map for a JSObject indicates that the object has fast smi only
  // elements.  Jump to the specified label if it does not.
  void CheckFastSmiOnlyElements(Register map,
                                Label* fail,
                                Label::Distance distance = Label::kFar);

  // Check to see if maybe_number can be stored as a double in
  // FastDoubleElements. If it can, store it at the index specified by key in
  // the FastDoubleElements array elements, otherwise jump to fail.
  void StoreNumberToDoubleElements(Register maybe_number,
                                   Register elements,
                                   Register key,
                                   Register scratch1,
                                   XMMRegister scratch2,
                                   Label* fail,
                                   bool specialize_for_processor);

  // Compare an object's map with the specified map and its transitioned
  // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS.  FLAGS are set
  // with the result of the map compare.  If multiple map compares are
  // required, the compare sequence branches to early_success.
  void CompareMap(Register obj,
                  Handle<Map> map,
                  Label* early_success,
                  CompareMapMode mode = REQUIRE_EXACT_MAP);

  // Check if the map of an object is equal to a specified map and branch to
  // label if not.  Skip the smi check if not required (object is known to be a
  // heap object).  If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
  // against maps that are ElementsKind transition maps of the specified map.
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                SmiCheckType smi_check_type,
                CompareMapMode mode = REQUIRE_EXACT_MAP);

  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal.  Skip the smi check if not required (object is
  // known to be a heap object).
  void DispatchMap(Register obj,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);

  // Check if the object in register heap_object is a string.  Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type.  The registers map and instance_type can be
  // the same in which case it contains the instance type afterwards.  Either
  // of the registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);

  // Check if a heap object's type is in the JSObject range, not including
  // JSFunction.  The object's map will be loaded in the map register.
  // Any or all of the three registers may be the same.
  // The contents of the scratch register will always be overwritten.
  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  // The contents of the scratch register will be overwritten.
  void IsInstanceJSObjectType(Register map, Register scratch, Label* fail);

  // FCmp is similar to integer cmp, but requires unsigned
  // jcc instructions (je, ja, jae, jb, jbe, je, and jz).
  void FCmp();

  void ClampUint8(Register reg);

  void ClampDoubleToUint8(XMMRegister input_reg,
                          XMMRegister scratch_reg,
                          Register result_reg);


  // Smi tagging support.
  void SmiTag(Register reg) {
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    add(reg, reg);
  }
  void SmiUntag(Register reg) {
    sar(reg, kSmiTagSize);
  }

  // Modifies the register even if it does not contain a Smi!
  void SmiUntag(Register reg, Label* is_smi) {
    STATIC_ASSERT(kSmiTagSize == 1);
    sar(reg, kSmiTagSize);
    STATIC_ASSERT(kSmiTag == 0);
    j(not_carry, is_smi);
  }
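
  // A worked example of the Smi encoding above (illustrative, not part of the
  // original header): with kSmiTag == 0 and kSmiTagSize == 1, tagging the
  // integer 5 doubles it to 10, and untagging shifts it back:
  //   mov(eax, Immediate(5));
  //   SmiTag(eax);    // eax == 10, a valid Smi
  //   SmiUntag(eax);  // eax == 5 again
  // The Label variant of SmiUntag exploits the bit shifted out by sar: a Smi
  // has a zero tag bit, so the carry flag is clear and control jumps to
  // is_smi.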

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value,
                        Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }
  // Jump if the operand is a smi.
  inline void JumpIfSmi(Operand value,
                        Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }
  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value,
                           Label* not_smi_label,
                           Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(not_zero, not_smi_label, distance);
  }

  void LoadInstanceDescriptors(Register map, Register descriptors);

  void LoadPowerOf2(XMMRegister dst, Register scratch, int power);

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new try handler and link it into the try handler chain.
  void PushTryHandler(CodeLocation try_location,
                      HandlerType type,
                      int handler_index);

  // Unlink the stack handler on top of the stack from the try handler chain.
  void PopTryHandler();

  // Activate the top handler in the try handler chain.
  void Throw(Register value);

  void ThrowUncatchable(UncatchableExceptionType type, Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments.  The holder register
  // is left untouched, but the scratch register is clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);

  void GetNumberHash(Register r0, Register scratch);

  void LoadFromNumberDictionary(Label* miss,
                                Register elements,
                                Register key,
                                Register r0,
                                Register r1,
                                Register r2,
                                Register result);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space.  If the new space is exhausted control
  // continues at the gc_required label.  The allocated object is returned in
  // result and end of the new object is returned in result_end.  The register
  // scratch can be passed as no_reg in which case an additional object
  // reference will be added to the reloc info.  The returned pointers in
  // result and result_end have not yet been tagged as heap objects.  If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace).  If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);
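
  // An illustrative usage sketch (not part of the original header): allocate
  // a fixed-size object and get the result back already tagged. The register
  // assignments are hypothetical.
  //   AllocateInNewSpace(HeapNumber::kSize, eax, ebx, no_reg, &gc_required,
  //                      TAG_OBJECT);
  //   // eax now holds a tagged heap pointer; ebx holds the new allocation top.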

  // Undo allocation in new space. The object passed and objects allocated
  // after it will no longer be allocated. Make sure that no pointers are left
  // to the object(s) no longer allocated as they would be invalid when
  // allocation is un-done.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. The
  // register scratch2 can be passed as no_reg; the others must be
  // valid registers. Returns tagged pointer in result register, or
  // jumps to gc_required if new space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateAsciiString(Register result,
                           int length,
                           Register scratch1,
                           Register scratch2,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateTwoByteConsString(Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocate a raw sliced string object. Only the map field of the result is
  // initialized.
  void AllocateTwoByteSlicedString(Register result,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* gc_required);
  void AllocateAsciiSlicedString(Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);

  // Copy memory, byte-by-byte, from source to destination.  Not optimized for
  // long or aligned copies.
  // The contents of index and scratch are destroyed.
  void CopyBytes(Register source,
                 Register destination,
                 Register length,
                 Register scratch);

  // Initialize fields with filler values.  Fields starting at |start_offset|
  // up to but not including |end_offset| are overwritten with the value in
  // |filler|.  At the end of the loop, |start_offset| takes the value of
  // |end_offset|.
  void InitializeFieldsWithFiller(Register start_offset,
                                  Register end_offset,
                                  Register filler);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check a boolean-bit of a Smi field.
  void BooleanBitTest(Register object, int field_offset, int bit_index);

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in the
  // result register.  Checks that the function really is a function and jumps
  // to the miss label if the fast checks fail.  The function register will be
  // untouched; the other registers may be clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss,
                               bool miss_on_bound_function = false);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash.  Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub.  Generate the code if necessary.
  void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);

  // Tail call a code stub (jump).  Generate the code if necessary.
  void TailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(ExternalReference ref, int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in esp[0], esp[4],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);
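
  // An illustrative usage sketch (not part of the original header): calling a
  // two-argument C function. |ref| stands for an ExternalReference to the C
  // function and |arg0|/|arg1| for registers holding the arguments; all are
  // hypothetical names.
  //   PrepareCallCFunction(2, eax);
  //   mov(Operand(esp, 0 * kPointerSize), arg0);
  //   mov(Operand(esp, 1 * kPointerSize), arg1);
  //   CallCFunction(ref, 2);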

  // Prepares stack to put arguments (aligns and so on). Reserves
  // space for return value if needed (assumes the return value is a handle).
  // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
  // etc. Saves context (esi). If space was reserved for return value then
  // stores the pointer to the reserved slot into esi.
  void PrepareCallApiFunction(int argc);

  // Calls an API function.  Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions.  Clobbers ebx, edi and
  // caller-save registers.  Restores context.  On return removes
  // stack_space * kPointerSize (GCed).
  void CallApiFunctionAndReturn(Address function_address, int stack_space);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext);

  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1.  Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the esp register.
  void Drop(int element_count);

  void Call(Label* target) { call(target); }

  // Emit call to the code we are currently generating.
  void CallSelf() {
    Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
    call(self, RelocInfo::CODE_TARGET);
  }

  // Move if the registers are not identical.
  void Move(Register target, Register source);

  // Push a handle value.
  void Push(Handle<Object> handle) { push(Immediate(handle)); }

  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);
  void IncrementCounter(Condition cc, StatsCounter* counter, int value);
  void DecrementCounter(Condition cc, StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }
  void set_has_frame(bool value) { has_frame_ = value; }
  bool has_frame() { return has_frame_; }
  inline bool AllowThisStubCall(CodeStub* stub);

  // ---------------------------------------------------------------------------
  // String utilities.

  // Check whether the instance type represents a flat ASCII string.  Jump to
  // the label if not.  If the instance type can be scratched, specify the same
  // register for both instance type and scratch.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register instance_type,
                                              Register scratch,
                                              Label* on_not_flat_ascii_string);

  // Checks if both objects are sequential ASCII strings, and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register object1,
                                           Register object2,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* on_not_flat_ascii_strings);

  static int SafepointRegisterStackIndex(Register reg) {
    return SafepointRegisterStackIndex(reg.code());
  }

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

 private:
  bool generating_stub_;
  bool allow_stub_calls_;
  bool has_frame_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      const Operand& code_operand,
                      Label* done,
                      bool* definitely_mismatches,
                      InvokeFlag flag,
                      Label::Distance done_distance,
                      const CallWrapper& call_wrapper = NullCallWrapper(),
                      CallKind call_kind = CALL_AS_METHOD);

  void EnterExitFramePrologue();
  void EnterExitFrameEpilogue(int argc, bool save_doubles);

  void LeaveExitFrameEpilogue();

  // Allocation support helpers.
  void LoadAllocationTopHelper(Register result,
                               Register scratch,
                               AllocationFlags flags);
  void UpdateAllocationTopHelper(Register result_end, Register scratch);

  // Helper for PopHandleScope.  Allowed to perform a GC and returns
  // NULL if gc_allowed.  Does not perform a GC if !gc_allowed, and
  // possibly returns a failure object indicating an allocation failure.
  MUST_USE_RESULT MaybeObject* PopHandleScopeHelper(Register saved,
                                                    Register scratch,
                                                    bool gc_allowed);

  // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,
                  Label* condition_met,
                  Label::Distance condition_met_distance = Label::kFar);

  // Helper for finding the mark bits for an address.  Afterwards, the
  // bitmap register points at the word with the mark bits and the mask
  // register holds the position of the first bit.  Uses ecx as scratch and
  // leaves addr_reg unchanged.
  inline void GetMarkBits(Register addr_reg,
                          Register bitmap_reg,
                          Register mask_reg);

  // Helper for throwing exceptions.  Compute a handler address and jump to
  // it.  See the implementation for register usage.
  void JumpToHandlerEntry();

  // Compute memory operands for safepoint stack slots.
  Operand SafepointRegisterSlot(Register reg);
  static int SafepointRegisterStackIndex(int reg_code);

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation.  When using the code patcher
// the exact number of bytes specified must be emitted.  It is not legal to
// emit relocation information.  If any of these constraints are violated it
// causes an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;  // The address of the code being patched.
  int size_;  // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};
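
// An illustrative usage sketch (not part of the original header): patch two
// bytes at a known address; exactly the size passed to the constructor must
// be emitted, so two one-byte breakpoints fill the patch here.
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();
//   patcher.masm()->int3();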


// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}
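
// For instance (illustrative, not part of the original header), loading the
// map word of the tagged heap object in eax compensates for kHeapObjectTag in
// the displacement:
//   mov(edx, FieldOperand(eax, HeapObject::kMapOffset));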


// Generate an Operand for loading an indexed field from an object.
inline Operand FieldOperand(Register object,
                            Register index,
                            ScaleFactor scale,
                            int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}


inline Operand ContextOperand(Register context, int index) {
  return Operand(context, Context::SlotOffset(index));
}


inline Operand GlobalObjectOperand() {
  return ContextOperand(esi, Context::GLOBAL_INDEX);
}
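
// A small sketch (not part of the original header): loading the global object
// of the current context, which GlobalObjectOperand() resolves through
// ContextOperand(esi, Context::GLOBAL_INDEX):
//   mov(eax, GlobalObjectOperand());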


// Generates an Operand for saving parameters after PrepareCallApiFunction.
Operand ApiParameterOperand(int index);


#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) {                                               \
    byte* ia32_coverage_function =                                        \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
    masm->pushfd();                                                       \
    masm->pushad();                                                       \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__)));         \
    masm->call(ia32_coverage_function, RelocInfo::RUNTIME_ENTRY);         \
    masm->pop(eax);                                                       \
    masm->popad();                                                        \
    masm->popfd();                                                        \
  }                                                                       \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif
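
// In the code generators, ACCESS_MASM is conventionally wrapped in a local
// shorthand so every emitted instruction goes through it (a usage sketch, not
// part of this header):
//   #define __ ACCESS_MASM(masm)
//   __ mov(eax, Immediate(0));
//   __ ret(0);
//   #undef __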


} }  // namespace v8::internal

#endif  // V8_IA32_MACRO_ASSEMBLER_IA32_H_