// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
#define V8_IA32_MACRO_ASSEMBLER_IA32_H_

#include "assembler.h"
#include "frames.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
  // No special flags.
  NO_ALLOCATION_FLAGS = 0,
  // Return the pointer to the allocated object, already tagged as a heap
  // object.
  TAG_OBJECT = 1 << 0,
  // The content of the result register already contains the allocation top in
  // new space.
  RESULT_CONTAINS_TOP = 1 << 1
};


// Convenience for platform-independent signatures. We do not normally
// distinguish memory operands from other operands on ia32.
typedef Operand MemOperand;

enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };


bool AreAliased(Register r1, Register r2, Register r3, Register r4);


// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
 public:
  // The isolate parameter can be NULL if the macro assembler should
  // not use isolate-dependent functionality. In this case, it's the
  // responsibility of the caller to never invoke such a function on the
  // macro assembler.
  MacroAssembler(Isolate* isolate, void* buffer, int size);

  // ---------------------------------------------------------------------------
  // GC Support
  enum RememberedSetFinalAction {
    kReturnAtEnd,
    kFallThroughAtEnd
  };

  // Record in the remembered set the fact that we have a pointer to new space
  // at the address pointed to by the addr register. Only works if addr is not
  // in new space.
  void RememberedSetHelper(Register object,  // Used for debug code.
                           Register addr,
                           Register scratch,
                           SaveFPRegsMode save_fp,
                           RememberedSetFinalAction and_then);

  void CheckPageFlag(Register object,
                     Register scratch,
                     int mask,
                     Condition cc,
                     Label* condition_met,
                     Label::Distance condition_met_distance = Label::kFar);

  // Check if object is in new space. Jumps if the object is not in new space.
  // The register scratch can be object itself, but scratch will be clobbered.
  void JumpIfNotInNewSpace(Register object,
                           Register scratch,
                           Label* branch,
                           Label::Distance distance = Label::kFar) {
    InNewSpace(object, scratch, zero, branch, distance);
  }

  // Check if object is in new space. Jumps if the object is in new space.
  // The register scratch can be object itself, but it will be clobbered.
  void JumpIfInNewSpace(Register object,
                        Register scratch,
                        Label* branch,
                        Label::Distance distance = Label::kFar) {
    InNewSpace(object, scratch, not_zero, branch, distance);
  }

  // Check if an object has a given incremental marking color. Also uses ecx!
  void HasColor(Register object,
                Register scratch0,
                Register scratch1,
                Label* has_color,
                Label::Distance has_color_distance,
                int first_bit,
                int second_bit);

  void JumpIfBlack(Register object,
                   Register scratch0,
                   Register scratch1,
                   Label* on_black,
                   Label::Distance on_black_distance = Label::kFar);

  // Checks the color of an object. If the object is already grey or black
  // then we just fall through, since it is already live. If it is white and
  // we can determine that it doesn't need to be scanned, then we just mark it
  // black and fall through. For the rest we jump to the label so the
  // incremental marker can fix its assumptions.
  void EnsureNotWhite(Register object,
                      Register scratch1,
                      Register scratch2,
                      Label* object_is_white_and_not_data,
                      Label::Distance distance);

  // Notify the garbage collector that we wrote a pointer into an object.
  // |object| is the object being stored into, |value| is the object being
  // stored. value and scratch registers are clobbered by the operation.
  // The offset is the offset from the start of the object, not the offset from
  // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
  void RecordWriteField(
      Register object,
      int offset,
      Register value,
      Register scratch,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // As above, but the offset has the tag presubtracted. For use with
  // Operand(reg, off).
  void RecordWriteContextSlot(
      Register context,
      int offset,
      Register value,
      Register scratch,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK) {
    RecordWriteField(context,
                     offset + kHeapObjectTag,
                     value,
                     scratch,
                     save_fp,
                     remembered_set_action,
                     smi_check);
  }

  // Notify the garbage collector that we wrote a pointer into a fixed array.
  // |array| is the array being stored into, |value| is the object being
  // stored. |index| is the array index represented as a Smi. All registers
  // are clobbered by the operation. RecordWriteArray filters out smis so it
  // does not update the write barrier if the value is a smi.
  void RecordWriteArray(
      Register array,
      Register value,
      Register index,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

  // For page containing |object| mark region covering |address|
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. The address and value registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update the
  // write barrier if the value is a smi.
  void RecordWrite(
      Register object,
      Register address,
      Register value,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
      SmiCheck smi_check = INLINE_SMI_CHECK);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // ---------------------------------------------------------------------------
  // Debugger Support

  void DebugBreak();
#endif

  // Enter specific kind of exit frame. Expects the number of
  // arguments in register eax and sets up the number of arguments in
  // register edi and the pointer to the first argument in register
  // esi.
  void EnterExitFrame(bool save_doubles);

  void EnterApiExitFrame(int argc);

  // Leave the current exit frame. Expects the return value in
  // register eax:edx (untouched) and the pointer to the first
  // argument in register esi.
  void LeaveExitFrame(bool save_doubles);

  // Leave the current exit frame. Expects the return value in
  // register eax (untouched).
  void LeaveApiExitFrame();

  // Find the function context up the context chain.
  void LoadContext(Register dst, int context_chain_length);

  // Conditionally load the cached Array transitioned map of type
  // transitioned_kind from the global context if the map in register
  // map_in_out is the cached Array map in the global context of
  // expected_kind.
  void LoadTransitionedArrayMapConditional(
      ElementsKind expected_kind,
      ElementsKind transitioned_kind,
      Register map_in_out,
      Register scratch,
      Label* no_map_match);

  // Load the initial map for new Arrays from a JSFunction.
  void LoadInitialArrayMap(Register function_in,
                           Register scratch,
                           Register map_out);

  // Load the global function with the given index.
  void LoadGlobalFunction(int index, Register function);

  // Load the initial map from the global function. The registers
  // function and map can be the same.
  void LoadGlobalFunctionInitialMap(Register function, Register map);

  // Push and pop the registers that can hold pointers.
  void PushSafepointRegisters() { pushad(); }
  void PopSafepointRegisters() { popad(); }
  // Store the value in register/immediate src in the safepoint
  // register stack slot for register dst.
  void StoreToSafepointRegisterSlot(Register dst, Register src);
  void StoreToSafepointRegisterSlot(Register dst, Immediate src);
  void LoadFromSafepointRegisterSlot(Register dst, Register src);

  void LoadHeapObject(Register result, Handle<HeapObject> object);
  void PushHeapObject(Handle<HeapObject> object);

  void LoadObject(Register result, Handle<Object> object) {
    if (object->IsHeapObject()) {
      LoadHeapObject(result, Handle<HeapObject>::cast(object));
    } else {
      Set(result, Immediate(object));
    }
  }

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Set up call kind marking in ecx. The method takes ecx as an
  // explicit first parameter to make the code more readable at the
  // call sites.
  void SetCallKind(Register dst, CallKind kind);

  // Invoke the JavaScript function code by either calling or jumping.
  void InvokeCode(Register code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind) {
    InvokeCode(Operand(code), expected, actual, flag, call_wrapper, call_kind);
  }

  void InvokeCode(const Operand& code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  void InvokeCode(Handle<Code> code,
                  const ParameterCount& expected,
                  const ParameterCount& actual,
                  RelocInfo::Mode rmode,
                  InvokeFlag flag,
                  const CallWrapper& call_wrapper,
                  CallKind call_kind);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

  void InvokeFunction(Handle<JSFunction> function,
                      const ParameterCount& actual,
                      InvokeFlag flag,
                      const CallWrapper& call_wrapper,
                      CallKind call_kind);

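  // A minimal usage sketch (not part of the original header), assuming the
  // callee is already in edi and the actual argument count is in eax; the
  // register assignments and call kind are illustrative only.
  //
  //   ParameterCount actual(eax);
  //   __ InvokeFunction(edi, actual, CALL_FUNCTION,
  //                     NullCallWrapper(), CALL_AS_METHOD);
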
  // Invoke specified builtin JavaScript function. Adds an entry to
  // the unresolved list if the name does not resolve.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeFlag flag,
                     const CallWrapper& call_wrapper = NullCallWrapper());

  // Store the function for the given builtin in the target register.
  void GetBuiltinFunction(Register target, Builtins::JavaScript id);

  // Store the code object for the given builtin in the target register.
  void GetBuiltinEntry(Register target, Builtins::JavaScript id);

  // Expression support
  void Set(Register dst, const Immediate& x);
  void Set(const Operand& dst, const Immediate& x);

  // Support for constant splitting.
  bool IsUnsafeImmediate(const Immediate& x);
  void SafeSet(Register dst, const Immediate& x);
  void SafePush(const Immediate& x);

  // Compare against a known root, e.g. undefined, null, true, ...
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(const Operand& with, Heap::RootListIndex index);

  // Compare object type for heap object.
  // Incoming register is heap_object and outgoing register is map.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  void CmpInstanceType(Register map, InstanceType type);

  // Check if a map for a JSObject indicates that the object has fast elements.
  // Jump to the specified label if it does not.
  void CheckFastElements(Register map,
                         Label* fail,
                         Label::Distance distance = Label::kFar);

  // Check if a map for a JSObject indicates that the object can have both smi
  // and HeapObject elements. Jump to the specified label if it does not.
  void CheckFastObjectElements(Register map,
                               Label* fail,
                               Label::Distance distance = Label::kFar);

  // Check if a map for a JSObject indicates that the object has fast smi only
  // elements. Jump to the specified label if it does not.
  void CheckFastSmiOnlyElements(Register map,
                                Label* fail,
                                Label::Distance distance = Label::kFar);

  // Check to see if maybe_number can be stored as a double in
  // FastDoubleElements. If it can, store it at the index specified by key in
  // the FastDoubleElements array elements, otherwise jump to fail.
  void StoreNumberToDoubleElements(Register maybe_number,
                                   Register elements,
                                   Register key,
                                   Register scratch1,
                                   XMMRegister scratch2,
                                   Label* fail,
                                   bool specialize_for_processor);

  // Compare an object's map with the specified map and its transitioned
  // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS. FLAGS are set with
  // the result of the map compare. If multiple map compares are required, the
  // compare sequence branches to early_success.
  void CompareMap(Register obj,
                  Handle<Map> map,
                  Label* early_success,
                  CompareMapMode mode = REQUIRE_EXACT_MAP);

  // Check if the map of an object is equal to a specified map and branch to
  // label if not. Skip the smi check if not required (object is known to be a
  // heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
  // against maps that are ElementsKind transition maps of the specified map.
  void CheckMap(Register obj,
                Handle<Map> map,
                Label* fail,
                SmiCheckType smi_check_type,
                CompareMapMode mode = REQUIRE_EXACT_MAP);

  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal. Skip the smi check if not required (object is
  // known to be a heap object).
  void DispatchMap(Register obj,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);

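  // A minimal usage sketch (not part of the original header): bail out unless
  // eax holds a heap number. The register, the factory map handle and the
  // DONT_DO_SMI_CHECK choice are assumptions about the surrounding code.
  //
  //   Label not_a_heap_number;
  //   __ CheckMap(eax, masm->isolate()->factory()->heap_number_map(),
  //               &not_a_heap_number, DONT_DO_SMI_CHECK);
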
  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
  // same in which case it contains the instance type afterwards. Either of the
  // registers map and instance_type can be the same as heap_object.
  Condition IsObjectStringType(Register heap_object,
                               Register map,
                               Register instance_type);

  // Check if a heap object's type is in the JSObject range, not including
  // JSFunction. The object's map will be loaded in the map register.
  // Any or all of the three registers may be the same.
  // The contents of the scratch register will always be overwritten.
  void IsObjectJSObjectType(Register heap_object,
                            Register map,
                            Register scratch,
                            Label* fail);

  // The contents of the scratch register will be overwritten.
  void IsInstanceJSObjectType(Register map, Register scratch, Label* fail);

  // FCmp is similar to integer cmp, but requires unsigned
  // jcc instructions (je, ja, jae, jb, jbe, and jz).
  void FCmp();

  void ClampUint8(Register reg);

  void ClampDoubleToUint8(XMMRegister input_reg,
                          XMMRegister scratch_reg,
                          Register result_reg);


  // Smi tagging support.
  void SmiTag(Register reg) {
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    add(reg, reg);
  }
  void SmiUntag(Register reg) {
    sar(reg, kSmiTagSize);
  }

  // Modifies the register even if it does not contain a Smi!
  void SmiUntag(Register reg, Label* is_smi) {
    STATIC_ASSERT(kSmiTagSize == 1);
    sar(reg, kSmiTagSize);
    STATIC_ASSERT(kSmiTag == 0);
    j(not_carry, is_smi);
  }

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value,
                        Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }
  // Jump if the operand is a smi.
  inline void JumpIfSmi(Operand value,
                        Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }
  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value,
                           Label* not_smi_label,
                           Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(not_zero, not_smi_label, distance);
  }

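  // A minimal usage sketch (not part of the original header); the choice of
  // eax and the surrounding control flow are illustrative only.
  //
  //   Label not_smi;
  //   __ JumpIfNotSmi(eax, &not_smi);
  //   __ SmiUntag(eax);  // eax now holds the untagged integer value.
  //   __ SmiTag(eax);    // Re-tag before handing the value back to JS code.
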
  void LoadInstanceDescriptors(Register map, Register descriptors);

  void LoadPowerOf2(XMMRegister dst, Register scratch, int power);

  // Abort execution if argument is not a number. Used in debug code.
  void AbortIfNotNumber(Register object);

  // Abort execution if argument is not a smi. Used in debug code.
  void AbortIfNotSmi(Register object);

  // Abort execution if argument is a smi. Used in debug code.
  void AbortIfSmi(Register object);

  // Abort execution if argument is not a string. Used in debug code.
  void AbortIfNotString(Register object);

  // ---------------------------------------------------------------------------
  // Exception handling

Ben Murdoch3ef787d2012-04-12 10:51:47 +0100493 // Push a new try handler and link it into try handler chain.
494 void PushTryHandler(StackHandler::Kind kind, int handler_index);
Steve Blocka7e24c12009-10-30 11:49:00 +0000495
Leon Clarkee46be812010-01-19 14:06:41 +0000496 // Unlink the stack handler on top of the stack from the try handler chain.
497 void PopTryHandler();
Steve Blocka7e24c12009-10-30 11:49:00 +0000498
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100499 // Throw to the top handler in the try hander chain.
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100500 void Throw(Register value);
501
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100502 // Throw past all JS frames to the top JS entry frame.
503 void ThrowUncatchable(Register value);

  // ---------------------------------------------------------------------------
  // Inline caching support

  // Generate code for checking access rights - used for security checks
  // on access to global objects across environments. The holder register
  // is left untouched, but the scratch register is clobbered.
  void CheckAccessGlobalProxy(Register holder_reg,
                              Register scratch,
                              Label* miss);

  void GetNumberHash(Register r0, Register scratch);

  void LoadFromNumberDictionary(Label* miss,
                                Register elements,
                                Register key,
                                Register r0,
                                Register r1,
                                Register r2,
                                Register result);


  // ---------------------------------------------------------------------------
  // Allocation support

  // Allocate an object in new space. If the new space is exhausted control
  // continues at the gc_required label. The allocated object is returned in
  // result and end of the new object is returned in result_end. The register
  // scratch can be passed as no_reg in which case an additional object
  // reference will be added to the reloc info. The returned pointers in result
  // and result_end have not yet been tagged as heap objects. If
  // result_contains_top_on_entry is true the content of result is known to be
  // the allocation top on entry (could be result_end from a previous call to
  // AllocateInNewSpace). If result_contains_top_on_entry is true scratch
  // should be no_reg as it is never used.
  void AllocateInNewSpace(int object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(int header_size,
                          ScaleFactor element_size,
                          Register element_count,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

  void AllocateInNewSpace(Register object_size,
                          Register result,
                          Register result_end,
                          Register scratch,
                          Label* gc_required,
                          AllocationFlags flags);

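  // A minimal usage sketch (not part of the original header): allocate a
  // HeapNumber-sized object, already tagged, and branch to gc_required when
  // new space is full. The register choices are illustrative only.
  //
  //   Label gc_required;
  //   __ AllocateInNewSpace(HeapNumber::kSize, eax, ebx, no_reg,
  //                         &gc_required, TAG_OBJECT);
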
  // Undo allocation in new space. The object passed and objects allocated after
  // it will no longer be allocated. Make sure that no pointers are left to the
  // object(s) no longer allocated as they would be invalid when allocation is
  // un-done.
  void UndoAllocationInNewSpace(Register object);

  // Allocate a heap number in new space with undefined value. The
  // register scratch2 can be passed as no_reg; the others must be
  // valid registers. Returns tagged pointer in result register, or
  // jumps to gc_required if new space is full.
  void AllocateHeapNumber(Register result,
                          Register scratch1,
                          Register scratch2,
                          Label* gc_required);

  // Allocate a sequential string. All the header fields of the string object
  // are initialized.
  void AllocateTwoByteString(Register result,
                             Register length,
                             Register scratch1,
                             Register scratch2,
                             Register scratch3,
                             Label* gc_required);
  void AllocateAsciiString(Register result,
                           Register length,
                           Register scratch1,
                           Register scratch2,
                           Register scratch3,
                           Label* gc_required);
  void AllocateAsciiString(Register result,
                           int length,
                           Register scratch1,
                           Register scratch2,
                           Label* gc_required);

  // Allocate a raw cons string object. Only the map field of the result is
  // initialized.
  void AllocateTwoByteConsString(Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);
  void AllocateAsciiConsString(Register result,
                               Register scratch1,
                               Register scratch2,
                               Label* gc_required);

  // Allocate a raw sliced string object. Only the map field of the result is
  // initialized.
  void AllocateTwoByteSlicedString(Register result,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* gc_required);
  void AllocateAsciiSlicedString(Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* gc_required);

  // Copy memory, byte-by-byte, from source to destination. Not optimized for
  // long or aligned copies.
  // The contents of the length and scratch registers are destroyed.
  void CopyBytes(Register source,
                 Register destination,
                 Register length,
                 Register scratch);

  // Initialize fields with filler values. Fields starting at |start_offset|
  // but not including |end_offset| are overwritten with the value in |filler|.
  // At the end of the loop, |start_offset| takes the value of |end_offset|.
  void InitializeFieldsWithFiller(Register start_offset,
                                  Register end_offset,
                                  Register filler);

  // ---------------------------------------------------------------------------
  // Support functions.

  // Check a boolean-bit of a Smi field.
  void BooleanBitTest(Register object, int field_offset, int bit_index);

  // Check if result is zero and op is negative.
  void NegativeZeroTest(Register result, Register op, Label* then_label);

  // Check if result is zero and any of op1 and op2 are negative.
  // Register scratch is destroyed, and it must be different from op2.
  void NegativeZeroTest(Register result, Register op1, Register op2,
                        Register scratch, Label* then_label);

  // Try to get the function prototype of a function and put the value in
  // the result register. Checks that the function really is a
  // function and jumps to the miss label if the fast checks fail. The
  // function register will be untouched; the other registers may be
  // clobbered.
  void TryGetFunctionPrototype(Register function,
                               Register result,
                               Register scratch,
                               Label* miss,
                               bool miss_on_bound_function = false);

  // Generates code for reporting that an illegal operation has
  // occurred.
  void IllegalOperation(int num_arguments);

  // Picks out an array index from the hash field.
  // Register use:
  //   hash - holds the index's hash. Clobbered.
  //   index - holds the overwritten index on exit.
  void IndexFromHash(Register hash, Register index);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a code stub. Generate the code if necessary.
  void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);

  // Tail call a code stub (jump). Generate the code if necessary.
  void TailCallStub(CodeStub* stub);

  // Return from a code stub after popping its arguments.
  void StubReturn(int argc);

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments);
  void CallRuntimeSaveDoubles(Runtime::FunctionId id);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId id, int num_arguments);

  // Convenience function: call an external reference.
  void CallExternalReference(ExternalReference ref, int num_arguments);

  // Tail call of a runtime routine (jump).
  // Like JumpToExternalReference, but also takes care of passing the number
  // of parameters.
  void TailCallExternalReference(const ExternalReference& ext,
                                 int num_arguments,
                                 int result_size);

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid,
                       int num_arguments,
                       int result_size);

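  // A minimal usage sketch (not part of the original header): push the
  // arguments, then call the runtime function with the matching argument
  // count. The particular runtime id and argument register are illustrative.
  //
  //   __ push(edi);  // The function, as the single runtime argument.
  //   __ CallRuntime(Runtime::kNewFunctionContext, 1);
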
  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in esp[0], esp[4],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

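  // A minimal usage sketch (not part of the original header): reserve slots,
  // store the word-sized arguments directly, then call. The argument
  // registers are illustrative and the external reference name below is
  // hypothetical.
  //
  //   __ PrepareCallCFunction(2, ecx);
  //   __ mov(Operand(esp, 0 * kPointerSize), eax);
  //   __ mov(Operand(esp, 1 * kPointerSize), ebx);
  //   __ CallCFunction(
  //       ExternalReference::some_helper_function(isolate),  // Hypothetical.
  //       2);
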
  // Prepares stack to put arguments (aligns and so on). Reserves
  // space for return value if needed (assumes the return value is a handle).
  // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
  // etc. Saves context (esi). If space was reserved for return value then
  // stores the pointer to the reserved slot into esi.
  void PrepareCallApiFunction(int argc);

  // Calls an API function. Allocates HandleScope, extracts returned value
  // from handle and propagates exceptions. Clobbers ebx, edi and
  // caller-save registers. Restores context. On return removes
  // stack_space * kPointerSize (GCed).
  void CallApiFunctionAndReturn(Address function_address, int stack_space);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext);

  // ---------------------------------------------------------------------------
  // Utilities

  void Ret();

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1. Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the esp register.
  void Drop(int element_count);

  void Call(Label* target) { call(target); }

  // Emit call to the code we are currently generating.
  void CallSelf() {
    Handle<Code> self(reinterpret_cast<Code**>(CodeObject().location()));
    call(self, RelocInfo::CODE_TARGET);
  }

  // Move if the registers are not identical.
  void Move(Register target, Register source);

  // Push a handle value.
  void Push(Handle<Object> handle) { push(Immediate(handle)); }

  Handle<Object> CodeObject() {
    ASSERT(!code_object_.is_null());
    return code_object_;
  }


  // ---------------------------------------------------------------------------
  // StatsCounter support

  void SetCounter(StatsCounter* counter, int value);
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);
  void IncrementCounter(Condition cc, StatsCounter* counter, int value);
  void DecrementCounter(Condition cc, StatsCounter* counter, int value);


  // ---------------------------------------------------------------------------
  // Debugging

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, const char* msg);

  void AssertFastElements(Register elements);

  // Like Assert(), but always enabled.
  void Check(Condition cc, const char* msg);

  // Print a message to stdout and abort execution.
  void Abort(const char* msg);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Verify restrictions about code generated in stubs.
  void set_generating_stub(bool value) { generating_stub_ = value; }
  bool generating_stub() { return generating_stub_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
  bool allow_stub_calls() { return allow_stub_calls_; }
  void set_has_frame(bool value) { has_frame_ = value; }
  bool has_frame() { return has_frame_; }
  inline bool AllowThisStubCall(CodeStub* stub);

  // ---------------------------------------------------------------------------
  // String utilities.

  // Check whether the instance type represents a flat ASCII string. Jump to
  // the label if not. If the instance type can be scratched, specify the same
  // register for both the instance type and the scratch register.
  void JumpIfInstanceTypeIsNotSequentialAscii(Register instance_type,
                                              Register scratch,
                                              Label* on_not_flat_ascii_string);

  // Checks if both objects are sequential ASCII strings, and jumps to label
  // if either is not.
  void JumpIfNotBothSequentialAsciiStrings(Register object1,
                                           Register object2,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* on_not_flat_ascii_strings);

  static int SafepointRegisterStackIndex(Register reg) {
    return SafepointRegisterStackIndex(reg.code());
  }

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void LeaveFrame(StackFrame::Type type);

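  // A minimal usage sketch (not part of the original header): code that calls
  // stubs or the runtime is normally wrapped in a FrameScope (declared in
  // frames.h), which builds the frame through EnterFrame/LeaveFrame and keeps
  // has_frame() accurate. The INTERNAL frame type and runtime id here are
  // illustrative only.
  //
  //   {
  //     FrameScope scope(masm, StackFrame::INTERNAL);
  //     __ CallRuntime(Runtime::kStackGuard, 0);
  //   }
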
  // Expects object in eax and returns map with validated enum cache
  // in eax. Assumes that any other register can be used as a scratch.
  void CheckEnumCache(Label* call_runtime);

 private:
  bool generating_stub_;
  bool allow_stub_calls_;
  bool has_frame_;
  // This handle will be patched with the code object on installation.
  Handle<Object> code_object_;

  // Helper functions for generating invokes.
  void InvokePrologue(const ParameterCount& expected,
                      const ParameterCount& actual,
                      Handle<Code> code_constant,
                      const Operand& code_operand,
                      Label* done,
                      bool* definitely_mismatches,
                      InvokeFlag flag,
                      Label::Distance done_distance,
                      const CallWrapper& call_wrapper = NullCallWrapper(),
                      CallKind call_kind = CALL_AS_METHOD);

  void EnterExitFramePrologue();
  void EnterExitFrameEpilogue(int argc, bool save_doubles);

  void LeaveExitFrameEpilogue();

  // Allocation support helpers.
  void LoadAllocationTopHelper(Register result,
                               Register scratch,
                               AllocationFlags flags);
  void UpdateAllocationTopHelper(Register result_end, Register scratch);

  // Helper for PopHandleScope. Allowed to perform a GC and returns
  // NULL if gc_allowed. Does not perform a GC if !gc_allowed, and
  // possibly returns a failure object indicating an allocation failure.
  MUST_USE_RESULT MaybeObject* PopHandleScopeHelper(Register saved,
                                                    Register scratch,
                                                    bool gc_allowed);

  // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
  void InNewSpace(Register object,
                  Register scratch,
                  Condition cc,
                  Label* condition_met,
                  Label::Distance condition_met_distance = Label::kFar);

  // Helper for finding the mark bits for an address. Afterwards, the
  // bitmap register points at the word with the mark bits and the mask
  // register holds the position of the first bit. Uses ecx as scratch and
  // leaves addr_reg unchanged.
  inline void GetMarkBits(Register addr_reg,
                          Register bitmap_reg,
                          Register mask_reg);

  // Helper for throwing exceptions. Compute a handler address and jump to
  // it. See the implementation for register usage.
  void JumpToHandlerEntry();

  // Compute memory operands for safepoint stack slots.
  Operand SafepointRegisterSlot(Register reg);
  static int SafepointRegisterStackIndex(int reg_code);

  // Needs access to SafepointRegisterStackIndex for optimized frame
  // traversal.
  friend class OptimizedFrame;
};


// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to
// emit relocation information. If any of these constraints are violated it
// causes an assertion to fail.
class CodePatcher {
 public:
  CodePatcher(byte* address, int size);
  virtual ~CodePatcher();

  // Macro assembler to emit code.
  MacroAssembler* masm() { return &masm_; }

 private:
  byte* address_;        // The address of the code being patched.
  int size_;             // Number of bytes of the expected patch size.
  MacroAssembler masm_;  // Macro assembler used to generate the code.
};

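// A minimal usage sketch (not part of the original header): patch exactly two
// bytes at |address| with int3 breakpoints. The patch size and instruction
// choice are illustrative only; the emitted byte count must equal |size|.
//
//   CodePatcher patcher(address, 2);
//   patcher.masm()->int3();
//   patcher.masm()->int3();
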

// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}


// Generate an Operand for loading an indexed field from an object.
inline Operand FieldOperand(Register object,
                            Register index,
                            ScaleFactor scale,
                            int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}

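// A minimal usage sketch (not part of the original header): load an object's
// map, then load an element of a FixedArray using an already-untagged index
// in edi. The register choices and field constants are illustrative only.
//
//   __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));
//   __ mov(ecx, FieldOperand(ebx, edi, times_pointer_size,
//                            FixedArray::kHeaderSize));
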

inline Operand ContextOperand(Register context, int index) {
  return Operand(context, Context::SlotOffset(index));
}


inline Operand GlobalObjectOperand() {
  return ContextOperand(esi, Context::GLOBAL_INDEX);
}


// Generates an Operand for saving parameters after PrepareCallApiFunction.
Operand ApiParameterOperand(int index);

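// A minimal usage sketch (not part of the original header): reserve the API
// argument slots, fill them, and let CallApiFunctionAndReturn tear down the
// exit frame. kApiArgc, kStackSpace and function_address are hypothetical
// names standing in for values defined by the surrounding stub.
//
//   __ PrepareCallApiFunction(kApiArgc);          // kApiArgc: hypothetical.
//   __ mov(ApiParameterOperand(0), eax);
//   __ CallApiFunctionAndReturn(function_address,  // Hypothetical address.
//                               kStackSpace);      // Hypothetical constant.
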

#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
#define ACCESS_MASM(masm) {                                                 \
    byte* ia32_coverage_function =                                          \
        reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage));   \
    masm->pushfd();                                                         \
    masm->pushad();                                                         \
    masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__)));           \
    masm->call(ia32_coverage_function, RelocInfo::RUNTIME_ENTRY);           \
    masm->pop(eax);                                                         \
    masm->popad();                                                          \
    masm->popfd();                                                          \
  }                                                                         \
  masm->
#else
#define ACCESS_MASM(masm) masm->
#endif

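// A minimal usage sketch (not part of the original header): code generators
// conventionally bind a local masm pointer to the macro so that every emitted
// instruction goes through ACCESS_MASM (and, with GENERATED_CODE_COVERAGE
// defined, gets logged).
//
//   #define __ ACCESS_MASM(masm)
//   __ mov(eax, Immediate(0));
//   #undef __
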

} }  // namespace v8::internal

#endif  // V8_IA32_MACRO_ASSEMBLER_IA32_H_