// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ISOLATE_H_
#define V8_ISOLATE_H_

#include "../include/v8-debug.h"
#include "allocation.h"
#include "apiutils.h"
#include "atomicops.h"
#include "builtins.h"
#include "contexts.h"
#include "execution.h"
#include "frames.h"
#include "global-handles.h"
#include "handles.h"
#include "heap.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "runtime.h"
#include "zone.h"

namespace v8 {
namespace internal {

class Bootstrapper;
class CodeGenerator;
class CodeRange;
class CompilationCache;
class ContextSlotCache;
class ContextSwitcher;
class Counters;
class CpuFeatures;
class CpuProfiler;
class DeoptimizerData;
class Deserializer;
class EmptyStatement;
class ExternalReferenceTable;
class Factory;
class FunctionInfoListener;
class HandleScopeImplementer;
class HeapProfiler;
class InlineRuntimeFunctionsTable;
class NoAllocationStringAllocator;
class PcToCodeCache;
class PreallocatedMemoryThread;
class RegExpStack;
class SaveContext;
class UnicodeCache;
class StringInputBuffer;
class StringTracker;
class StubCache;
class ThreadManager;
class ThreadState;
class ThreadVisitor;  // Defined in v8threads.h
class VMState;

// 'void function pointer', used to roundtrip the
// ExternalReference::ExternalReferenceRedirector since we can not include
// assembler.h, where it is defined, here.
typedef void* ExternalReferenceRedirectorPointer();


#ifdef ENABLE_DEBUGGER_SUPPORT
class Debug;
class Debugger;
class DebuggerAgent;
#endif

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
class Redirection;
class Simulator;
#endif


// Static indirection table for handles to constants. If a frame element or
// a Result represents a constant, the data contains an index into this
// table of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;

#define RETURN_IF_SCHEDULED_EXCEPTION(isolate)    \
  if (isolate->has_scheduled_exception())         \
      return isolate->PromoteScheduledException()

#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \
  if (call.is_null()) {                                    \
    ASSERT(isolate->has_pending_exception());              \
    return value;                                          \
  }

#define RETURN_IF_EMPTY_HANDLE(isolate, call)  \
  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())

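// Typical usage pattern for the macros above, as a sketch (DoSomething and
// CallThatMayFail are illustrative names, not functions declared here):
//
//   MaybeObject* DoSomething(Isolate* isolate) {
//     RETURN_IF_SCHEDULED_EXCEPTION(isolate);
//     Handle<Object> result = CallThatMayFail(isolate);
//     RETURN_IF_EMPTY_HANDLE(isolate, result);
//     return *result;
//   }
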
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)


// Platform-independent, reliable thread identifier.
class ThreadId {
 public:
  // Creates an invalid ThreadId.
  ThreadId() : id_(kInvalidId) {}

  // Returns ThreadId for current thread.
  static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }

  // Returns invalid ThreadId (guaranteed not to be equal to any thread).
  static ThreadId Invalid() { return ThreadId(kInvalidId); }

  // Compares ThreadIds for equality.
  INLINE(bool Equals(const ThreadId& other) const) {
    return id_ == other.id_;
  }

  // Checks whether this ThreadId refers to any thread.
  INLINE(bool IsValid() const) {
    return id_ != kInvalidId;
  }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::GetCurrentThreadId).
  int ToInteger() const { return id_; }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::TerminateExecution).
  static ThreadId FromInteger(int id) { return ThreadId(id); }

 private:
  static const int kInvalidId = -1;

  explicit ThreadId(int id) : id_(id) {}

  static int AllocateThreadId();

  static int GetCurrentThreadId();

  int id_;

  static Atomic32 highest_thread_id_;

  friend class Isolate;
};

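// Usage sketch for ThreadId (saved_id is an illustrative integer previously
// obtained from ToInteger()):
//
//   ThreadId current = ThreadId::Current();
//   ASSERT(current.IsValid());
//   if (current.Equals(ThreadId::FromInteger(saved_id))) {
//     // Running on the thread that was recorded earlier.
//   }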

class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack. If such an
  // address is needed, use try_catch_handler_address.
  v8::TryCatch* TryCatchHandler();

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack. When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer. When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  inline Address try_catch_handler_address() {
    return try_catch_handler_address_;
  }

  // Set the address of the top C++ try catch handler.
  inline void set_try_catch_handler_address(Address address) {
    try_catch_handler_address_ = address;
  }

  void Free() {
    ASSERT(!has_pending_message_);
    ASSERT(!external_caught_exception_);
    ASSERT(try_catch_handler_address_ == NULL);
  }

  Isolate* isolate_;
  // The context where the current execution method is created and for variable
  // lookups.
  Context* context_;
  ThreadId thread_id_;
  MaybeObject* pending_exception_;
  bool has_pending_message_;
  Object* pending_message_obj_;
  Script* pending_message_script_;
  int pending_message_start_pos_;
  int pending_message_end_pos_;
  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception. We may want to
  // unify them later.
  MaybeObject* scheduled_exception_;
  bool external_caught_exception_;
  SaveContext* save_context_;
  v8::TryCatch* catcher_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;  // try-blocks are chained through the stack

#ifdef USE_SIMULATOR
#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
  Simulator* simulator_;
#endif
#endif  // USE_SIMULATOR

  Address js_entry_sp_;  // the stack pointer of the bottom js entry frame
  Address external_callback_;  // the external callback we're currently in
  StateTag current_vm_state_;

  // Generated code scratch locations.
  int32_t formal_count_;

  // Call back function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

  // Whether out of memory exceptions should be ignored.
  bool ignore_out_of_memory_;

 private:
  void InitializeInternal();

  Address try_catch_handler_address_;
};

#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)

#define ISOLATE_PLATFORM_INIT_LIST(V)                                          \
  /* VirtualFrame::SpilledScope state */                                       \
  V(bool, is_virtual_frame_in_spilled_scope, false)                            \
  /* CodeGenerator::EmitNamedStore state */                                    \
  V(int, inlined_write_barrier_size, -1)

#if !defined(__arm__) && !defined(__mips__)
class HashMap;
#endif

#else

#define ISOLATE_PLATFORM_INIT_LIST(V)

#endif

#ifdef ENABLE_DEBUGGER_SUPPORT

#define ISOLATE_DEBUGGER_INIT_LIST(V)                                          \
  V(v8::Debug::EventCallback, debug_event_callback, NULL)                      \
  V(DebuggerAgent*, debugger_agent_instance, NULL)
#else

#define ISOLATE_DEBUGGER_INIT_LIST(V)

#endif

#ifdef DEBUG

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                                       \
  V(CommentStatistic, paged_space_comments_statistics,                         \
    CommentStatistic::kMaxComments + 1)
#else

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif

#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
  /* SerializerDeserializer state. */                                          \
  V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity)  \
  V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize)     \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
  V(int, suffix_table, (kBMMaxShift + 1))                                      \
  V(uint32_t, random_seed, 2)                                                  \
  V(uint32_t, private_random_seed, 2)                                          \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;

#define ISOLATE_INIT_LIST(V)                                                   \
  /* AssertNoZoneAllocation state. */                                          \
  V(bool, zone_allow_allocation, true)                                         \
  /* SerializerDeserializer state. */                                          \
  V(int, serialize_partial_snapshot_cache_length, 0)                           \
  /* Assembler state. */                                                       \
  /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */    \
  V(byte*, assembler_spare_buffer, NULL)                                       \
  V(FatalErrorCallback, exception_behavior, NULL)                              \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
  V(v8::Debug::MessageHandler, message_handler, NULL)                          \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the global context. */                                  \
  V(int, next_serial_number, 0)                                                \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
  V(bool, always_allow_natives_syntax, false)                                  \
  /* Part of the state of liveedit. */                                         \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
  /* State for Relocatable. */                                                 \
  V(Relocatable*, relocatable_top, NULL)                                       \
  /* State for CodeEntry in profile-generator. */                              \
  V(CodeGenerator*, current_code_generator, NULL)                              \
  V(bool, jump_target_compiling_deferred_code, false)                          \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
  V(Object*, string_stream_current_security_token, NULL)                       \
  /* TODO(isolates): Release this on destruction? */                           \
  V(int*, irregexp_interpreter_backtrack_stack_cache, NULL)                    \
  /* Serializer state. */                                                      \
  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
  /* AstNode state. */                                                         \
  V(unsigned, ast_node_id, 0)                                                  \
  V(unsigned, ast_node_count, 0)                                               \
  /* SafeStackFrameIterator activations count. */                              \
  V(int, safe_stack_iterator_counter, 0)                                       \
  V(uint64_t, enabled_cpu_features, 0)                                         \
  V(CpuProfiler*, cpu_profiler, NULL)                                          \
  V(HeapProfiler*, heap_profiler, NULL)                                        \
  ISOLATE_PLATFORM_INIT_LIST(V)                                                \
  ISOLATE_DEBUGGER_INIT_LIST(V)

class Isolate {
  // These forward declarations are required to make the friend declarations in
  // PerIsolateThreadData work on some older versions of gcc.
  class ThreadDataTable;
  class EntryStackItem;
 public:
  ~Isolate();

  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }
    void set_stack_limit(uintptr_t value) { stack_limit_ = value; }
    uintptr_t stack_limit() const { return stack_limit_; }
    ThreadState* thread_state() const { return thread_state_; }
    void set_thread_state(ThreadState* value) { thread_state_ = value; }

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
    Simulator* simulator() const { return simulator_; }
    void set_simulator(Simulator* simulator) {
      simulator_ = simulator;
    }
#endif

    bool Matches(Isolate* isolate, ThreadId thread_id) const {
      return isolate_ == isolate && thread_id_.Equals(thread_id);
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
    Simulator* simulator_;
#endif

    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };


  enum AddressId {
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
    FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
#undef DECLARE_ENUM
    kIsolateAddressCount
  };

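  // As an example of the expansion above, the C(Handler, handler) entry in
  // FOR_EACH_ISOLATE_ADDRESS_NAME becomes the enumerator kHandlerAddress,
  // and kIsolateAddressCount bounds the whole list.
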
  // Returns the PerIsolateThreadData for the current thread (or NULL if one is
  // not currently set).
  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    return reinterpret_cast<PerIsolateThreadData*>(
        Thread::GetThreadLocal(per_isolate_thread_data_key_));
  }

  // Returns the isolate inside which the current thread is running.
  INLINE(static Isolate* Current()) {
    Isolate* isolate = reinterpret_cast<Isolate*>(
        Thread::GetExistingThreadLocal(isolate_key_));
    ASSERT(isolate != NULL);
    return isolate;
  }

  INLINE(static Isolate* UncheckedCurrent()) {
    return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
  }

  // Usually called by Init(), but can be called early e.g. to allow
  // testing components that require logging but not the whole
  // isolate.
  //
  // Safe to call more than once.
  void InitializeLoggingAndCounters();

  bool Init(Deserializer* des);

  bool IsInitialized() { return state_ == INITIALIZED; }

  // True if at least one thread Enter'ed this isolate.
  bool IsInUse() { return entry_stack_ != NULL; }

  // Destroys the non-default isolates.
  // Sets default isolate into "has_been_disposed" state rather than
  // destroying it, for legacy API reasons.
  void TearDown();

  bool IsDefaultIsolate() const { return this == default_isolate_; }

  // Ensures that process-wide resources and the default isolate have been
  // allocated. It is only necessary to call this method in rare cases, for
  // example if you are using V8 from within the body of a static initializer.
  // Safe to call multiple times.
  static void EnsureDefaultIsolate();

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, return NULL.
  PerIsolateThreadData* FindPerThreadDataForThisThread();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Get the debugger from the default isolate. Preinitializes the
  // default isolate if needed.
  static Debugger* GetDefaultIsolateDebugger();
#endif

  // Get the stack guard from the default isolate. Preinitializes the
  // default isolate if needed.
  static StackGuard* GetDefaultIsolateStackGuard();

  // Returns the key used to store the pointer to the current isolate.
  // Used internally for V8 threads that do not execute JavaScript but still
  // are part of the domain of an isolate (like the context switcher).
  static Thread::LocalStorageKey isolate_key() {
    return isolate_key_;
  }

  // Returns the key used to store process-wide thread IDs.
  static Thread::LocalStorageKey thread_id_key() {
    return thread_id_key_;
  }

  // If a client attempts to create a Locker without specifying an isolate,
  // we assume that the client is using legacy behavior. Set up the current
  // thread to be inside the implicit isolate (or fail a check if we have
  // switched to non-legacy behavior).
  static void EnterDefaultIsolate();

  // Mutex for serializing access to break control structures.
  Mutex* break_access() { return break_access_; }

  // Mutex for serializing access to debugger.
  Mutex* debugger_access() { return debugger_access_; }

  Address get_address_from_id(AddressId id);

  // Access to top context (where the current function object was created).
  Context* context() { return thread_local_top_.context_; }
  void set_context(Context* context) {
    ASSERT(context == NULL || context->IsContext());
    thread_local_top_.context_ = context;
  }
  Context** context_address() { return &thread_local_top_.context_; }

  SaveContext* save_context() { return thread_local_top_.save_context_; }
  void set_save_context(SaveContext* save) {
    thread_local_top_.save_context_ = save;
  }

  // Access to current thread id.
  ThreadId thread_id() { return thread_local_top_.thread_id_; }
  void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; }

  // Interface to pending exception.
  MaybeObject* pending_exception() {
    ASSERT(has_pending_exception());
    return thread_local_top_.pending_exception_;
  }
  bool external_caught_exception() {
    return thread_local_top_.external_caught_exception_;
  }
  void set_external_caught_exception(bool value) {
    thread_local_top_.external_caught_exception_ = value;
  }
  void set_pending_exception(MaybeObject* exception) {
    thread_local_top_.pending_exception_ = exception;
  }
  void clear_pending_exception() {
    thread_local_top_.pending_exception_ = heap_.the_hole_value();
  }
  MaybeObject** pending_exception_address() {
    return &thread_local_top_.pending_exception_;
  }
  bool has_pending_exception() {
    return !thread_local_top_.pending_exception_->IsTheHole();
  }
  void clear_pending_message() {
    thread_local_top_.has_pending_message_ = false;
    thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
    thread_local_top_.pending_message_script_ = NULL;
  }
  v8::TryCatch* try_catch_handler() {
    return thread_local_top_.TryCatchHandler();
  }
  Address try_catch_handler_address() {
    return thread_local_top_.try_catch_handler_address();
  }
  bool* external_caught_exception_address() {
    return &thread_local_top_.external_caught_exception_;
  }
  v8::TryCatch* catcher() {
    return thread_local_top_.catcher_;
  }
  void set_catcher(v8::TryCatch* catcher) {
    thread_local_top_.catcher_ = catcher;
  }

  MaybeObject** scheduled_exception_address() {
    return &thread_local_top_.scheduled_exception_;
  }
  MaybeObject* scheduled_exception() {
    ASSERT(has_scheduled_exception());
    return thread_local_top_.scheduled_exception_;
  }
  bool has_scheduled_exception() {
    return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
  }
  void clear_scheduled_exception() {
    thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
  }

  bool IsExternallyCaught();

  bool is_catchable_by_javascript(MaybeObject* exception) {
    return (exception != Failure::OutOfMemoryException()) &&
        (exception != heap()->termination_exception());
  }

  // JS execution stack (see frames.h).
  static Address c_entry_fp(ThreadLocalTop* thread) {
    return thread->c_entry_fp_;
  }
  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }

  inline Address* c_entry_fp_address() {
    return &thread_local_top_.c_entry_fp_;
  }
  inline Address* handler_address() { return &thread_local_top_.handler_; }

  // Bottom JS entry (see StackTracer::Trace in log.cc).
  static Address js_entry_sp(ThreadLocalTop* thread) {
    return thread->js_entry_sp_;
  }
  inline Address* js_entry_sp_address() {
    return &thread_local_top_.js_entry_sp_;
  }

  // Generated code scratch locations.
  void* formal_count_address() { return &thread_local_top_.formal_count_; }

  // Returns the global object of the current context. It could be
  // a builtin object, or a js global object.
  Handle<GlobalObject> global() {
    return Handle<GlobalObject>(context()->global());
  }

  // Returns the global proxy object of the current context.
  Object* global_proxy() {
    return context()->global_proxy();
  }

  Handle<JSBuiltinsObject> js_builtins_object() {
    return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
  }

  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
  void FreeThreadResources() { thread_local_top_.Free(); }

  // This method is called by the API after operations that may throw
  // exceptions. If an exception was thrown and not handled by an external
  // handler, the exception is scheduled to be rethrown when we return to
  // running JavaScript code. If an exception is scheduled, true is returned.
  bool OptionalRescheduleException(bool is_bottom_call);

  class ExceptionScope {
   public:
    explicit ExceptionScope(Isolate* isolate) :
      // Scope currently can only be used for regular exceptions, not
      // failures like OOM or termination exception.
      isolate_(isolate),
      pending_exception_(isolate_->pending_exception()->ToObjectUnchecked()),
      catcher_(isolate_->catcher())
    { }

    ~ExceptionScope() {
      isolate_->set_catcher(catcher_);
      isolate_->set_pending_exception(*pending_exception_);
    }

   private:
    Isolate* isolate_;
    Handle<Object> pending_exception_;
    v8::TryCatch* catcher_;
  };

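  // Usage sketch for ExceptionScope: preserve the current pending exception
  // and catcher across an operation that may itself throw and clear
  // exceptions (the callee name is illustrative):
  //
  //   {
  //     ExceptionScope scope(isolate);
  //     DoCleanupThatMayThrow(isolate);
  //   }  // Original pending exception and catcher are restored here.
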
  void SetCaptureStackTraceForUncaughtExceptions(
      bool capture,
      int frame_limit,
      StackTrace::StackTraceOptions options);

  // Tells whether the current context has experienced an out of memory
  // exception.
  bool is_out_of_memory();
  bool ignore_out_of_memory() {
    return thread_local_top_.ignore_out_of_memory_;
  }
  void set_ignore_out_of_memory(bool value) {
    thread_local_top_.ignore_out_of_memory_ = value;
  }

  void PrintCurrentStackTrace(FILE* out);
  void PrintStackTrace(FILE* out, char* thread_data);
  void PrintStack(StringStream* accumulator);
  void PrintStack();
  Handle<String> StackTraceString();
  Handle<JSArray> CaptureCurrentStackTrace(
      int frame_limit,
      StackTrace::StackTraceOptions options);

  // Returns if the top context may access the given global object. If
  // the result is false, the pending exception is guaranteed to be
  // set.
  bool MayNamedAccess(JSObject* receiver,
                      Object* key,
                      v8::AccessType type);
  bool MayIndexedAccess(JSObject* receiver,
                        uint32_t index,
                        v8::AccessType type);

  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
  void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);

  // Exception throwing support. The caller should use the result
  // of Throw() as its return value.
  Failure* Throw(Object* exception, MessageLocation* location = NULL);
  // Re-throw an exception. This involves no error reporting since
  // error reporting was handled when the exception was thrown
  // originally.
  Failure* ReThrow(MaybeObject* exception, MessageLocation* location = NULL);
  void ScheduleThrow(Object* exception);
  void ReportPendingMessages();
  Failure* ThrowIllegalOperation();

  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
  Failure* PromoteScheduledException();
  void DoThrow(MaybeObject* exception, MessageLocation* location);
  // Checks if exception should be reported and finds out if it's
  // caught externally.
  bool ShouldReportException(bool* can_be_caught_externally,
                             bool catchable_by_javascript);

  // Attempts to compute the current source location, storing the
  // result in the target out parameter.
  void ComputeLocation(MessageLocation* target);

  // Override command line flag.
  void TraceException(bool flag);

  // Out of resource exception helpers.
  Failure* StackOverflow();
  Failure* TerminateExecution();

  // Administration
  void Iterate(ObjectVisitor* v);
  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
  char* Iterate(ObjectVisitor* v, char* t);
  void IterateThread(ThreadVisitor* v);
  void IterateThread(ThreadVisitor* v, char* t);


  // Returns the current global context.
  Handle<Context> global_context();

  // Returns the global context of the calling JavaScript code. That
  // is, the global context of the top-most JavaScript frame.
  Handle<Context> GetCallingGlobalContext();

  void RegisterTryCatchHandler(v8::TryCatch* that);
  void UnregisterTryCatchHandler(v8::TryCatch* that);

  char* ArchiveThread(char* to);
  char* RestoreThread(char* from);

  static const char* const kStackOverflowMessage;

  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
  static const int kBMMaxShift = 250;        // See StringSearchBase.

  // Accessors.
#define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
  inline type name() const {                                            \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return name##_;                                                     \
  }                                                                     \
  inline void set_##name(type value) {                                  \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    name##_ = value;                                                    \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR

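  // As an example, the ISOLATE_INIT_LIST entry
  //   V(int, next_serial_number, 0)
  // expands through GLOBAL_ACCESSOR into
  //   inline int next_serial_number() const;
  //   inline void set_next_serial_number(int value);
  // backed by a next_serial_number_ field declared further below via
  // GLOBAL_BACKING_STORE.
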
#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
  inline type* name() {                                                 \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return &(name##_)[0];                                               \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

#define GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name)      \
  Handle<type> name() {                                       \
    return Handle<type>(context()->global_context()->name()); \
  }
  GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSOR)
#undef GLOBAL_CONTEXT_FIELD_ACCESSOR

  Bootstrapper* bootstrapper() { return bootstrapper_; }
  Counters* counters() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(counters_ != NULL);
    return counters_;
  }
  CodeRange* code_range() { return code_range_; }
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
  Logger* logger() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(logger_ != NULL);
    return logger_;
  }
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
  StatsTable* stats_table();
  StubCache* stub_cache() { return stub_cache_; }
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }

  TranscendentalCache* transcendental_cache() const {
    return transcendental_cache_;
  }

  MemoryAllocator* memory_allocator() {
    return memory_allocator_;
  }

  KeyedLookupCache* keyed_lookup_cache() {
    return keyed_lookup_cache_;
  }

  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

  v8::ImplementationUtilities::HandleScopeData* handle_scope_data() {
    return &handle_scope_data_;
  }
  HandleScopeImplementer* handle_scope_implementer() {
    ASSERT(handle_scope_implementer_);
    return handle_scope_implementer_;
  }
  Zone* zone() { return &zone_; }

  UnicodeCache* unicode_cache() {
    return unicode_cache_;
  }

  PcToCodeCache* pc_to_code_cache() { return pc_to_code_cache_; }

  StringInputBuffer* write_input_buffer() { return write_input_buffer_; }

  GlobalHandles* global_handles() { return global_handles_; }

  ThreadManager* thread_manager() { return thread_manager_; }

  ContextSwitcher* context_switcher() { return context_switcher_; }

  void set_context_switcher(ContextSwitcher* switcher) {
    context_switcher_ = switcher;
  }

  StringTracker* string_tracker() { return string_tracker_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  StringInputBuffer* objects_string_compare_buffer_a() {
    return &objects_string_compare_buffer_a_;
  }

  StringInputBuffer* objects_string_compare_buffer_b() {
    return &objects_string_compare_buffer_b_;
  }

  StaticResource<StringInputBuffer>* objects_string_input_buffer() {
    return &objects_string_input_buffer_;
  }

  RuntimeState* runtime_state() { return &runtime_state_; }

  StaticResource<SafeStringInputBuffer>* compiler_safe_string_input_buffer() {
    return &compiler_safe_string_input_buffer_;
  }

  Builtins* builtins() { return &builtins_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
    return &interp_canonicalize_mapping_;
  }

  void* PreallocatedStorageNew(size_t size);
  void PreallocatedStorageDelete(void* p);
  void PreallocatedStorageInit(size_t size);

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debugger_;
  }
  Debug* debug() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debug_;
  }
#endif

  inline bool DebuggerHasBreakPoints();

#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }

  int* code_kind_statistics() { return code_kind_statistics_; }
#endif

#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
    defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
  bool simulator_initialized() { return simulator_initialized_; }
  void set_simulator_initialized(bool initialized) {
    simulator_initialized_ = initialized;
  }

  HashMap* simulator_i_cache() { return simulator_i_cache_; }
  void set_simulator_i_cache(HashMap* hash_map) {
    simulator_i_cache_ = hash_map;
  }

  Redirection* simulator_redirection() {
    return simulator_redirection_;
  }
  void set_simulator_redirection(Redirection* redirection) {
    simulator_redirection_ = redirection;
  }
#endif

  Factory* factory() { return reinterpret_cast<Factory*>(this); }

  // SerializerDeserializer state.
  static const int kPartialSnapshotCacheCapacity = 1400;

  static const int kJSRegexpStaticOffsetsVectorSize = 50;

  Address external_callback() {
    return thread_local_top_.external_callback_;
  }
  void set_external_callback(Address callback) {
    thread_local_top_.external_callback_ = callback;
  }

  StateTag current_vm_state() {
    return thread_local_top_.current_vm_state_;
  }

  void SetCurrentVMState(StateTag state) {
    if (RuntimeProfiler::IsEnabled()) {
      // Make sure thread local top is initialized.
      ASSERT(thread_local_top_.isolate_ == this);
      StateTag current_state = thread_local_top_.current_vm_state_;
      if (current_state != JS && state == JS) {
        // Non-JS -> JS transition.
        RuntimeProfiler::IsolateEnteredJS(this);
      } else if (current_state == JS && state != JS) {
        // JS -> non-JS transition.
        ASSERT(RuntimeProfiler::IsSomeIsolateInJS());
        RuntimeProfiler::IsolateExitedJS(this);
      } else {
        // Other types of state transitions are not interesting to the
        // runtime profiler, because they don't affect whether we're
        // in JS or not.
        ASSERT((current_state == JS) == (state == JS));
      }
    }
    thread_local_top_.current_vm_state_ = state;
  }

  void SetData(void* data) { embedder_data_ = data; }
  void* GetData() { return embedder_data_; }

 private:
  Isolate();

  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(Isolate* isolate, ThreadId thread_id);
    void Remove(PerIsolateThreadData* data);
    void RemoveAllThreads(Isolate* isolate);

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack synchronously with threads Enter'ing and Exit'ing
  // the Isolate. The top of the stack points to a thread which is currently
  // running the Isolate. When the stack is empty, the Isolate is considered
  // not entered by any thread and can be Disposed.
  // If the same thread enters the Isolate more than once, the entry_count_
  // is incremented rather than a new item pushed to the stack.
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  // This mutex protects highest_thread_id_, thread_data_table_ and
  // default_isolate_.
  static Mutex* process_wide_mutex_;

  static Thread::LocalStorageKey per_isolate_thread_data_key_;
  static Thread::LocalStorageKey isolate_key_;
  static Thread::LocalStorageKey thread_id_key_;
  static Isolate* default_isolate_;
  static ThreadDataTable* thread_data_table_;

  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  enum State {
    UNINITIALIZED,    // Some components may not have been allocated.
    INITIALIZED       // All components are fully initialized.
  };

  State state_;
  EntryStackItem* entry_stack_;

  // Allocate and insert PerIsolateThreadData into the ThreadDataTable
  // (regardless of whether such data already exists).
  PerIsolateThreadData* AllocatePerIsolateThreadData(ThreadId thread_id);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // PreInits and returns a default isolate. Needed when a new thread tries
  // to create a Locker for the first time (the lock itself is in the isolate).
  static Isolate* GetDefaultIsolateForLocking();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previously entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Exit();

  void PreallocatedMemoryThreadStart();
  void PreallocatedMemoryThreadStop();
  void InitializeThreadLocal();

  void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
  void MarkCompactPrologue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);
  void MarkCompactEpilogue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);

  void FillCache();

  void PropagatePendingExceptionToExternalTryCatch();

  void InitializeDebugger();

  int stack_trace_nesting_level_;
  StringStream* incomplete_message_;
  // The preallocated memory thread singleton.
  PreallocatedMemoryThread* preallocated_memory_thread_;
  Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
  NoAllocationStringAllocator* preallocated_message_space_;

  Bootstrapper* bootstrapper_;
  RuntimeProfiler* runtime_profiler_;
  CompilationCache* compilation_cache_;
  Counters* counters_;
  CodeRange* code_range_;
  Mutex* break_access_;
  Atomic32 debugger_initialized_;
  Mutex* debugger_access_;
  Heap heap_;
  Logger* logger_;
  StackGuard stack_guard_;
  StatsTable* stats_table_;
  StubCache* stub_cache_;
  DeoptimizerData* deoptimizer_data_;
  ThreadLocalTop thread_local_top_;
  bool capture_stack_trace_for_uncaught_exceptions_;
  int stack_trace_for_uncaught_exceptions_frame_limit_;
  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
  TranscendentalCache* transcendental_cache_;
  MemoryAllocator* memory_allocator_;
  KeyedLookupCache* keyed_lookup_cache_;
  ContextSlotCache* context_slot_cache_;
  DescriptorLookupCache* descriptor_lookup_cache_;
  v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
  HandleScopeImplementer* handle_scope_implementer_;
  UnicodeCache* unicode_cache_;
  Zone zone_;
  PreallocatedStorage in_use_list_;
  PreallocatedStorage free_list_;
  bool preallocated_storage_preallocated_;
  PcToCodeCache* pc_to_code_cache_;
  StringInputBuffer* write_input_buffer_;
  GlobalHandles* global_handles_;
  ContextSwitcher* context_switcher_;
  ThreadManager* thread_manager_;
  RuntimeState runtime_state_;
  StaticResource<SafeStringInputBuffer> compiler_safe_string_input_buffer_;
  Builtins builtins_;
  StringTracker* string_tracker_;
  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
  StringInputBuffer objects_string_compare_buffer_a_;
  StringInputBuffer objects_string_compare_buffer_b_;
  StaticResource<StringInputBuffer> objects_string_input_buffer_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize>
      regexp_macro_assembler_canonicalize_;
  RegExpStack* regexp_stack_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
  void* embedder_data_;

#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
    defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
  bool simulator_initialized_;
  HashMap* simulator_i_cache_;
  Redirection* simulator_redirection_;
#endif

#ifdef DEBUG
  // A static array of histogram info for each type.
  HistogramInfo heap_histograms_[LAST_TYPE + 1];
  JSObject::SpillInformation js_spill_information_;
  int code_kind_statistics_[Code::NUMBER_OF_KINDS];
#endif

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger_;
  Debug* debug_;
#endif

#define GLOBAL_BACKING_STORE(type, name, initialvalue) \
  type name##_;
  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
#undef GLOBAL_BACKING_STORE

#define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
  type name##_[length];
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
#undef GLOBAL_ARRAY_BACKING_STORE

#ifdef DEBUG
  // This class is huge and has a number of fields controlled by
  // preprocessor defines. Make sure the offsets of these fields agree
  // between compilation units.
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
  static const intptr_t name##_debug_offset_;
  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

  friend class ExecutionAccess;
  friend class IsolateInitializer;
  friend class ThreadManager;
  friend class Simulator;
  friend class StackGuard;
  friend class ThreadId;
  friend class TestMemoryAllocatorScope;
  friend class v8::Isolate;
  friend class v8::Locker;
  friend class v8::Unlocker;

  DISALLOW_COPY_AND_ASSIGN(Isolate);
};


// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
// class as a work around for a bug in the generated code found with these
// versions of GCC. See V8 issue 122 for details.
class SaveContext BASE_EMBEDDED {
 public:
  explicit SaveContext(Isolate* isolate) : prev_(isolate->save_context()) {
    if (isolate->context() != NULL) {
      context_ = Handle<Context>(isolate->context());
#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
      dummy_ = Handle<Context>(isolate->context());
#endif
    }
    isolate->set_save_context(this);

    // If there is no JS frame under the current C frame, use the value 0.
    JavaScriptFrameIterator it(isolate);
    js_sp_ = it.done() ? 0 : it.frame()->sp();
  }

  ~SaveContext() {
    if (context_.is_null()) {
      Isolate* isolate = Isolate::Current();
      isolate->set_context(NULL);
      isolate->set_save_context(prev_);
    } else {
      Isolate* isolate = context_->GetIsolate();
      isolate->set_context(*context_);
      isolate->set_save_context(prev_);
    }
  }

  Handle<Context> context() { return context_; }
  SaveContext* prev() { return prev_; }

  // Returns true if this save context is below a given JavaScript frame.
  bool below(JavaScriptFrame* frame) {
    return (js_sp_ == 0) || (frame->sp() < js_sp_);
  }

 private:
  Handle<Context> context_;
#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
  Handle<Context> dummy_;
#endif
  SaveContext* prev_;
  Address js_sp_;  // The top JS frame's sp when saving context.
};

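// Usage sketch for SaveContext: temporarily switch to another context and
// restore the previous one on scope exit (new_context is an illustrative
// Handle<Context>):
//
//   {
//     SaveContext save(isolate);
//     isolate->set_context(*new_context);
//     // ... work inside new_context ...
//   }  // ~SaveContext restores the previously entered context.
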

class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  AssertNoContextChange() :
      scope_(Isolate::Current()),
      context_(Isolate::Current()->context(), Isolate::Current()) {
  }

  ~AssertNoContextChange() {
    ASSERT(Isolate::Current()->context() == *context_);
  }

 private:
  HandleScope scope_;
  Handle<Context> context_;
#else
 public:
  AssertNoContextChange() { }
#endif
};


class ExecutionAccess BASE_EMBEDDED {
 public:
  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
    Lock(isolate);
  }
  ~ExecutionAccess() { Unlock(isolate_); }

  static void Lock(Isolate* isolate) { isolate->break_access_->Lock(); }
  static void Unlock(Isolate* isolate) { isolate->break_access_->Unlock(); }

  static bool TryLock(Isolate* isolate) {
    return isolate->break_access_->TryLock();
  }

 private:
  Isolate* isolate_;
};

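// Usage sketch for ExecutionAccess: hold the break access lock for the
// duration of a scope:
//
//   {
//     ExecutionAccess access(isolate);
//     // ... inspect or modify break control structures ...
//   }  // Lock is released when 'access' goes out of scope.
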

// Support for checking for stack-overflows in C++ code.
class StackLimitCheck BASE_EMBEDDED {
 public:
  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }

  bool HasOverflowed() const {
    StackGuard* stack_guard = isolate_->stack_guard();
    // Stack has overflowed in C++ code only if stack pointer exceeds the C++
    // stack guard and the limits are not set to interrupt values.
    // TODO(214): Stack overflows are ignored if an interrupt is pending. This
    // code should probably always use the initial C++ limit.
    return (reinterpret_cast<uintptr_t>(this) < stack_guard->climit()) &&
           stack_guard->IsStackOverflow();
  }
 private:
  Isolate* isolate_;
};

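// Usage sketch for StackLimitCheck, guarding deeply recursive C++ code:
//
//   StackLimitCheck check(isolate);
//   if (check.HasOverflowed()) return isolate->StackOverflow();
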

// Support for temporarily postponing interrupts. When the outermost
// postpone scope is left the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope BASE_EMBEDDED {
 public:
  explicit PostponeInterruptsScope(Isolate* isolate)
      : stack_guard_(isolate->stack_guard()) {
    stack_guard_->thread_local_.postpone_interrupts_nesting_++;
    stack_guard_->DisableInterrupts();
  }

  ~PostponeInterruptsScope() {
    if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
      stack_guard_->EnableInterrupts();
    }
  }
 private:
  StackGuard* stack_guard_;
};

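// Usage sketch for PostponeInterruptsScope: interrupts requested while the
// scope is active are deferred until the outermost scope exits:
//
//   {
//     PostponeInterruptsScope postpone(isolate);
//     // ... critical section that must not observe interrupts ...
//   }
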

// Temporary macros for accessing current isolate and its subobjects.
// They provide better readability, especially when used a lot in the code.
#define HEAP (v8::internal::Isolate::Current()->heap())
#define FACTORY (v8::internal::Isolate::Current()->factory())
#define ISOLATE (v8::internal::Isolate::Current())
#define ZONE (v8::internal::Isolate::Current()->zone())
#define LOGGER (v8::internal::Isolate::Current()->logger())

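// For example, HEAP->the_hole_value() is shorthand for
// v8::internal::Isolate::Current()->heap()->the_hole_value(), and is only
// meaningful on a thread that has entered an isolate.
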

// Tells whether the global context is marked with out of memory.
inline bool Context::has_out_of_memory() {
  return global_context()->out_of_memory()->IsTrue();
}


// Mark the global context with out of memory.
inline void Context::mark_out_of_memory() {
  global_context()->set_out_of_memory(HEAP->true_value());
}


} }  // namespace v8::internal

#endif  // V8_ISOLATE_H_