// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ISOLATE_H_
#define V8_ISOLATE_H_

#include "../include/v8-debug.h"
#include "allocation.h"
#include "apiutils.h"
#include "atomicops.h"
#include "builtins.h"
#include "contexts.h"
#include "execution.h"
#include "frames.h"
#include "global-handles.h"
#include "handles.h"
#include "heap.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "runtime.h"
#include "zone.h"

namespace v8 {
namespace internal {

class Bootstrapper;
class CodeGenerator;
class CodeRange;
class CompilationCache;
class ContextSlotCache;
class ContextSwitcher;
class Counters;
class CpuFeatures;
class CpuProfiler;
class DeoptimizerData;
class Deserializer;
class EmptyStatement;
class ExternalReferenceTable;
class Factory;
class FunctionInfoListener;
class HandleScopeImplementer;
class HeapProfiler;
class InlineRuntimeFunctionsTable;
class NoAllocationStringAllocator;
class InnerPointerToCodeCache;
class PreallocatedMemoryThread;
class RegExpStack;
class SaveContext;
class UnicodeCache;
class StringInputBuffer;
class StringTracker;
class StubCache;
class ThreadManager;
class ThreadState;
class ThreadVisitor;  // Defined in v8threads.h
class VMState;

// 'void function pointer', used to round-trip the
// ExternalReference::ExternalReferenceRedirector since we cannot include
// assembler.h, where it is defined, here.
typedef void* ExternalReferenceRedirectorPointer();


#ifdef ENABLE_DEBUGGER_SUPPORT
class Debug;
class Debugger;
class DebuggerAgent;
#endif

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
class Redirection;
class Simulator;
#endif


// Static indirection table for handles to constants.  If a frame element
// or a Result represents a constant, the data contains an index into this
// table of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;

#define RETURN_IF_SCHEDULED_EXCEPTION(isolate) \
  if (isolate->has_scheduled_exception()) \
      return isolate->PromoteScheduledException()

#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \
  if (call.is_null()) { \
    ASSERT(isolate->has_pending_exception()); \
    return value; \
  }

#define RETURN_IF_EMPTY_HANDLE(isolate, call) \
  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())
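
// Illustrative usage sketch (not part of the original header): a runtime
// helper typically bails out as soon as a handle-producing call has failed,
// at which point an exception is already pending on the isolate.
// 'ComputeProperty' is a hypothetical helper used only for illustration.
//
//   Handle<Object> result = ComputeProperty(isolate, receiver, key);
//   RETURN_IF_EMPTY_HANDLE(isolate, result);   // returns Failure::Exception()
//   RETURN_IF_SCHEDULED_EXCEPTION(isolate);    // promotes the scheduled
//                                              // exception and returns it
//   return *result;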

#define FOR_EACH_ISOLATE_ADDRESS_NAME(C) \
  C(Handler, handler) \
  C(CEntryFP, c_entry_fp) \
  C(Context, context) \
  C(PendingException, pending_exception) \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)


// Platform-independent, reliable thread identifier.
class ThreadId {
 public:
  // Creates an invalid ThreadId.
  ThreadId() : id_(kInvalidId) {}

  // Returns ThreadId for current thread.
  static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }

  // Returns invalid ThreadId (guaranteed not to be equal to any thread).
  static ThreadId Invalid() { return ThreadId(kInvalidId); }

  // Compares ThreadIds for equality.
  INLINE(bool Equals(const ThreadId& other) const) {
    return id_ == other.id_;
  }

  // Checks whether this ThreadId refers to any thread.
  INLINE(bool IsValid() const) {
    return id_ != kInvalidId;
  }

  // Converts ThreadId to an integer representation
  // (required for public API: v8::V8::GetCurrentThreadId).
  int ToInteger() const { return id_; }

  // Converts an integer representation back to a ThreadId
  // (required for public API: v8::V8::TerminateExecution).
  static ThreadId FromInteger(int id) { return ThreadId(id); }

 private:
  static const int kInvalidId = -1;

  explicit ThreadId(int id) : id_(id) {}

  static int AllocateThreadId();

  static int GetCurrentThreadId();

  int id_;

  static Atomic32 highest_thread_id_;

  friend class Isolate;
};
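
// Illustrative usage sketch (not part of the original header): remember the
// thread that created a resource and later verify that the current thread is
// the same one.
//
//   ThreadId owner = ThreadId::Current();
//   ...
//   ASSERT(ThreadId::Current().Equals(owner));
//   int serialized = owner.ToInteger();       // e.g. for the public API
//   ThreadId restored = ThreadId::FromInteger(serialized);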


class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack.  If such an
  // address is needed, use try_catch_handler_address.
  v8::TryCatch* TryCatchHandler();

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack.  When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer.  When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  inline Address try_catch_handler_address() {
    return try_catch_handler_address_;
  }

  // Set the address of the top C++ try catch handler.
  inline void set_try_catch_handler_address(Address address) {
    try_catch_handler_address_ = address;
  }

  void Free() {
    ASSERT(!has_pending_message_);
    ASSERT(!external_caught_exception_);
    ASSERT(try_catch_handler_address_ == NULL);
  }

  Isolate* isolate_;
  // The context where the current execution method is created and for
  // variable lookups.
  Context* context_;
  ThreadId thread_id_;
  MaybeObject* pending_exception_;
  bool has_pending_message_;
  Object* pending_message_obj_;
  Script* pending_message_script_;
  int pending_message_start_pos_;
  int pending_message_end_pos_;
  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception.  We may want to
  // unify them later.
  MaybeObject* scheduled_exception_;
  bool external_caught_exception_;
  SaveContext* save_context_;
  v8::TryCatch* catcher_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;     // try-blocks are chained through the stack

#ifdef USE_SIMULATOR
#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
  Simulator* simulator_;
#endif
#endif  // USE_SIMULATOR

  Address js_entry_sp_;  // the stack pointer of the bottom js entry frame
  Address external_callback_;  // the external callback we're currently in
  StateTag current_vm_state_;

  // Generated code scratch locations.
  int32_t formal_count_;

  // Callback function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

  // Head of the list of live LookupResults.
  LookupResult* top_lookup_result_;

  // Whether out of memory exceptions should be ignored.
  bool ignore_out_of_memory_;

 private:
  void InitializeInternal();

  Address try_catch_handler_address_;
};

#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)

#define ISOLATE_PLATFORM_INIT_LIST(V) \
  /* VirtualFrame::SpilledScope state */ \
  V(bool, is_virtual_frame_in_spilled_scope, false) \
  /* CodeGenerator::EmitNamedStore state */ \
  V(int, inlined_write_barrier_size, -1)

#if !defined(__arm__) && !defined(__mips__)
class HashMap;
#endif

#else

#define ISOLATE_PLATFORM_INIT_LIST(V)

#endif

#ifdef ENABLE_DEBUGGER_SUPPORT

#define ISOLATE_DEBUGGER_INIT_LIST(V) \
  V(v8::Debug::EventCallback, debug_event_callback, NULL) \
  V(DebuggerAgent*, debugger_agent_instance, NULL)
#else

#define ISOLATE_DEBUGGER_INIT_LIST(V)

#endif

#ifdef DEBUG

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) \
  V(CommentStatistic, paged_space_comments_statistics, \
    CommentStatistic::kMaxComments + 1)
#else

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif

#define ISOLATE_INIT_ARRAY_LIST(V) \
  /* SerializerDeserializer state. */ \
  V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity) \
  V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize) \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1)) \
  V(int, suffix_table, (kBMMaxShift + 1)) \
  V(uint32_t, private_random_seed, 2) \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;

#define ISOLATE_INIT_LIST(V) \
  /* AssertNoZoneAllocation state. */ \
  V(bool, zone_allow_allocation, true) \
  /* SerializerDeserializer state. */ \
  V(int, serialize_partial_snapshot_cache_length, 0) \
  /* Assembler state. */ \
  /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */ \
  V(byte*, assembler_spare_buffer, NULL) \
  V(FatalErrorCallback, exception_behavior, NULL) \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL) \
  V(v8::Debug::MessageHandler, message_handler, NULL) \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the global context. */ \
  V(int, next_serial_number, 0) \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL) \
  V(bool, always_allow_natives_syntax, false) \
  /* Part of the state of liveedit. */ \
  V(FunctionInfoListener*, active_function_info_listener, NULL) \
  /* State for Relocatable. */ \
  V(Relocatable*, relocatable_top, NULL) \
  /* State for CodeEntry in profile-generator. */ \
  V(CodeGenerator*, current_code_generator, NULL) \
  V(bool, jump_target_compiling_deferred_code, false) \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL) \
  V(Object*, string_stream_current_security_token, NULL) \
  /* TODO(isolates): Release this on destruction? */ \
  V(int*, irregexp_interpreter_backtrack_stack_cache, NULL) \
  /* Serializer state. */ \
  V(ExternalReferenceTable*, external_reference_table, NULL) \
  /* AstNode state. */ \
  V(unsigned, ast_node_id, 0) \
  V(unsigned, ast_node_count, 0) \
  /* SafeStackFrameIterator activations count. */ \
  V(int, safe_stack_iterator_counter, 0) \
  V(uint64_t, enabled_cpu_features, 0) \
  V(CpuProfiler*, cpu_profiler, NULL) \
  V(HeapProfiler*, heap_profiler, NULL) \
  ISOLATE_PLATFORM_INIT_LIST(V) \
  ISOLATE_DEBUGGER_INIT_LIST(V)

class Isolate {
  // These forward declarations are required to make the friend declarations in
  // PerIsolateThreadData work on some older versions of gcc.
  class ThreadDataTable;
  class EntryStackItem;
 public:
  ~Isolate();

  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }
    void set_stack_limit(uintptr_t value) { stack_limit_ = value; }
    uintptr_t stack_limit() const { return stack_limit_; }
    ThreadState* thread_state() const { return thread_state_; }
    void set_thread_state(ThreadState* value) { thread_state_ = value; }

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
    Simulator* simulator() const { return simulator_; }
    void set_simulator(Simulator* simulator) {
      simulator_ = simulator;
    }
#endif

    bool Matches(Isolate* isolate, ThreadId thread_id) const {
      return isolate_ == isolate && thread_id_.Equals(thread_id);
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
    Simulator* simulator_;
#endif

    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };

  enum AddressId {
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
    FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
#undef DECLARE_ENUM
    kIsolateAddressCount
  };
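
  // For reference (illustrative expansion, not part of the original header):
  // with the C(CamelName, hacker_name) entries in
  // FOR_EACH_ISOLATE_ADDRESS_NAME, the enum above expands to
  //   kHandlerAddress, kCEntryFPAddress, kContextAddress,
  //   kPendingExceptionAddress, kExternalCaughtExceptionAddress,
  //   kJSEntrySPAddress, kIsolateAddressCount
  // and get_address_from_id() maps each enumerator to the corresponding
  // isolate-internal address.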

  // Returns the PerIsolateThreadData for the current thread (or NULL if one
  // is not currently set).
  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    return reinterpret_cast<PerIsolateThreadData*>(
        Thread::GetThreadLocal(per_isolate_thread_data_key_));
  }

  // Returns the isolate inside which the current thread is running.
  INLINE(static Isolate* Current()) {
    Isolate* isolate = reinterpret_cast<Isolate*>(
        Thread::GetExistingThreadLocal(isolate_key_));
    ASSERT(isolate != NULL);
    return isolate;
  }

  INLINE(static Isolate* UncheckedCurrent()) {
    return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
  }

  // Usually called by Init(), but can be called early, e.g. to allow
  // testing components that require logging but not the whole
  // isolate.
  //
  // Safe to call more than once.
  void InitializeLoggingAndCounters();

  bool Init(Deserializer* des);

  bool IsInitialized() { return state_ == INITIALIZED; }

  // True if at least one thread Enter'ed this isolate.
  bool IsInUse() { return entry_stack_ != NULL; }

  // Destroys the non-default isolates.
  // Sets the default isolate into a "has_been_disposed" state rather than
  // destroying it, for legacy API reasons.
  void TearDown();

  bool IsDefaultIsolate() const { return this == default_isolate_; }

  // Ensures that process-wide resources and the default isolate have been
  // allocated. It is only necessary to call this method in rare cases, for
  // example if you are using V8 from within the body of a static initializer.
  // Safe to call multiple times.
  static void EnsureDefaultIsolate();

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, return NULL.
  PerIsolateThreadData* FindPerThreadDataForThisThread();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Get the debugger from the default isolate. Preinitializes the
  // default isolate if needed.
  static Debugger* GetDefaultIsolateDebugger();
#endif

  // Get the stack guard from the default isolate. Preinitializes the
  // default isolate if needed.
  static StackGuard* GetDefaultIsolateStackGuard();

  // Returns the key used to store the pointer to the current isolate.
  // Used internally for V8 threads that do not execute JavaScript but still
  // are part of the domain of an isolate (like the context switcher).
  static Thread::LocalStorageKey isolate_key() {
    return isolate_key_;
  }

  // Returns the key used to store process-wide thread IDs.
  static Thread::LocalStorageKey thread_id_key() {
    return thread_id_key_;
  }

  // If a client attempts to create a Locker without specifying an isolate,
  // we assume that the client is using legacy behavior. Set up the current
  // thread to be inside the implicit isolate (or fail a check if we have
  // switched to non-legacy behavior).
  static void EnterDefaultIsolate();

  // Mutex for serializing access to break control structures.
  Mutex* break_access() { return break_access_; }

  // Mutex for serializing access to debugger.
  Mutex* debugger_access() { return debugger_access_; }

  Address get_address_from_id(AddressId id);

  // Access to top context (where the current function object was created).
  Context* context() { return thread_local_top_.context_; }
  void set_context(Context* context) {
    ASSERT(context == NULL || context->IsContext());
    thread_local_top_.context_ = context;
  }
  Context** context_address() { return &thread_local_top_.context_; }

  SaveContext* save_context() { return thread_local_top_.save_context_; }
  void set_save_context(SaveContext* save) {
    thread_local_top_.save_context_ = save;
  }

  // Access to current thread id.
  ThreadId thread_id() { return thread_local_top_.thread_id_; }
  void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; }

  // Interface to pending exception.
  MaybeObject* pending_exception() {
    ASSERT(has_pending_exception());
    return thread_local_top_.pending_exception_;
  }
  bool external_caught_exception() {
    return thread_local_top_.external_caught_exception_;
  }
  void set_external_caught_exception(bool value) {
    thread_local_top_.external_caught_exception_ = value;
  }
  void set_pending_exception(MaybeObject* exception) {
    thread_local_top_.pending_exception_ = exception;
  }
  void clear_pending_exception() {
    thread_local_top_.pending_exception_ = heap_.the_hole_value();
  }
  MaybeObject** pending_exception_address() {
    return &thread_local_top_.pending_exception_;
  }
  bool has_pending_exception() {
    return !thread_local_top_.pending_exception_->IsTheHole();
  }
  void clear_pending_message() {
    thread_local_top_.has_pending_message_ = false;
    thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
    thread_local_top_.pending_message_script_ = NULL;
  }
  v8::TryCatch* try_catch_handler() {
    return thread_local_top_.TryCatchHandler();
  }
  Address try_catch_handler_address() {
    return thread_local_top_.try_catch_handler_address();
  }
  bool* external_caught_exception_address() {
    return &thread_local_top_.external_caught_exception_;
  }
  v8::TryCatch* catcher() {
    return thread_local_top_.catcher_;
  }
  void set_catcher(v8::TryCatch* catcher) {
    thread_local_top_.catcher_ = catcher;
  }

  MaybeObject** scheduled_exception_address() {
    return &thread_local_top_.scheduled_exception_;
  }
  MaybeObject* scheduled_exception() {
    ASSERT(has_scheduled_exception());
    return thread_local_top_.scheduled_exception_;
  }
  bool has_scheduled_exception() {
    return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
  }
  void clear_scheduled_exception() {
    thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
  }

  bool IsExternallyCaught();

  bool is_catchable_by_javascript(MaybeObject* exception) {
    return (exception != Failure::OutOfMemoryException()) &&
           (exception != heap()->termination_exception());
  }

  // JS execution stack (see frames.h).
  static Address c_entry_fp(ThreadLocalTop* thread) {
    return thread->c_entry_fp_;
  }
  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }

  inline Address* c_entry_fp_address() {
    return &thread_local_top_.c_entry_fp_;
  }
  inline Address* handler_address() { return &thread_local_top_.handler_; }

  // Bottom JS entry (see StackTracer::Trace in log.cc).
  static Address js_entry_sp(ThreadLocalTop* thread) {
    return thread->js_entry_sp_;
  }
  inline Address* js_entry_sp_address() {
    return &thread_local_top_.js_entry_sp_;
  }

  // Generated code scratch locations.
  void* formal_count_address() { return &thread_local_top_.formal_count_; }

  // Returns the global object of the current context. It could be
  // a builtin object, or a JS global object.
  Handle<GlobalObject> global() {
    return Handle<GlobalObject>(context()->global());
  }

  // Returns the global proxy object of the current context.
  Object* global_proxy() {
    return context()->global_proxy();
  }

  Handle<JSBuiltinsObject> js_builtins_object() {
    return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
  }

  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
  void FreeThreadResources() { thread_local_top_.Free(); }

  // This method is called by the API after operations that may throw
  // exceptions. If an exception was thrown and not handled by an external
  // handler, the exception is scheduled to be rethrown when we return to
  // running JavaScript code. If an exception is scheduled, true is returned.
  bool OptionalRescheduleException(bool is_bottom_call);

  class ExceptionScope {
   public:
    explicit ExceptionScope(Isolate* isolate) :
      // Scope currently can only be used for regular exceptions, not
      // failures like OOM or termination exception.
      isolate_(isolate),
      pending_exception_(isolate_->pending_exception()->ToObjectUnchecked()),
      catcher_(isolate_->catcher())
    { }

    ~ExceptionScope() {
      isolate_->set_catcher(catcher_);
      isolate_->set_pending_exception(*pending_exception_);
    }

   private:
    Isolate* isolate_;
    Handle<Object> pending_exception_;
    v8::TryCatch* catcher_;
  };
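
  // Illustrative usage sketch (not part of the original header): preserve the
  // currently pending exception and catcher around a nested operation that
  // may itself throw and clear them. 'PerformNestedCall' is hypothetical.
  //
  //   { ExceptionScope scope(isolate);
  //     isolate->clear_pending_exception();
  //     PerformNestedCall(isolate);
  //   }  // original pending exception and catcher are restored here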

  void SetCaptureStackTraceForUncaughtExceptions(
      bool capture,
      int frame_limit,
      StackTrace::StackTraceOptions options);

  // Tells whether the current context has experienced an out of memory
  // exception.
  bool is_out_of_memory();
  bool ignore_out_of_memory() {
    return thread_local_top_.ignore_out_of_memory_;
  }
  void set_ignore_out_of_memory(bool value) {
    thread_local_top_.ignore_out_of_memory_ = value;
  }

  void PrintCurrentStackTrace(FILE* out);
  void PrintStackTrace(FILE* out, char* thread_data);
  void PrintStack(StringStream* accumulator);
  void PrintStack();
  Handle<String> StackTraceString();
  Handle<JSArray> CaptureCurrentStackTrace(
      int frame_limit,
      StackTrace::StackTraceOptions options);

  // Returns whether the top context may access the given global object. If
  // the result is false, the pending exception is guaranteed to be
  // set.
  bool MayNamedAccess(JSObject* receiver,
                      Object* key,
                      v8::AccessType type);
  bool MayIndexedAccess(JSObject* receiver,
                        uint32_t index,
                        v8::AccessType type);

  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
  void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);

  // Exception throwing support. The caller should use the result
  // of Throw() as its return value.
  Failure* Throw(Object* exception, MessageLocation* location = NULL);
  // Re-throw an exception. This involves no error reporting since
  // error reporting was handled when the exception was thrown
  // originally.
  Failure* ReThrow(MaybeObject* exception, MessageLocation* location = NULL);
  void ScheduleThrow(Object* exception);
  void ReportPendingMessages();
  Failure* ThrowIllegalOperation();

  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
  Failure* PromoteScheduledException();
  void DoThrow(MaybeObject* exception, MessageLocation* location);
  // Checks if exception should be reported and finds out if it's
  // caught externally.
  bool ShouldReportException(bool* can_be_caught_externally,
                             bool catchable_by_javascript);

  // Attempts to compute the current source location, storing the
  // result in the target out parameter.
  void ComputeLocation(MessageLocation* target);

  // Override command line flag.
  void TraceException(bool flag);

  // Out of resource exception helpers.
  Failure* StackOverflow();
  Failure* TerminateExecution();

  // Administration
  void Iterate(ObjectVisitor* v);
  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
  char* Iterate(ObjectVisitor* v, char* t);
  void IterateThread(ThreadVisitor* v);
  void IterateThread(ThreadVisitor* v, char* t);


  // Returns the current global context.
  Handle<Context> global_context();

  // Returns the global context of the calling JavaScript code. That
  // is, the global context of the top-most JavaScript frame.
  Handle<Context> GetCallingGlobalContext();

  void RegisterTryCatchHandler(v8::TryCatch* that);
  void UnregisterTryCatchHandler(v8::TryCatch* that);

  char* ArchiveThread(char* to);
  char* RestoreThread(char* from);

  static const char* const kStackOverflowMessage;

  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
  static const int kBMMaxShift = 250;        // See StringSearchBase.

  // Accessors.
#define GLOBAL_ACCESSOR(type, name, initialvalue) \
  inline type name() const { \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
    return name##_; \
  } \
  inline void set_##name(type value) { \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
    name##_ = value; \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR
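
  // For reference (illustrative expansion, not part of the original header):
  // the ISOLATE_INIT_LIST entry
  //   V(FatalErrorCallback, exception_behavior, NULL)
  // expands through GLOBAL_ACCESSOR above into (ASSERTs omitted)
  //   inline FatalErrorCallback exception_behavior() const;
  //   inline void set_exception_behavior(FatalErrorCallback value);
  // backed by a FatalErrorCallback exception_behavior_ field declared further
  // below via GLOBAL_BACKING_STORE.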

#define GLOBAL_ARRAY_ACCESSOR(type, name, length) \
  inline type* name() { \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
    return &(name##_)[0]; \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

#define GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name) \
  Handle<type> name() { \
    return Handle<type>(context()->global_context()->name()); \
  }
  GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSOR)
#undef GLOBAL_CONTEXT_FIELD_ACCESSOR

  Bootstrapper* bootstrapper() { return bootstrapper_; }
  Counters* counters() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(counters_ != NULL);
    return counters_;
  }
  CodeRange* code_range() { return code_range_; }
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
  Logger* logger() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(logger_ != NULL);
    return logger_;
  }
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
  StatsTable* stats_table();
  StubCache* stub_cache() { return stub_cache_; }
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }

  TranscendentalCache* transcendental_cache() const {
    return transcendental_cache_;
  }

  MemoryAllocator* memory_allocator() {
    return memory_allocator_;
  }

  KeyedLookupCache* keyed_lookup_cache() {
    return keyed_lookup_cache_;
  }

  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

  v8::ImplementationUtilities::HandleScopeData* handle_scope_data() {
    return &handle_scope_data_;
  }
  HandleScopeImplementer* handle_scope_implementer() {
    ASSERT(handle_scope_implementer_);
    return handle_scope_implementer_;
  }
  Zone* zone() { return &zone_; }

  UnicodeCache* unicode_cache() {
    return unicode_cache_;
  }

  InnerPointerToCodeCache* inner_pointer_to_code_cache() {
    return inner_pointer_to_code_cache_;
  }

  StringInputBuffer* write_input_buffer() { return write_input_buffer_; }

  GlobalHandles* global_handles() { return global_handles_; }

  ThreadManager* thread_manager() { return thread_manager_; }

  ContextSwitcher* context_switcher() { return context_switcher_; }

  void set_context_switcher(ContextSwitcher* switcher) {
    context_switcher_ = switcher;
  }

  StringTracker* string_tracker() { return string_tracker_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  StringInputBuffer* objects_string_compare_buffer_a() {
    return &objects_string_compare_buffer_a_;
  }

  StringInputBuffer* objects_string_compare_buffer_b() {
    return &objects_string_compare_buffer_b_;
  }

  StaticResource<StringInputBuffer>* objects_string_input_buffer() {
    return &objects_string_input_buffer_;
  }

  RuntimeState* runtime_state() { return &runtime_state_; }

  void set_fp_stubs_generated(bool value) {
    fp_stubs_generated_ = value;
  }

  bool fp_stubs_generated() { return fp_stubs_generated_; }

  StaticResource<SafeStringInputBuffer>* compiler_safe_string_input_buffer() {
    return &compiler_safe_string_input_buffer_;
  }

  Builtins* builtins() { return &builtins_; }

  void NotifyExtensionInstalled() {
    has_installed_extensions_ = true;
  }

  bool has_installed_extensions() { return has_installed_extensions_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
    return &interp_canonicalize_mapping_;
  }

  void* PreallocatedStorageNew(size_t size);
  void PreallocatedStorageDelete(void* p);
  void PreallocatedStorageInit(size_t size);

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debugger_;
  }
  Debug* debug() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debug_;
  }
#endif

  inline bool DebuggerHasBreakPoints();

#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }

  int* code_kind_statistics() { return code_kind_statistics_; }
#endif

#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
    defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
  bool simulator_initialized() { return simulator_initialized_; }
  void set_simulator_initialized(bool initialized) {
    simulator_initialized_ = initialized;
  }

  HashMap* simulator_i_cache() { return simulator_i_cache_; }
  void set_simulator_i_cache(HashMap* hash_map) {
    simulator_i_cache_ = hash_map;
  }

  Redirection* simulator_redirection() {
    return simulator_redirection_;
  }
  void set_simulator_redirection(Redirection* redirection) {
    simulator_redirection_ = redirection;
  }
#endif

  Factory* factory() { return reinterpret_cast<Factory*>(this); }

  // SerializerDeserializer state.
  static const int kPartialSnapshotCacheCapacity = 1400;

  static const int kJSRegexpStaticOffsetsVectorSize = 50;

  Address external_callback() {
    return thread_local_top_.external_callback_;
  }
  void set_external_callback(Address callback) {
    thread_local_top_.external_callback_ = callback;
  }

  StateTag current_vm_state() {
    return thread_local_top_.current_vm_state_;
  }

  void SetCurrentVMState(StateTag state) {
    if (RuntimeProfiler::IsEnabled()) {
      // Make sure thread local top is initialized.
      ASSERT(thread_local_top_.isolate_ == this);
      StateTag current_state = thread_local_top_.current_vm_state_;
      if (current_state != JS && state == JS) {
        // Non-JS -> JS transition.
        RuntimeProfiler::IsolateEnteredJS(this);
      } else if (current_state == JS && state != JS) {
        // JS -> non-JS transition.
        ASSERT(RuntimeProfiler::IsSomeIsolateInJS());
        RuntimeProfiler::IsolateExitedJS(this);
      } else {
        // Other types of state transitions are not interesting to the
        // runtime profiler, because they don't affect whether we're
        // in JS or not.
        ASSERT((current_state == JS) == (state == JS));
      }
    }
    thread_local_top_.current_vm_state_ = state;
  }

  void SetData(void* data) { embedder_data_ = data; }
  void* GetData() { return embedder_data_; }
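
  // Illustrative usage sketch (not part of the original header): embedders can
  // attach one opaque pointer to the isolate and read it back from callbacks.
  // 'MyEmbedderState' is a hypothetical embedder-defined type.
  //
  //   isolate->SetData(my_state);
  //   ...
  //   MyEmbedderState* state =
  //       static_cast<MyEmbedderState*>(isolate->GetData());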

  LookupResult* top_lookup_result() {
    return thread_local_top_.top_lookup_result_;
  }
  void SetTopLookupResult(LookupResult* top) {
    thread_local_top_.top_lookup_result_ = top;
  }

 private:
  Isolate();

  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(Isolate* isolate, ThreadId thread_id);
    void Remove(PerIsolateThreadData* data);
    void RemoveAllThreads(Isolate* isolate);

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack, kept in sync with threads Enter'ing and
  // Exit'ing the Isolate. The top of the stack points to a thread which is
  // currently running the Isolate. When the stack is empty, the Isolate is
  // considered not entered by any thread and can be Disposed.
  // If the same thread enters the Isolate more than once, the entry_count_
  // is incremented rather than a new item being pushed to the stack.
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  // This mutex protects highest_thread_id_, thread_data_table_ and
  // default_isolate_.
  static Mutex* process_wide_mutex_;

  static Thread::LocalStorageKey per_isolate_thread_data_key_;
  static Thread::LocalStorageKey isolate_key_;
  static Thread::LocalStorageKey thread_id_key_;
  static Isolate* default_isolate_;
  static ThreadDataTable* thread_data_table_;

  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  enum State {
    UNINITIALIZED,    // Some components may not have been allocated.
    INITIALIZED       // All components are fully initialized.
  };

  State state_;
  EntryStackItem* entry_stack_;

  // Allocate and insert PerIsolateThreadData into the ThreadDataTable
  // (regardless of whether such data already exists).
  PerIsolateThreadData* AllocatePerIsolateThreadData(ThreadId thread_id);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // Pre-initializes and returns a default isolate. Needed when a new thread
  // tries to create a Locker for the first time (the lock itself is in the
  // isolate).
  static Isolate* GetDefaultIsolateForLocking();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time; this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previously entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time; this should be prevented using external locking.
  void Exit();

  void PreallocatedMemoryThreadStart();
  void PreallocatedMemoryThreadStop();
  void InitializeThreadLocal();

  void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
  void MarkCompactPrologue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);
  void MarkCompactEpilogue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);

  void FillCache();

  void PropagatePendingExceptionToExternalTryCatch();

  void InitializeDebugger();

  int stack_trace_nesting_level_;
  StringStream* incomplete_message_;
  // The preallocated memory thread singleton.
  PreallocatedMemoryThread* preallocated_memory_thread_;
  Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
  NoAllocationStringAllocator* preallocated_message_space_;

  Bootstrapper* bootstrapper_;
  RuntimeProfiler* runtime_profiler_;
  CompilationCache* compilation_cache_;
  Counters* counters_;
  CodeRange* code_range_;
  Mutex* break_access_;
  Atomic32 debugger_initialized_;
  Mutex* debugger_access_;
  Heap heap_;
  Logger* logger_;
  StackGuard stack_guard_;
  StatsTable* stats_table_;
  StubCache* stub_cache_;
  DeoptimizerData* deoptimizer_data_;
  ThreadLocalTop thread_local_top_;
  bool capture_stack_trace_for_uncaught_exceptions_;
  int stack_trace_for_uncaught_exceptions_frame_limit_;
  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
  TranscendentalCache* transcendental_cache_;
  MemoryAllocator* memory_allocator_;
  KeyedLookupCache* keyed_lookup_cache_;
  ContextSlotCache* context_slot_cache_;
  DescriptorLookupCache* descriptor_lookup_cache_;
  v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
  HandleScopeImplementer* handle_scope_implementer_;
  UnicodeCache* unicode_cache_;
  Zone zone_;
  PreallocatedStorage in_use_list_;
  PreallocatedStorage free_list_;
  bool preallocated_storage_preallocated_;
  InnerPointerToCodeCache* inner_pointer_to_code_cache_;
  StringInputBuffer* write_input_buffer_;
  GlobalHandles* global_handles_;
  ContextSwitcher* context_switcher_;
  ThreadManager* thread_manager_;
  RuntimeState runtime_state_;
  bool fp_stubs_generated_;
  StaticResource<SafeStringInputBuffer> compiler_safe_string_input_buffer_;
  Builtins builtins_;
  bool has_installed_extensions_;
  StringTracker* string_tracker_;
  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
  StringInputBuffer objects_string_compare_buffer_a_;
  StringInputBuffer objects_string_compare_buffer_b_;
  StaticResource<StringInputBuffer> objects_string_input_buffer_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize>
      regexp_macro_assembler_canonicalize_;
  RegExpStack* regexp_stack_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
  void* embedder_data_;

#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
    defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
  bool simulator_initialized_;
  HashMap* simulator_i_cache_;
  Redirection* simulator_redirection_;
#endif

#ifdef DEBUG
  // A static array of histogram info for each type.
  HistogramInfo heap_histograms_[LAST_TYPE + 1];
  JSObject::SpillInformation js_spill_information_;
  int code_kind_statistics_[Code::NUMBER_OF_KINDS];
#endif

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger_;
  Debug* debug_;
#endif

#define GLOBAL_BACKING_STORE(type, name, initialvalue) \
  type name##_;
  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
#undef GLOBAL_BACKING_STORE

#define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
  type name##_[length];
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
#undef GLOBAL_ARRAY_BACKING_STORE

#ifdef DEBUG
  // This class is huge and has a number of fields controlled by
  // preprocessor defines. Make sure the offsets of these fields agree
  // between compilation units.
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
  static const intptr_t name##_debug_offset_;
  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

  friend class ExecutionAccess;
  friend class IsolateInitializer;
  friend class ThreadManager;
  friend class Simulator;
  friend class StackGuard;
  friend class ThreadId;
  friend class TestMemoryAllocatorScope;
  friend class v8::Isolate;
  friend class v8::Locker;
  friend class v8::Unlocker;

  DISALLOW_COPY_AND_ASSIGN(Isolate);
};


// If the GCC version is 4.1.x or 4.2.x, an additional field is added to the
// class as a workaround for a bug in the code generated with these versions
// of GCC. See V8 issue 122 for details.
class SaveContext BASE_EMBEDDED {
 public:
  inline explicit SaveContext(Isolate* isolate);

  ~SaveContext() {
    if (context_.is_null()) {
      Isolate* isolate = Isolate::Current();
      isolate->set_context(NULL);
      isolate->set_save_context(prev_);
    } else {
      Isolate* isolate = context_->GetIsolate();
      isolate->set_context(*context_);
      isolate->set_save_context(prev_);
    }
  }

  Handle<Context> context() { return context_; }
  SaveContext* prev() { return prev_; }

  // Returns true if this save context is below a given JavaScript frame.
  bool IsBelowFrame(JavaScriptFrame* frame) {
    return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
  }

 private:
  Handle<Context> context_;
#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
  Handle<Context> dummy_;
#endif
  SaveContext* prev_;
  Address c_entry_fp_;
};
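
// Illustrative usage sketch (not part of the original header): temporarily
// switch the isolate to another context; the previous context is restored
// when the scope is left. 'other_context' is a hypothetical Handle<Context>.
//
//   { SaveContext save(isolate);
//     isolate->set_context(*other_context);
//     ...                        // run code with other_context installed
//   }                            // ~SaveContext restores the previous context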


class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  AssertNoContextChange() :
      scope_(Isolate::Current()),
      context_(Isolate::Current()->context(), Isolate::Current()) {
  }

  ~AssertNoContextChange() {
    ASSERT(Isolate::Current()->context() == *context_);
  }

 private:
  HandleScope scope_;
  Handle<Context> context_;
#else
 public:
  AssertNoContextChange() { }
#endif
};


class ExecutionAccess BASE_EMBEDDED {
 public:
  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
    Lock(isolate);
  }
  ~ExecutionAccess() { Unlock(isolate_); }

  static void Lock(Isolate* isolate) { isolate->break_access_->Lock(); }
  static void Unlock(Isolate* isolate) { isolate->break_access_->Unlock(); }

  static bool TryLock(Isolate* isolate) {
    return isolate->break_access_->TryLock();
  }

 private:
  Isolate* isolate_;
};


// Support for checking for stack-overflows in C++ code.
class StackLimitCheck BASE_EMBEDDED {
 public:
  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }

  bool HasOverflowed() const {
    StackGuard* stack_guard = isolate_->stack_guard();
    // Stack has overflowed in C++ code only if the stack pointer exceeds the
    // C++ stack guard and the limits are not set to interrupt values.
    // TODO(214): Stack overflows are ignored if an interrupt is pending. This
    // code should probably always use the initial C++ limit.
    return (reinterpret_cast<uintptr_t>(this) < stack_guard->climit()) &&
           stack_guard->IsStackOverflow();
  }

 private:
  Isolate* isolate_;
};
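
// Illustrative usage sketch (not part of the original header): guard a
// recursive C++ helper against overflowing the C++ stack before recursing.
//
//   StackLimitCheck check(isolate);
//   if (check.HasOverflowed()) return isolate->StackOverflow();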


// Support for temporarily postponing interrupts. When the outermost
// postpone scope is left, the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope BASE_EMBEDDED {
 public:
  explicit PostponeInterruptsScope(Isolate* isolate)
      : stack_guard_(isolate->stack_guard()) {
    stack_guard_->thread_local_.postpone_interrupts_nesting_++;
    stack_guard_->DisableInterrupts();
  }

  ~PostponeInterruptsScope() {
    if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
      stack_guard_->EnableInterrupts();
    }
  }

 private:
  StackGuard* stack_guard_;
};
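
// Illustrative usage sketch (not part of the original header): keep stack
// guard interrupts from being processed while a non-reentrant operation runs;
// they are handled once the outermost scope is left.
//
//   { PostponeInterruptsScope postpone(isolate);
//     ...  // interrupts requested here are deferred until the scope exits
//   }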


// Temporary macros for accessing current isolate and its subobjects.
// They provide better readability, especially when used a lot in the code.
#define HEAP (v8::internal::Isolate::Current()->heap())
#define FACTORY (v8::internal::Isolate::Current()->factory())
#define ISOLATE (v8::internal::Isolate::Current())
#define ZONE (v8::internal::Isolate::Current()->zone())
#define LOGGER (v8::internal::Isolate::Current()->logger())
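
// Illustrative usage sketch (not part of the original header): each macro is
// shorthand for the corresponding accessor on the current isolate. The
// factory call below is only a representative example.
//
//   Handle<String> s = FACTORY->NewStringFromAscii(CStrVector("x"));
//
// is equivalent to
//
//   Isolate::Current()->factory()->NewStringFromAscii(CStrVector("x"));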


// Tells whether the global context is marked with out of memory.
inline bool Context::has_out_of_memory() {
  return global_context()->out_of_memory()->IsTrue();
}


// Mark the global context with out of memory.
inline void Context::mark_out_of_memory() {
  global_context()->set_out_of_memory(HEAP->true_value());
}


} }  // namespace v8::internal

#endif  // V8_ISOLATE_H_