// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ISOLATE_H_
#define V8_ISOLATE_H_

#include "../include/v8-debug.h"
#include "allocation.h"
#include "apiutils.h"
#include "atomicops.h"
#include "builtins.h"
#include "contexts.h"
#include "execution.h"
#include "frames.h"
#include "global-handles.h"
#include "handles.h"
#include "heap.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "runtime.h"
#include "zone.h"

namespace v8 {
namespace internal {

class AstSentinels;
class Bootstrapper;
class CodeGenerator;
class CodeRange;
class CompilationCache;
class ContextSlotCache;
class ContextSwitcher;
class Counters;
class CpuFeatures;
class CpuProfiler;
class DeoptimizerData;
class Deserializer;
class EmptyStatement;
class ExternalReferenceTable;
class Factory;
class FunctionInfoListener;
class HandleScopeImplementer;
class HeapProfiler;
class InlineRuntimeFunctionsTable;
class NoAllocationStringAllocator;
class PcToCodeCache;
class PreallocatedMemoryThread;
class ProducerHeapProfile;
class RegExpStack;
class SaveContext;
class UnicodeCache;
class StringInputBuffer;
class StringTracker;
class StubCache;
class ThreadManager;
class ThreadState;
class ThreadVisitor;  // Defined in v8threads.h
class VMState;

// 'void function pointer', used to roundtrip the
// ExternalReference::ExternalReferenceRedirector since we cannot include
// assembler.h, where it is defined, here.
typedef void* ExternalReferenceRedirectorPointer();


#ifdef ENABLE_DEBUGGER_SUPPORT
class Debug;
class Debugger;
class DebuggerAgent;
#endif

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
class Redirection;
class Simulator;
#endif


// Static indirection table for handles to constants. If a frame element
// or a Result represents a constant, the data contains an index into this
// table of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;

#define RETURN_IF_SCHEDULED_EXCEPTION(isolate)             \
  if (isolate->has_scheduled_exception())                  \
    return isolate->PromoteScheduledException()

#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \
  if (call.is_null()) {                                    \
    ASSERT(isolate->has_pending_exception());              \
    return value;                                          \
  }

#define RETURN_IF_EMPTY_HANDLE(isolate, call)              \
  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())

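// Illustrative sketch only (not part of this header's API): a runtime-style
// helper might use the macros above roughly like this, where DoOperation()
// is a hypothetical call that returns an empty handle on failure:
//
//   MaybeObject* RunOperation(Isolate* isolate) {
//     RETURN_IF_SCHEDULED_EXCEPTION(isolate);
//     Handle<Object> result = DoOperation(isolate);
//     RETURN_IF_EMPTY_HANDLE(isolate, result);
//     return *result;
//   }
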
#define ISOLATE_ADDRESS_LIST(C)            \
  C(handler_address)                       \
  C(c_entry_fp_address)                    \
  C(context_address)                       \
  C(pending_exception_address)             \
  C(external_caught_exception_address)

#ifdef ENABLE_LOGGING_AND_PROFILING
#define ISOLATE_ADDRESS_LIST_PROF(C)       \
  C(js_entry_sp_address)
#else
#define ISOLATE_ADDRESS_LIST_PROF(C)
#endif


// Platform-independent, reliable thread identifier.
class ThreadId {
 public:
  // Creates an invalid ThreadId.
  ThreadId() : id_(kInvalidId) {}

  // Returns ThreadId for current thread.
  static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }

  // Returns invalid ThreadId (guaranteed not to be equal to any thread).
  static ThreadId Invalid() { return ThreadId(kInvalidId); }

  // Compares ThreadIds for equality.
  INLINE(bool Equals(const ThreadId& other) const) {
    return id_ == other.id_;
  }

  // Checks whether this ThreadId refers to any thread.
  INLINE(bool IsValid() const) {
    return id_ != kInvalidId;
  }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::GetCurrentThreadId).
  int ToInteger() const { return id_; }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::TerminateExecution).
  static ThreadId FromInteger(int id) { return ThreadId(id); }

 private:
  static const int kInvalidId = -1;

  explicit ThreadId(int id) : id_(id) {}

  static int AllocateThreadId();

  static int GetCurrentThreadId();

  int id_;

  static Atomic32 highest_thread_id_;

  friend class Isolate;
};
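
// A small illustrative sketch (an assumption about usage, not documentation):
// comparing the current thread against a previously captured ThreadId.
//
//   ThreadId saved = ThreadId::Current();
//   ...
//   if (ThreadId::Current().Equals(saved)) {
//     // Still running on the same thread.
//   }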


class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack. If such an
  // address is needed, use try_catch_handler_address.
  v8::TryCatch* TryCatchHandler();

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack. When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer. When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  inline Address try_catch_handler_address() {
    return try_catch_handler_address_;
  }

  // Set the address of the top C++ try catch handler.
  inline void set_try_catch_handler_address(Address address) {
    try_catch_handler_address_ = address;
  }

  void Free() {
    ASSERT(!has_pending_message_);
    ASSERT(!external_caught_exception_);
    ASSERT(try_catch_handler_address_ == NULL);
  }

  Isolate* isolate_;
  // The context where the current execution method is created and for variable
  // lookups.
  Context* context_;
  ThreadId thread_id_;
  MaybeObject* pending_exception_;
  bool has_pending_message_;
  Object* pending_message_obj_;
  Script* pending_message_script_;
  int pending_message_start_pos_;
  int pending_message_end_pos_;
  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception. We may want to
  // unify them later.
  MaybeObject* scheduled_exception_;
  bool external_caught_exception_;
  SaveContext* save_context_;
  v8::TryCatch* catcher_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;     // try-blocks are chained through the stack

#ifdef USE_SIMULATOR
#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
  Simulator* simulator_;
#endif
#endif  // USE_SIMULATOR

#ifdef ENABLE_LOGGING_AND_PROFILING
  Address js_entry_sp_;  // the stack pointer of the bottom js entry frame
  Address external_callback_;  // the external callback we're currently in
#endif

#ifdef ENABLE_VMSTATE_TRACKING
  StateTag current_vm_state_;
#endif

  // Generated code scratch locations.
  int32_t formal_count_;

  // Call back function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

 private:
  void InitializeInternal();

  Address try_catch_handler_address_;
};

#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)

#define ISOLATE_PLATFORM_INIT_LIST(V)                   \
  /* VirtualFrame::SpilledScope state */                \
  V(bool, is_virtual_frame_in_spilled_scope, false)     \
  /* CodeGenerator::EmitNamedStore state */             \
  V(int, inlined_write_barrier_size, -1)

#if !defined(__arm__) && !defined(__mips__)
class HashMap;
#endif

#else

#define ISOLATE_PLATFORM_INIT_LIST(V)

#endif

#ifdef ENABLE_DEBUGGER_SUPPORT

#define ISOLATE_DEBUGGER_INIT_LIST(V)                   \
  V(v8::Debug::EventCallback, debug_event_callback, NULL) \
  V(DebuggerAgent*, debugger_agent_instance, NULL)
#else

#define ISOLATE_DEBUGGER_INIT_LIST(V)

#endif

#ifdef DEBUG

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                \
  V(CommentStatistic, paged_space_comments_statistics,  \
    CommentStatistic::kMaxComments + 1)
#else

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif

#ifdef ENABLE_LOGGING_AND_PROFILING

#define ISOLATE_LOGGING_INIT_LIST(V)                    \
  V(CpuProfiler*, cpu_profiler, NULL)                   \
  V(HeapProfiler*, heap_profiler, NULL)

#else

#define ISOLATE_LOGGING_INIT_LIST(V)

#endif

#define ISOLATE_INIT_ARRAY_LIST(V)                                            \
  /* SerializerDeserializer state. */                                         \
  V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity) \
  V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize)    \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                             \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                          \
  V(int, suffix_table, (kBMMaxShift + 1))                                     \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;

#define ISOLATE_INIT_LIST(V)                                                   \
  /* AssertNoZoneAllocation state. */                                          \
  V(bool, zone_allow_allocation, true)                                         \
  /* SerializerDeserializer state. */                                          \
  V(int, serialize_partial_snapshot_cache_length, 0)                           \
  /* Assembler state. */                                                       \
  /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */    \
  V(byte*, assembler_spare_buffer, NULL)                                       \
  V(FatalErrorCallback, exception_behavior, NULL)                              \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
  V(v8::Debug::MessageHandler, message_handler, NULL)                          \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the global context. */                                  \
  V(int, next_serial_number, 0)                                                \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
  V(bool, always_allow_natives_syntax, false)                                  \
  /* Part of the state of liveedit. */                                         \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
  /* State for Relocatable. */                                                 \
  V(Relocatable*, relocatable_top, NULL)                                       \
  /* State for CodeEntry in profile-generator. */                              \
  V(CodeGenerator*, current_code_generator, NULL)                              \
  V(bool, jump_target_compiling_deferred_code, false)                          \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
  V(Object*, string_stream_current_security_token, NULL)                       \
  /* TODO(isolates): Release this on destruction? */                           \
  V(int*, irregexp_interpreter_backtrack_stack_cache, NULL)                    \
  /* Serializer state. */                                                      \
  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
  /* AstNode state. */                                                         \
  V(unsigned, ast_node_id, 0)                                                  \
  V(unsigned, ast_node_count, 0)                                               \
  /* SafeStackFrameIterator activations count. */                              \
  V(int, safe_stack_iterator_counter, 0)                                       \
  V(uint64_t, enabled_cpu_features, 0)                                         \
  ISOLATE_PLATFORM_INIT_LIST(V)                                                \
  ISOLATE_LOGGING_INIT_LIST(V)                                                 \
  ISOLATE_DEBUGGER_INIT_LIST(V)

class Isolate {
  // These forward declarations are required to make the friend declarations in
  // PerIsolateThreadData work on some older versions of gcc.
  class ThreadDataTable;
  class EntryStackItem;
 public:
  ~Isolate();

  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }
    void set_stack_limit(uintptr_t value) { stack_limit_ = value; }
    uintptr_t stack_limit() const { return stack_limit_; }
    ThreadState* thread_state() const { return thread_state_; }
    void set_thread_state(ThreadState* value) { thread_state_ = value; }

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
    Simulator* simulator() const { return simulator_; }
    void set_simulator(Simulator* simulator) {
      simulator_ = simulator;
    }
#endif

    bool Matches(Isolate* isolate, ThreadId thread_id) const {
      return isolate_ == isolate && thread_id_.Equals(thread_id);
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

#if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
    !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
    Simulator* simulator_;
#endif

    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };


  enum AddressId {
#define C(name) k_##name,
    ISOLATE_ADDRESS_LIST(C)
    ISOLATE_ADDRESS_LIST_PROF(C)
#undef C
    k_isolate_address_count
  };

  // Returns the PerIsolateThreadData for the current thread (or NULL if one is
  // not currently set).
  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    return reinterpret_cast<PerIsolateThreadData*>(
        Thread::GetThreadLocal(per_isolate_thread_data_key_));
  }

  // Returns the isolate inside which the current thread is running.
  INLINE(static Isolate* Current()) {
    Isolate* isolate = reinterpret_cast<Isolate*>(
        Thread::GetExistingThreadLocal(isolate_key_));
    ASSERT(isolate != NULL);
    return isolate;
  }

  INLINE(static Isolate* UncheckedCurrent()) {
    return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
  }
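
  // Illustrative sketch (an assumption about typical call sites, not a
  // prescribed pattern): code that already runs inside an entered isolate
  // can fetch it from thread-local storage.
  //
  //   Isolate* isolate = Isolate::Current();          // asserts non-NULL
  //   Isolate* maybe = Isolate::UncheckedCurrent();   // may be NULL
  //   PerIsolateThreadData* data =
  //       Isolate::CurrentPerIsolateThreadData();     // may be NULL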

  bool Init(Deserializer* des);

  bool IsInitialized() { return state_ == INITIALIZED; }

  // True if at least one thread Enter'ed this isolate.
  bool IsInUse() { return entry_stack_ != NULL; }

  // Destroys the non-default isolates.
  // Sets the default isolate into a "has_been_disposed" state rather than
  // destroying it, for legacy API reasons.
  void TearDown();

  bool IsDefaultIsolate() const { return this == default_isolate_; }

  // Ensures that process-wide resources and the default isolate have been
  // allocated. It is only necessary to call this method in rare cases, for
  // example if you are using V8 from within the body of a static initializer.
  // Safe to call multiple times.
  static void EnsureDefaultIsolate();

  // Find the PerThread for this particular (isolate, thread) combination
  // If one does not yet exist, return null.
  PerIsolateThreadData* FindPerThreadDataForThisThread();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Get the debugger from the default isolate. Preinitializes the
  // default isolate if needed.
  static Debugger* GetDefaultIsolateDebugger();
#endif

  // Get the stack guard from the default isolate. Preinitializes the
  // default isolate if needed.
  static StackGuard* GetDefaultIsolateStackGuard();

  // Returns the key used to store the pointer to the current isolate.
  // Used internally for V8 threads that do not execute JavaScript but still
  // are part of the domain of an isolate (like the context switcher).
  static Thread::LocalStorageKey isolate_key() {
    return isolate_key_;
  }

  // Returns the key used to store process-wide thread IDs.
  static Thread::LocalStorageKey thread_id_key() {
    return thread_id_key_;
  }

  // If a client attempts to create a Locker without specifying an isolate,
  // we assume that the client is using legacy behavior. Set up the current
  // thread to be inside the implicit isolate (or fail a check if we have
  // switched to non-legacy behavior).
  static void EnterDefaultIsolate();

  // Debug.
  // Mutex for serializing access to break control structures.
  Mutex* break_access() { return break_access_; }

  Address get_address_from_id(AddressId id);

  // Access to top context (where the current function object was created).
  Context* context() { return thread_local_top_.context_; }
  void set_context(Context* context) {
    thread_local_top_.context_ = context;
  }
  Context** context_address() { return &thread_local_top_.context_; }

  SaveContext* save_context() { return thread_local_top_.save_context_; }
  void set_save_context(SaveContext* save) {
    thread_local_top_.save_context_ = save;
  }

  // Access to current thread id.
  ThreadId thread_id() { return thread_local_top_.thread_id_; }
  void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; }

  // Interface to pending exception.
  MaybeObject* pending_exception() {
    ASSERT(has_pending_exception());
    return thread_local_top_.pending_exception_;
  }
  bool external_caught_exception() {
    return thread_local_top_.external_caught_exception_;
  }
  void set_external_caught_exception(bool value) {
    thread_local_top_.external_caught_exception_ = value;
  }
  void set_pending_exception(MaybeObject* exception) {
    thread_local_top_.pending_exception_ = exception;
  }
  void clear_pending_exception() {
    thread_local_top_.pending_exception_ = heap_.the_hole_value();
  }
  MaybeObject** pending_exception_address() {
    return &thread_local_top_.pending_exception_;
  }
  bool has_pending_exception() {
    return !thread_local_top_.pending_exception_->IsTheHole();
  }
  void clear_pending_message() {
    thread_local_top_.has_pending_message_ = false;
    thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
    thread_local_top_.pending_message_script_ = NULL;
  }
  v8::TryCatch* try_catch_handler() {
    return thread_local_top_.TryCatchHandler();
  }
  Address try_catch_handler_address() {
    return thread_local_top_.try_catch_handler_address();
  }
  bool* external_caught_exception_address() {
    return &thread_local_top_.external_caught_exception_;
  }
  v8::TryCatch* catcher() {
    return thread_local_top_.catcher_;
  }
  void set_catcher(v8::TryCatch* catcher) {
    thread_local_top_.catcher_ = catcher;
  }

  MaybeObject** scheduled_exception_address() {
    return &thread_local_top_.scheduled_exception_;
  }
  MaybeObject* scheduled_exception() {
    ASSERT(has_scheduled_exception());
    return thread_local_top_.scheduled_exception_;
  }
  bool has_scheduled_exception() {
    return !thread_local_top_.scheduled_exception_->IsTheHole();
  }
  void clear_scheduled_exception() {
    thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
  }

  bool IsExternallyCaught();

  bool is_catchable_by_javascript(MaybeObject* exception) {
    return (exception != Failure::OutOfMemoryException()) &&
           (exception != heap()->termination_exception());
  }

  // JS execution stack (see frames.h).
  static Address c_entry_fp(ThreadLocalTop* thread) {
    return thread->c_entry_fp_;
  }
  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }

  inline Address* c_entry_fp_address() {
    return &thread_local_top_.c_entry_fp_;
  }
  inline Address* handler_address() { return &thread_local_top_.handler_; }

#ifdef ENABLE_LOGGING_AND_PROFILING
  // Bottom JS entry (see StackTracer::Trace in log.cc).
  static Address js_entry_sp(ThreadLocalTop* thread) {
    return thread->js_entry_sp_;
  }
  inline Address* js_entry_sp_address() {
    return &thread_local_top_.js_entry_sp_;
  }
#endif

  // Generated code scratch locations.
  void* formal_count_address() { return &thread_local_top_.formal_count_; }

  // Returns the global object of the current context. It could be
  // a builtin object, or a js global object.
  Handle<GlobalObject> global() {
    return Handle<GlobalObject>(context()->global());
  }

  // Returns the global proxy object of the current context.
  Object* global_proxy() {
    return context()->global_proxy();
  }

  Handle<JSBuiltinsObject> js_builtins_object() {
    return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
  }

  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
  void FreeThreadResources() { thread_local_top_.Free(); }

  // This method is called by the API after operations that may throw
  // exceptions. If an exception was thrown and not handled by an external
  // handler, the exception is scheduled to be rethrown when we return to
  // running JavaScript code. Returns true if an exception was scheduled.
  bool OptionalRescheduleException(bool is_bottom_call);

  class ExceptionScope {
   public:
    explicit ExceptionScope(Isolate* isolate) :
      // Scope currently can only be used for regular exceptions, not
      // failures like OOM or termination exception.
      isolate_(isolate),
      pending_exception_(isolate_->pending_exception()->ToObjectUnchecked()),
      catcher_(isolate_->catcher())
    { }

    ~ExceptionScope() {
      isolate_->set_catcher(catcher_);
      isolate_->set_pending_exception(*pending_exception_);
    }

   private:
    Isolate* isolate_;
    Handle<Object> pending_exception_;
    v8::TryCatch* catcher_;
  };
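
  // Illustrative sketch (an assumption about intended use, not a documented
  // contract): ExceptionScope can shield a pending exception while unrelated
  // work runs, restoring it when the scope ends.
  //
  //   {
  //     ExceptionScope scope(isolate);   // saves pending exception + catcher
  //     PerformSideWork(isolate);        // hypothetical helper
  //   }                                  // original exception restored here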

  void SetCaptureStackTraceForUncaughtExceptions(
      bool capture,
      int frame_limit,
      StackTrace::StackTraceOptions options);

  // Tells whether the current context has experienced an out of memory
  // exception.
  bool is_out_of_memory();

  void PrintCurrentStackTrace(FILE* out);
  void PrintStackTrace(FILE* out, char* thread_data);
  void PrintStack(StringStream* accumulator);
  void PrintStack();
  Handle<String> StackTraceString();
  Handle<JSArray> CaptureCurrentStackTrace(
      int frame_limit,
      StackTrace::StackTraceOptions options);

  // Returns whether the top context may access the given global object. If
  // the result is false, the pending exception is guaranteed to be
  // set.
  bool MayNamedAccess(JSObject* receiver,
                      Object* key,
                      v8::AccessType type);
  bool MayIndexedAccess(JSObject* receiver,
                        uint32_t index,
                        v8::AccessType type);

  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
  void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);

  // Exception throwing support. The caller should use the result
  // of Throw() as its return value.
  Failure* Throw(Object* exception, MessageLocation* location = NULL);
  // Re-throw an exception. This involves no error reporting since
  // error reporting was handled when the exception was thrown
  // originally.
  Failure* ReThrow(MaybeObject* exception, MessageLocation* location = NULL);
  void ScheduleThrow(Object* exception);
  void ReportPendingMessages();
  Failure* ThrowIllegalOperation();
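
  // Illustrative sketch only (assumes a hypothetical runtime helper): the
  // usual pattern is to return the result of a throwing call directly, so
  // the caller sees the failure sentinel.
  //
  //   MaybeObject* Runtime_DoCheckedWork(Isolate* isolate, Object* arg) {
  //     if (!IsValid(arg)) {              // hypothetical predicate
  //       return isolate->ThrowIllegalOperation();
  //     }
  //     return DoWork(arg);               // hypothetical helper
  //   }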

  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
  Failure* PromoteScheduledException();
  void DoThrow(MaybeObject* exception, MessageLocation* location);
  // Checks if exception should be reported and finds out if it's
  // caught externally.
  bool ShouldReportException(bool* can_be_caught_externally,
                             bool catchable_by_javascript);

  // Attempts to compute the current source location, storing the
  // result in the target out parameter.
  void ComputeLocation(MessageLocation* target);

  // Override command line flag.
  void TraceException(bool flag);

  // Out of resource exception helpers.
  Failure* StackOverflow();
  Failure* TerminateExecution();

  // Administration
  void Iterate(ObjectVisitor* v);
  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
  char* Iterate(ObjectVisitor* v, char* t);
  void IterateThread(ThreadVisitor* v);
  void IterateThread(ThreadVisitor* v, char* t);


  // Returns the current global context.
  Handle<Context> global_context();

  // Returns the global context of the calling JavaScript code. That
  // is, the global context of the top-most JavaScript frame.
  Handle<Context> GetCallingGlobalContext();

  void RegisterTryCatchHandler(v8::TryCatch* that);
  void UnregisterTryCatchHandler(v8::TryCatch* that);

  char* ArchiveThread(char* to);
  char* RestoreThread(char* from);

  static const char* const kStackOverflowMessage;

  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
  static const int kBMMaxShift = 250;  // See StringSearchBase.

  // Accessors.
#define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
  inline type name() const {                                            \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return name##_;                                                     \
  }                                                                     \
  inline void set_##name(type value) {                                  \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    name##_ = value;                                                    \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR
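
  // Illustrative note (a sketch of what the X-macro above expands to, given
  // only as a reading aid): for the ISOLATE_INIT_LIST entry
  // V(FatalErrorCallback, exception_behavior, NULL) the generated accessors
  // are roughly
  //
  //   inline FatalErrorCallback exception_behavior() const;
  //   inline void set_exception_behavior(FatalErrorCallback value);
  //
  // backed by a FatalErrorCallback exception_behavior_ field declared by
  // GLOBAL_BACKING_STORE further down.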

#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
  inline type* name() {                                                 \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return &(name##_)[0];                                               \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

#define GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name)      \
  Handle<type> name() {                                       \
    return Handle<type>(context()->global_context()->name()); \
  }
  GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSOR)
#undef GLOBAL_CONTEXT_FIELD_ACCESSOR

  Bootstrapper* bootstrapper() { return bootstrapper_; }
  Counters* counters() { return counters_; }
  CodeRange* code_range() { return code_range_; }
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
  Logger* logger() { return logger_; }
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
  StatsTable* stats_table() { return stats_table_; }
  StubCache* stub_cache() { return stub_cache_; }
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }

  TranscendentalCache* transcendental_cache() const {
    return transcendental_cache_;
  }

  MemoryAllocator* memory_allocator() {
    return memory_allocator_;
  }

  KeyedLookupCache* keyed_lookup_cache() {
    return keyed_lookup_cache_;
  }

  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

  v8::ImplementationUtilities::HandleScopeData* handle_scope_data() {
    return &handle_scope_data_;
  }
  HandleScopeImplementer* handle_scope_implementer() {
    ASSERT(handle_scope_implementer_);
    return handle_scope_implementer_;
  }
  Zone* zone() { return &zone_; }

  UnicodeCache* unicode_cache() {
    return unicode_cache_;
  }

  PcToCodeCache* pc_to_code_cache() { return pc_to_code_cache_; }

  StringInputBuffer* write_input_buffer() { return write_input_buffer_; }

  GlobalHandles* global_handles() { return global_handles_; }

  ThreadManager* thread_manager() { return thread_manager_; }

  ContextSwitcher* context_switcher() { return context_switcher_; }

  void set_context_switcher(ContextSwitcher* switcher) {
    context_switcher_ = switcher;
  }

  StringTracker* string_tracker() { return string_tracker_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  StringInputBuffer* objects_string_compare_buffer_a() {
    return &objects_string_compare_buffer_a_;
  }

  StringInputBuffer* objects_string_compare_buffer_b() {
    return &objects_string_compare_buffer_b_;
  }

  StaticResource<StringInputBuffer>* objects_string_input_buffer() {
    return &objects_string_input_buffer_;
  }

  AstSentinels* ast_sentinels() { return ast_sentinels_; }

  RuntimeState* runtime_state() { return &runtime_state_; }

  StaticResource<SafeStringInputBuffer>* compiler_safe_string_input_buffer() {
    return &compiler_safe_string_input_buffer_;
  }

  Builtins* builtins() { return &builtins_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
    return &interp_canonicalize_mapping_;
  }

  ZoneObjectList* frame_element_constant_list() {
    return &frame_element_constant_list_;
  }

  ZoneObjectList* result_constant_list() {
    return &result_constant_list_;
  }

  void* PreallocatedStorageNew(size_t size);
  void PreallocatedStorageDelete(void* p);
  void PreallocatedStorageInit(size_t size);

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger() { return debugger_; }
  Debug* debug() { return debug_; }
#endif

  inline bool DebuggerHasBreakPoints();

#ifdef ENABLE_LOGGING_AND_PROFILING
  ProducerHeapProfile* producer_heap_profile() {
    return producer_heap_profile_;
  }
#endif

#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }

  int* code_kind_statistics() { return code_kind_statistics_; }
#endif

#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
    defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
  bool simulator_initialized() { return simulator_initialized_; }
  void set_simulator_initialized(bool initialized) {
    simulator_initialized_ = initialized;
  }

  HashMap* simulator_i_cache() { return simulator_i_cache_; }
  void set_simulator_i_cache(HashMap* hash_map) {
    simulator_i_cache_ = hash_map;
  }

  Redirection* simulator_redirection() {
    return simulator_redirection_;
  }
  void set_simulator_redirection(Redirection* redirection) {
    simulator_redirection_ = redirection;
  }
#endif

  Factory* factory() { return reinterpret_cast<Factory*>(this); }

  // SerializerDeserializer state.
  static const int kPartialSnapshotCacheCapacity = 1400;

  static const int kJSRegexpStaticOffsetsVectorSize = 50;

#ifdef ENABLE_LOGGING_AND_PROFILING
  Address external_callback() {
    return thread_local_top_.external_callback_;
  }
  void set_external_callback(Address callback) {
    thread_local_top_.external_callback_ = callback;
  }
#endif

#ifdef ENABLE_VMSTATE_TRACKING
  StateTag current_vm_state() {
    return thread_local_top_.current_vm_state_;
  }

  void SetCurrentVMState(StateTag state) {
    if (RuntimeProfiler::IsEnabled()) {
      StateTag current_state = thread_local_top_.current_vm_state_;
      if (current_state != JS && state == JS) {
        // Non-JS -> JS transition.
        RuntimeProfiler::IsolateEnteredJS(this);
      } else if (current_state == JS && state != JS) {
        // JS -> non-JS transition.
        ASSERT(RuntimeProfiler::IsSomeIsolateInJS());
        RuntimeProfiler::IsolateExitedJS(this);
      } else {
        // Other types of state transitions are not interesting to the
        // runtime profiler, because they don't affect whether we're
        // in JS or not.
        ASSERT((current_state == JS) == (state == JS));
      }
    }
    thread_local_top_.current_vm_state_ = state;
  }
#endif

  void ResetEagerOptimizingData();

  void SetData(void* data) { embedder_data_ = data; }
  void* GetData() { return embedder_data_; }

 private:
  Isolate();

  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(Isolate* isolate, ThreadId thread_id);
    void Remove(PerIsolateThreadData* data);

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack that is kept in sync with threads Enter'ing and
  // Exit'ing the Isolate. The top of the stack points to a thread which is
  // currently running the Isolate. When the stack is empty, the Isolate is
  // considered not entered by any thread and can be Disposed.
  // If the same thread enters the Isolate more than once, the entry_count_
  // is incremented rather than a new item being pushed to the stack.
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  // This mutex protects highest_thread_id_, thread_data_table_ and
  // default_isolate_.
  static Mutex* process_wide_mutex_;

  static Thread::LocalStorageKey per_isolate_thread_data_key_;
  static Thread::LocalStorageKey isolate_key_;
  static Thread::LocalStorageKey thread_id_key_;
  static Isolate* default_isolate_;
  static ThreadDataTable* thread_data_table_;

  bool PreInit();

  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  enum State {
    UNINITIALIZED,    // Some components may not have been allocated.
    PREINITIALIZED,   // Components have been allocated but not initialized.
    INITIALIZED       // All components are fully initialized.
  };

  State state_;
  EntryStackItem* entry_stack_;

  // Allocate and insert PerIsolateThreadData into the ThreadDataTable
  // (regardless of whether such data already exists).
  PerIsolateThreadData* AllocatePerIsolateThreadData(ThreadId thread_id);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // PreInits and returns a default isolate. Needed when a new thread tries
  // to create a Locker for the first time (the lock itself is in the isolate).
  static Isolate* GetDefaultIsolateForLocking();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time; this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previously entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time; this should be prevented using external locking.
  void Exit();

  void PreallocatedMemoryThreadStart();
  void PreallocatedMemoryThreadStop();
  void InitializeThreadLocal();

  void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
  void MarkCompactPrologue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);
  void MarkCompactEpilogue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);

  void FillCache();

  void PropagatePendingExceptionToExternalTryCatch();

  int stack_trace_nesting_level_;
  StringStream* incomplete_message_;
  // The preallocated memory thread singleton.
  PreallocatedMemoryThread* preallocated_memory_thread_;
  Address isolate_addresses_[k_isolate_address_count + 1];  // NOLINT
  NoAllocationStringAllocator* preallocated_message_space_;

  Bootstrapper* bootstrapper_;
  RuntimeProfiler* runtime_profiler_;
  CompilationCache* compilation_cache_;
  Counters* counters_;
  CodeRange* code_range_;
  Mutex* break_access_;
  Heap heap_;
  Logger* logger_;
  StackGuard stack_guard_;
  StatsTable* stats_table_;
  StubCache* stub_cache_;
  DeoptimizerData* deoptimizer_data_;
  ThreadLocalTop thread_local_top_;
  bool capture_stack_trace_for_uncaught_exceptions_;
  int stack_trace_for_uncaught_exceptions_frame_limit_;
  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
  TranscendentalCache* transcendental_cache_;
  MemoryAllocator* memory_allocator_;
  KeyedLookupCache* keyed_lookup_cache_;
  ContextSlotCache* context_slot_cache_;
  DescriptorLookupCache* descriptor_lookup_cache_;
  v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
  HandleScopeImplementer* handle_scope_implementer_;
  UnicodeCache* unicode_cache_;
  Zone zone_;
  PreallocatedStorage in_use_list_;
  PreallocatedStorage free_list_;
  bool preallocated_storage_preallocated_;
  PcToCodeCache* pc_to_code_cache_;
  StringInputBuffer* write_input_buffer_;
  GlobalHandles* global_handles_;
  ContextSwitcher* context_switcher_;
  ThreadManager* thread_manager_;
  AstSentinels* ast_sentinels_;
  RuntimeState runtime_state_;
  StaticResource<SafeStringInputBuffer> compiler_safe_string_input_buffer_;
  Builtins builtins_;
  StringTracker* string_tracker_;
  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
  StringInputBuffer objects_string_compare_buffer_a_;
  StringInputBuffer objects_string_compare_buffer_b_;
  StaticResource<StringInputBuffer> objects_string_input_buffer_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize>
      regexp_macro_assembler_canonicalize_;
  RegExpStack* regexp_stack_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
  ZoneObjectList frame_element_constant_list_;
  ZoneObjectList result_constant_list_;
  void* embedder_data_;

#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
    defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
  bool simulator_initialized_;
  HashMap* simulator_i_cache_;
  Redirection* simulator_redirection_;
#endif

#ifdef DEBUG
  // A static array of histogram info for each type.
  HistogramInfo heap_histograms_[LAST_TYPE + 1];
  JSObject::SpillInformation js_spill_information_;
  int code_kind_statistics_[Code::NUMBER_OF_KINDS];
#endif

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger_;
  Debug* debug_;
#endif

#ifdef ENABLE_LOGGING_AND_PROFILING
  ProducerHeapProfile* producer_heap_profile_;
#endif

#define GLOBAL_BACKING_STORE(type, name, initialvalue)                  \
  type name##_;
  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
#undef GLOBAL_BACKING_STORE

#define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                  \
  type name##_[length];
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
#undef GLOBAL_ARRAY_BACKING_STORE

#ifdef DEBUG
  // This class is huge and has a number of fields controlled by
  // preprocessor defines. Make sure the offsets of these fields agree
  // between compilation units.
#define ISOLATE_FIELD_OFFSET(type, name, ignored)                       \
  static const intptr_t name##_debug_offset_;
  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

  friend class ExecutionAccess;
  friend class IsolateInitializer;
  friend class ThreadManager;
  friend class Simulator;
  friend class StackGuard;
  friend class ThreadId;
  friend class v8::Isolate;
  friend class v8::Locker;
  friend class v8::Unlocker;

  DISALLOW_COPY_AND_ASSIGN(Isolate);
};


// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
// class as a work around for a bug in the generated code found with these
// versions of GCC. See V8 issue 122 for details.
class SaveContext BASE_EMBEDDED {
 public:
  explicit SaveContext(Isolate* isolate) : prev_(isolate->save_context()) {
    if (isolate->context() != NULL) {
      context_ = Handle<Context>(isolate->context());
#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
      dummy_ = Handle<Context>(isolate->context());
#endif
    }
    isolate->set_save_context(this);

    // If there is no JS frame under the current C frame, use the value 0.
    JavaScriptFrameIterator it(isolate);
    js_sp_ = it.done() ? 0 : it.frame()->sp();
  }

  ~SaveContext() {
    if (context_.is_null()) {
      Isolate* isolate = Isolate::Current();
      isolate->set_context(NULL);
      isolate->set_save_context(prev_);
    } else {
      Isolate* isolate = context_->GetIsolate();
      isolate->set_context(*context_);
      isolate->set_save_context(prev_);
    }
  }

  Handle<Context> context() { return context_; }
  SaveContext* prev() { return prev_; }

  // Returns true if this save context is below a given JavaScript frame.
  bool below(JavaScriptFrame* frame) {
    return (js_sp_ == 0) || (frame->sp() < js_sp_);
  }

 private:
  Handle<Context> context_;
#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
  Handle<Context> dummy_;
#endif
  SaveContext* prev_;
  Address js_sp_;  // The top JS frame's sp when saving context.
};
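
// Illustrative sketch (an assumed usage pattern, not a normative example):
// temporarily switching to another context and restoring the previous one
// when the scope ends.
//
//   {
//     SaveContext save(isolate);             // remembers the current context
//     isolate->set_context(*other_context);  // 'other_context' is hypothetical
//     RunInOtherContext(isolate);            // hypothetical helper
//   }                                        // previous context restored here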


class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  AssertNoContextChange() :
      scope_(Isolate::Current()),
      context_(Isolate::Current()->context(), Isolate::Current()) {
  }

  ~AssertNoContextChange() {
    ASSERT(Isolate::Current()->context() == *context_);
  }

 private:
  HandleScope scope_;
  Handle<Context> context_;
#else
 public:
  AssertNoContextChange() { }
#endif
};


class ExecutionAccess BASE_EMBEDDED {
 public:
  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
    Lock(isolate);
  }
  ~ExecutionAccess() { Unlock(isolate_); }

  static void Lock(Isolate* isolate) { isolate->break_access_->Lock(); }
  static void Unlock(Isolate* isolate) { isolate->break_access_->Unlock(); }

  static bool TryLock(Isolate* isolate) {
    return isolate->break_access_->TryLock();
  }

 private:
  Isolate* isolate_;
};
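
// Illustrative sketch (an assumption about intended use): ExecutionAccess is
// a scoped lock around the isolate's break access mutex.
//
//   {
//     ExecutionAccess access(isolate);   // locks break_access_
//     MutateBreakState(isolate);         // hypothetical helper
//   }                                    // unlocked when 'access' goes away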


// Support for checking for stack-overflows in C++ code.
class StackLimitCheck BASE_EMBEDDED {
 public:
  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }

  bool HasOverflowed() const {
    StackGuard* stack_guard = isolate_->stack_guard();
    // Stack has overflowed in C++ code only if stack pointer exceeds the C++
    // stack guard and the limits are not set to interrupt values.
    // TODO(214): Stack overflows are ignored if an interrupt is pending. This
    // code should probably always use the initial C++ limit.
    return (reinterpret_cast<uintptr_t>(this) < stack_guard->climit()) &&
           stack_guard->IsStackOverflow();
  }
 private:
  Isolate* isolate_;
};
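
// Illustrative sketch (assumed usage, mirroring how the class comment above
// describes it):
//
//   bool RecurseOverTree(Isolate* isolate, Node* node) {  // 'Node' is hypothetical
//     StackLimitCheck check(isolate);
//     if (check.HasOverflowed()) {
//       isolate->StackOverflow();   // raise the stack overflow exception
//       return false;
//     }
//     ...
//   }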


// Support for temporarily postponing interrupts. When the outermost
// postpone scope is left the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope BASE_EMBEDDED {
 public:
  explicit PostponeInterruptsScope(Isolate* isolate)
      : stack_guard_(isolate->stack_guard()) {
    stack_guard_->thread_local_.postpone_interrupts_nesting_++;
    stack_guard_->DisableInterrupts();
  }

  ~PostponeInterruptsScope() {
    if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
      stack_guard_->EnableInterrupts();
    }
  }
 private:
  StackGuard* stack_guard_;
};
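
// Illustrative sketch (assumed usage): any interrupt requested while the
// scope is active is deferred until the outermost scope is left.
//
//   {
//     PostponeInterruptsScope postpone(isolate);
//     RunUninterruptibleStep(isolate);   // hypothetical helper
//   }                                    // pending interrupts handled here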


// Temporary macros for accessing current isolate and its subobjects.
// They provide better readability, especially when used a lot in the code.
#define HEAP (v8::internal::Isolate::Current()->heap())
#define FACTORY (v8::internal::Isolate::Current()->factory())
#define ISOLATE (v8::internal::Isolate::Current())
#define ZONE (v8::internal::Isolate::Current()->zone())
#define LOGGER (v8::internal::Isolate::Current()->logger())
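
// Illustrative note (equivalence only, no new API): each macro simply
// forwards to the current isolate, so
//
//   Heap* heap = HEAP;
//
// is shorthand for
//
//   Heap* heap = v8::internal::Isolate::Current()->heap();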


// Tells whether the global context is marked with out of memory.
inline bool Context::has_out_of_memory() {
  return global_context()->out_of_memory()->IsTrue();
}


// Mark the global context with out of memory.
inline void Context::mark_out_of_memory() {
  global_context()->set_out_of_memory(HEAP->true_value());
}


} }  // namespace v8::internal

// TODO(isolates): Get rid of these -inl.h includes and place them only where
// they're needed.
#include "allocation-inl.h"
#include "zone-inl.h"
#include "frames-inl.h"

#endif  // V8_ISOLATE_H_