// Copyright 2007-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

28#include "v8.h"
29#include "accessors.h"
30#include "top.h"
31
32#include "cctest.h"
33
34
35using namespace v8::internal;
36
37
38static Object* AllocateAfterFailures() {
39 static int attempts = 0;
40 if (++attempts < 3) return Failure::RetryAfterGC(0);
41
42 // New space.
43 NewSpace* new_space = Heap::new_space();
44 static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
45 while (new_space->Available() > kNewSpaceFillerSize) {
46 int available_before = new_space->Available();
47 CHECK(!Heap::AllocateByteArray(0)->IsFailure());
48 if (available_before == new_space->Available()) {
49 // It seems that we are avoiding new space allocations when
50 // allocation is forced, so no need to fill up new space
51 // in order to make the test harder.
52 break;
53 }
54 }
55 CHECK(!Heap::AllocateByteArray(100)->IsFailure());
56 CHECK(!Heap::AllocateFixedArray(100, NOT_TENURED)->IsFailure());
57
58 // Make sure we can allocate through optimized allocation functions
59 // for specific kinds.
60 CHECK(!Heap::AllocateFixedArray(100)->IsFailure());
61 CHECK(!Heap::AllocateHeapNumber(0.42)->IsFailure());
62 CHECK(!Heap::AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
63 Object* object = Heap::AllocateJSObject(*Top::object_function());
64 CHECK(!Heap::CopyJSObject(JSObject::cast(object))->IsFailure());
65
66 // Old data space.
67 OldSpace* old_data_space = Heap::old_data_space();
Steve Blockd0582a62009-12-15 09:54:21 +000068 static const int kOldDataSpaceFillerSize = ByteArray::SizeFor(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000069 while (old_data_space->Available() > kOldDataSpaceFillerSize) {
Steve Blockd0582a62009-12-15 09:54:21 +000070 CHECK(!Heap::AllocateByteArray(0, TENURED)->IsFailure());
Steve Blocka7e24c12009-10-30 11:49:00 +000071 }
72 CHECK(!Heap::AllocateRawAsciiString(100, TENURED)->IsFailure());
73
74 // Large object space.
75 while (!Heap::OldGenerationAllocationLimitReached()) {
76 CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
77 }
78 CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
79
80 // Map space.
81 MapSpace* map_space = Heap::map_space();
82 static const int kMapSpaceFillerSize = Map::kSize;
83 InstanceType instance_type = JS_OBJECT_TYPE;
84 int instance_size = JSObject::kHeaderSize;
85 while (map_space->Available() > kMapSpaceFillerSize) {
86 CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
87 }
88 CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
89
90 // Test that we can allocate in old pointer space and code space.
91 CHECK(!Heap::AllocateFixedArray(100, TENURED)->IsFailure());
92 CHECK(!Heap::CopyCode(Builtins::builtin(Builtins::Illegal))->IsFailure());
93
94 // Return success.
95 return Smi::FromInt(42);
96}
97
98
// Runs AllocateAfterFailures() through CALL_HEAP_FUNCTION, which is
// presumably responsible for handling the RetryAfterGC failures it
// returns on the first two attempts (verify against heap.h).
static Handle<Object> Test() {
  CALL_HEAP_FUNCTION(AllocateAfterFailures(), Object);
}
102
103
// Checks that the handle-based allocation path survives the simulated
// failures: Test() must ultimately produce the Smi sentinel 42.
TEST(StressHandles) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<Object> o = Test();
  CHECK(o->IsSmi() && Smi::cast(*o)->value() == 42);
  env->Exit();
}
112
113
// Accessor getter used by TEST(StressJS).  Simply runs the stress
// allocation; on success it yields Smi 42 (the unnamed void* data
// argument is unused).
static Object* TestAccessorGet(Object* object, void*) {
  return AllocateAfterFailures();
}
117
118
// Accessor descriptor installing TestAccessorGet as the getter; the
// remaining two fields are unused here (0) — presumably setter and data,
// verify against accessors.h.
const AccessorDescriptor kDescriptor = {
  TestAccessorGet,
  0,
  0
};
124
125
// Stress-tests allocation retries through a native accessor: installs
// TestAccessorGet as the "get" property on instances of a constructor
// Foo and reads the property from JavaScript, expecting 42.
TEST(StressJS) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<JSFunction> function =
      Factory::NewFunction(Factory::function_symbol(), Factory::null_value());
  // Force the creation of an initial map and set the code to
  // something empty.
  Factory::NewJSObject(function);
  function->set_code(Builtins::builtin(Builtins::EmptyFunction));
  // Patch the map to have an accessor for "get".
  Handle<Map> map(function->initial_map());
  Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
  Handle<Proxy> proxy = Factory::NewProxy(&kDescriptor);
  instance_descriptors = Factory::CopyAppendProxyDescriptor(
      instance_descriptors,
      Factory::NewStringFromAscii(Vector<const char>("get", 3)),
      proxy,
      static_cast<PropertyAttributes>(0));
  map->set_instance_descriptors(*instance_descriptors);
  // Add the Foo constructor to the global object.
  env->Global()->Set(v8::String::New("Foo"), v8::Utils::ToLocal(function));
  // Call the accessor through JavaScript.
  v8::Handle<v8::Value> result =
      v8::Script::Compile(v8::String::New("(new Foo).get"))->Run();
  CHECK_EQ(42, result->Int32Value());
  env->Exit();
}
154
155
// CodeRange test.
// Tests memory management in a CodeRange by allocating and freeing blocks,
// using a pseudorandom generator to choose block sizes geometrically
// distributed between Page::kPageSize and 2^5 * Page::kPageSize (each
// padded by a small random amount, matching the computation in the test).
// Ensures that the freed chunks are collected and reused by allocating (in
// total) more than the size of the CodeRange.
162
163// This pseudorandom generator does not need to be particularly good.
164// Use the lower half of the V8::Random() generator.
// Deterministic 16-bit pseudorandom stream.  Quality is unimportant for
// this test; the multiply-with-carry step below merely has to produce a
// varied, repeatable sequence.
unsigned int Pseudorandom() {
  static uint32_t state = 2345;
  const uint32_t low16 = state & 0xFFFF;
  const uint32_t carry = state >> 16;
  state = 18273 * low16 + carry;  // Provably not 0.
  return state & 0xFFFF;
}
170
171
// Plain old data class.  Passive record describing one block of memory
// handed out by CodeRange::AllocateRawMemory in TEST(CodeRange).
class Block {
 public:
  Block(void* block_start, int block_bytes) {
    base = block_start;
    size = block_bytes;
  }

  void* base;  // Start address of the block.
  int size;    // Size of the block in bytes.
};
181
182
183TEST(CodeRange) {
184 const int code_range_size = 16*MB;
185 CodeRange::Setup(code_range_size);
186 int current_allocated = 0;
187 int total_allocated = 0;
188 List<Block> blocks(1000);
189
190 while (total_allocated < 5 * code_range_size) {
191 if (current_allocated < code_range_size / 10) {
192 // Allocate a block.
193 // Geometrically distributed sizes, greater than Page::kPageSize.
194 size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
195 Pseudorandom() % 5000 + 1;
196 size_t allocated = 0;
197 void* base = CodeRange::AllocateRawMemory(requested, &allocated);
Steve Blockd0582a62009-12-15 09:54:21 +0000198 blocks.Add(Block(base, static_cast<int>(allocated)));
199 current_allocated += static_cast<int>(allocated);
200 total_allocated += static_cast<int>(allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +0000201 } else {
202 // Free a block.
203 int index = Pseudorandom() % blocks.length();
204 CodeRange::FreeRawMemory(blocks[index].base, blocks[index].size);
205 current_allocated -= blocks[index].size;
206 if (index < blocks.length() - 1) {
207 blocks[index] = blocks.RemoveLast();
208 } else {
209 blocks.RemoveLast();
210 }
211 }
212 }
213
214 CodeRange::TearDown();
215}