// Copyright 2007-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "accessors.h"
#include "top.h"

#include "cctest.h"


using namespace v8::internal;


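// Exercises allocation paths across all heap spaces after first
// simulating failure: the first two calls report a retryable failure,
// which the CALL_HEAP_FUNCTION machinery used by Test() below should
// answer by running a GC and retrying.  On the third attempt each space
// is filled close to capacity and one more allocation of a suitable
// kind is checked to succeed.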
static MaybeObject* AllocateAfterFailures() {
  static int attempts = 0;
  if (++attempts < 3) return Failure::RetryAfterGC();

  // New space.
  NewSpace* new_space = Heap::new_space();
  static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
  while (new_space->Available() > kNewSpaceFillerSize) {
    int available_before = static_cast<int>(new_space->Available());
    CHECK(!Heap::AllocateByteArray(0)->IsFailure());
    if (available_before == new_space->Available()) {
      // New space allocations appear to be bypassed when allocation is
      // forced, so there is no need to fill up new space to make the
      // test harder.
      break;
    }
  }
  CHECK(!Heap::AllocateByteArray(100)->IsFailure());
  CHECK(!Heap::AllocateFixedArray(100, NOT_TENURED)->IsFailure());

  // Make sure we can allocate through optimized allocation functions
  // for specific kinds.
  CHECK(!Heap::AllocateFixedArray(100)->IsFailure());
  CHECK(!Heap::AllocateHeapNumber(0.42)->IsFailure());
  CHECK(!Heap::AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
  Object* object =
      Heap::AllocateJSObject(*Top::object_function())->ToObjectChecked();
  CHECK(!Heap::CopyJSObject(JSObject::cast(object))->IsFailure());

  // Old data space.
  OldSpace* old_data_space = Heap::old_data_space();
  static const int kOldDataSpaceFillerSize = ByteArray::SizeFor(0);
  while (old_data_space->Available() > kOldDataSpaceFillerSize) {
    CHECK(!Heap::AllocateByteArray(0, TENURED)->IsFailure());
  }
  CHECK(!Heap::AllocateRawAsciiString(100, TENURED)->IsFailure());

  // Large object space.
  while (!Heap::OldGenerationAllocationLimitReached()) {
    CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
  }
  CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());

  // Map space.
  MapSpace* map_space = Heap::map_space();
  static const int kMapSpaceFillerSize = Map::kSize;
  InstanceType instance_type = JS_OBJECT_TYPE;
  int instance_size = JSObject::kHeaderSize;
  while (map_space->Available() > kMapSpaceFillerSize) {
    CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
  }
  CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());

  // Test that we can allocate in old pointer space and code space.
  CHECK(!Heap::AllocateFixedArray(100, TENURED)->IsFailure());
  CHECK(!Heap::CopyCode(Builtins::builtin(Builtins::Illegal))->IsFailure());

  // Return success.
  return Smi::FromInt(42);
}


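// CALL_HEAP_FUNCTION hides the retry logic: if the allocation function
// reports a retryable failure (as AllocateAfterFailures() does twice),
// it is expected to collect garbage and call the function again,
// returning the eventual result wrapped in a Handle.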
static Handle<Object> Test() {
  CALL_HEAP_FUNCTION(AllocateAfterFailures(), Object);
}


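// Drives the allocation stress through the handle machinery directly
// and checks that the final Smi result (42) survives the retries.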
TEST(StressHandles) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<Object> o = Test();
  CHECK(o->IsSmi() && Smi::cast(*o)->value() == 42);
  env->Exit();
}


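// The same allocation stress can also be reached from JavaScript:
// this getter is installed below (in StressJS) as a native accessor.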
static MaybeObject* TestAccessorGet(Object* object, void*) {
  return AllocateAfterFailures();
}


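// Only the getter is exercised by the test; the descriptor's setter
// and data fields are left as 0.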
const AccessorDescriptor kDescriptor = {
  TestAccessorGet,
  0,
  0
};


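// Runs AllocateAfterFailures() from JavaScript by patching an accessor
// named "get" into the initial map of a constructor function and then
// evaluating "(new Foo).get".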
TEST(StressJS) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<JSFunction> function =
      Factory::NewFunction(Factory::function_symbol(), Factory::null_value());
  // Force the creation of an initial map and set the code to
  // something empty.
  Factory::NewJSObject(function);
  function->set_code(Builtins::builtin(Builtins::EmptyFunction));
  // Patch the map to have an accessor for "get".
  Handle<Map> map(function->initial_map());
  Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
  Handle<Proxy> proxy = Factory::NewProxy(&kDescriptor);
  instance_descriptors = Factory::CopyAppendProxyDescriptor(
      instance_descriptors,
      Factory::NewStringFromAscii(Vector<const char>("get", 3)),
      proxy,
      static_cast<PropertyAttributes>(0));
  map->set_instance_descriptors(*instance_descriptors);
  // Add the Foo constructor to the global object.
  env->Global()->Set(v8::String::New("Foo"), v8::Utils::ToLocal(function));
  // Call the accessor through JavaScript.
  v8::Handle<v8::Value> result =
      v8::Script::Compile(v8::String::New("(new Foo).get"))->Run();
  CHECK_EQ(42, result->Int32Value());
  env->Exit();
}


// CodeRange test.
// Tests memory management in a CodeRange by allocating and freeing
// blocks, using a pseudorandom generator to choose block sizes
// geometrically distributed between Page::kPageSize and
// 2^5 * Page::kPageSize (plus a small random excess).  Ensures that the
// freed chunks are collected and reused by allocating, in total, more
// than five times the size of the CodeRange.

// This pseudorandom generator does not need to be particularly good.
// Use the lower half of the V8::Random() generator.
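// (The recurrence below is a 16-bit multiply-with-carry step: the low
// half of the state is multiplied and the old high half is added back
// as the carry, so a nonzero state can never become 0.)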
unsigned int Pseudorandom() {
  static uint32_t lo = 2345;
  lo = 18273 * (lo & 0xFFFF) + (lo >> 16);  // Provably not 0.
  return lo & 0xFFFF;
}


// Plain old data class. Represents a block of allocated memory.
class Block {
 public:
  Block(void* base_arg, int size_arg)
      : base(base_arg), size(size_arg) {}

  void* base;
  int size;
};


TEST(CodeRange) {
  const int code_range_size = 16 * MB;
  CodeRange::Setup(code_range_size);
  int current_allocated = 0;
  int total_allocated = 0;
  List<Block> blocks(1000);

  while (total_allocated < 5 * code_range_size) {
    if (current_allocated < code_range_size / 10) {
      // Allocate a block.
      // Geometrically distributed sizes, greater than Page::kPageSize.
      size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
                         Pseudorandom() % 5000 + 1;
      size_t allocated = 0;
      void* base = CodeRange::AllocateRawMemory(requested, &allocated);
      blocks.Add(Block(base, static_cast<int>(allocated)));
      current_allocated += static_cast<int>(allocated);
      total_allocated += static_cast<int>(allocated);
    } else {
      // Free a block.
      int index = Pseudorandom() % blocks.length();
      CodeRange::FreeRawMemory(blocks[index].base, blocks[index].size);
      current_allocated -= blocks[index].size;
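      // Remove the freed block from the list in O(1) by swapping it
      // with the last element; the order of live blocks is irrelevant.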
      if (index < blocks.length() - 1) {
        blocks[index] = blocks.RemoveLast();
      } else {
        blocks.RemoveLast();
      }
    }
  }

  CodeRange::TearDown();
}