/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_SPACE_TEST_H_
#define ART_RUNTIME_GC_SPACE_SPACE_TEST_H_

#include "zygote_space.h"

#include <stdint.h>

#include "common_runtime_test.h"
#include "globals.h"
#include "UniquePtr.h"
#include "mirror/array-inl.h"
#include "mirror/object-inl.h"

namespace art {
namespace gc {
namespace space {

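// Shared fixture and test bodies for exercising MallocSpace implementations. Concrete space
// tests supply a factory function matching SpaceTest::CreateSpaceFn and instantiate the test
// suites through the TEST_SPACE_CREATE_FN_* macros at the end of this header.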
class SpaceTest : public CommonRuntimeTest {
 public:
  // Cached JNI reference to the byte-array class ("[B"); test allocations are given this class
  // so they look like valid heap objects.
  jobject byte_array_class_;

  SpaceTest() : byte_array_class_(nullptr) {
  }

  // Make the space known to the heap, optionally revoking thread-local buffers first.
  void AddSpace(ContinuousSpace* space, bool revoke = true) {
    Heap* heap = Runtime::Current()->GetHeap();
    if (revoke) {
      heap->RevokeAllThreadLocalBuffers();
    }
    heap->AddSpace(space);
    heap->SetSpaceAsDefault(space);
  }

  mirror::Class* GetByteArrayClass(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SirtRef<mirror::ClassLoader> null_loader(self, nullptr);
    if (byte_array_class_ == nullptr) {
      mirror::Class* byte_array_class =
          Runtime::Current()->GetClassLinker()->FindClass(self, "[B", null_loader);
      EXPECT_TRUE(byte_array_class != nullptr);
      byte_array_class_ = self->GetJniEnv()->NewLocalRef(byte_array_class);
      EXPECT_TRUE(byte_array_class_ != nullptr);
    }
    return reinterpret_cast<mirror::Class*>(self->DecodeJObject(byte_array_class_));
  }

  // Allocate without growing the footprint limit and install the byte-array class on success.
  mirror::Object* Alloc(space::MallocSpace* alloc_space, Thread* self, size_t bytes,
                        size_t* bytes_allocated, size_t* usable_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SirtRef<mirror::Class> byte_array_class(self, GetByteArrayClass(self));
    mirror::Object* obj = alloc_space->Alloc(self, bytes, bytes_allocated, usable_size);
    if (obj != nullptr) {
      InstallClass(obj, byte_array_class.get(), bytes);
    }
    return obj;
  }

  // Like Alloc(), but allowed to grow the footprint limit.
  mirror::Object* AllocWithGrowth(space::MallocSpace* alloc_space, Thread* self, size_t bytes,
                                  size_t* bytes_allocated, size_t* usable_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    SirtRef<mirror::Class> byte_array_class(self, GetByteArrayClass(self));
    mirror::Object* obj = alloc_space->AllocWithGrowth(self, bytes, bytes_allocated, usable_size);
    if (obj != nullptr) {
      InstallClass(obj, byte_array_class.get(), bytes);
    }
    return obj;
  }

  void InstallClass(mirror::Object* o, mirror::Class* byte_array_class, size_t size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Note the minimum size, which is the size of a zero-length byte array.
    EXPECT_GE(size, SizeOfZeroLengthByteArray());
    EXPECT_TRUE(byte_array_class != nullptr);
    o->SetClass(byte_array_class);
    if (kUseBakerOrBrooksReadBarrier) {
      // Like the proper heap object allocation, install and verify
      // the correct read barrier pointer.
      if (kUseBrooksReadBarrier) {
        o->SetReadBarrierPointer(o);
      }
      o->AssertReadBarrierPointer();
    }
    mirror::Array* arr = o->AsArray<kVerifyNone>();
    size_t header_size = SizeOfZeroLengthByteArray();
    int32_t length = size - header_size;
    arr->SetLength(length);
    EXPECT_EQ(arr->SizeOf<kVerifyNone>(), size);
  }

  static size_t SizeOfZeroLengthByteArray() {
    return mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimByte)).Uint32Value();
  }

  // Factory-function type that concrete space tests supply to create the space under test.
  typedef MallocSpace* (*CreateSpaceFn)(const std::string& name, size_t initial_size, size_t growth_limit,
                                        size_t capacity, byte* requested_begin);
  void InitTestBody(CreateSpaceFn create_space);
  void ZygoteSpaceTestBody(CreateSpaceFn create_space);
  void AllocAndFreeTestBody(CreateSpaceFn create_space);
  void AllocAndFreeListTestBody(CreateSpaceFn create_space);

  void SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space, intptr_t object_size,
                                           int round, size_t growth_limit);
  void SizeFootPrintGrowthLimitAndTrimDriver(size_t object_size, CreateSpaceFn create_space);
};

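// Simple linear congruential pseudo-random number generator (using the classic C library rand()
// constants) so that the "random" allocation sizes in the tests below are deterministic.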
static inline size_t test_rand(size_t* seed) {
  *seed = *seed * 1103515245 + 12345;
  return *seed;
}

void SpaceTest::InitTestBody(CreateSpaceFn create_space) {
  {
    // Init < max == growth
    UniquePtr<Space> space(create_space("test", 16 * MB, 32 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space.get() != nullptr);
  }
  {
    // Init == max == growth
    UniquePtr<Space> space(create_space("test", 16 * MB, 16 * MB, 16 * MB, nullptr));
    EXPECT_TRUE(space.get() != nullptr);
  }
  {
    // Init > max == growth
    UniquePtr<Space> space(create_space("test", 32 * MB, 16 * MB, 16 * MB, nullptr));
    EXPECT_TRUE(space.get() == nullptr);
  }
  {
    // Growth == init < max
    UniquePtr<Space> space(create_space("test", 16 * MB, 16 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space.get() != nullptr);
  }
  {
    // Growth < init < max
    UniquePtr<Space> space(create_space("test", 16 * MB, 8 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space.get() == nullptr);
  }
  {
    // Init < growth < max
    UniquePtr<Space> space(create_space("test", 8 * MB, 16 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space.get() != nullptr);
  }
  {
    // Init < max < growth
    UniquePtr<Space> space(create_space("test", 8 * MB, 32 * MB, 16 * MB, nullptr));
    EXPECT_TRUE(space.get() == nullptr);
  }
}

// TODO: This test is not very good; we should improve it.
// The test should do more allocations before the creation of the ZygoteSpace, and then do
// allocations after the ZygoteSpace is created. The test should also do some GCs to ensure that
// the GC works with the ZygoteSpace.
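// ZygoteSpaceTestBody exercises a malloc space before and after it is split into a ZygoteSpace
// plus a new allocation space via MallocSpace::CreateZygoteSpace().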
void SpaceTest::ZygoteSpaceTestBody(CreateSpaceFn create_space) {
  size_t dummy;
  MallocSpace* space(create_space("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size;
  SirtRef<mirror::Object> ptr1(self, Alloc(space, self, 1 * MB, &ptr1_bytes_allocated,
                                           &ptr1_usable_size));
  EXPECT_TRUE(ptr1.get() != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size;
  SirtRef<mirror::Object> ptr3(self, AllocWithGrowth(space, self, 8 * MB, &ptr3_bytes_allocated,
                                                     &ptr3_usable_size));
  EXPECT_TRUE(ptr3.get() != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = space->Alloc(self, 8 * MB, &dummy, nullptr);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = space->AllocWithGrowth(self, 8 * MB, &dummy, nullptr);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  EXPECT_EQ(free3, space->Free(self, ptr3.reset(nullptr)));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size;
  SirtRef<mirror::Object> ptr6(self, AllocWithGrowth(space, self, 9 * MB, &ptr6_bytes_allocated,
                                                     &ptr6_usable_size));
  EXPECT_TRUE(ptr6.get() != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.get(), nullptr);
  space->Free(self, ptr1.reset(nullptr));
  EXPECT_LE(1U * MB, free1);

  // Make sure that the zygote space isn't directly at the start of the space.
  EXPECT_TRUE(space->Alloc(self, 1U * MB, &dummy, nullptr) != nullptr);

  gc::Heap* heap = Runtime::Current()->GetHeap();
  space::Space* old_space = space;
  heap->RemoveSpace(old_space);
  heap->RevokeAllThreadLocalBuffers();
  // Create the zygote space from the current space; the replacement allocation space is returned
  // through &space.
  space::ZygoteSpace* zygote_space = space->CreateZygoteSpace("alloc space",
                                                              heap->IsLowMemoryMode(),
                                                              &space);
  delete old_space;
  // Add the zygote space.
  AddSpace(zygote_space, false);

  // Make the new allocation space findable to the heap; it will also be deleted when the runtime
  // is cleaned up.
  AddSpace(space, false);

  // Succeeds, fits without adjusting the footprint limit.
  ptr1.reset(Alloc(space, self, 1 * MB, &ptr1_bytes_allocated, &ptr1_usable_size));
  EXPECT_TRUE(ptr1.get() != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  ptr3.reset(AllocWithGrowth(space, self, 2 * MB, &ptr3_bytes_allocated, &ptr3_usable_size));
  EXPECT_TRUE(ptr3.get() != nullptr);
  EXPECT_LE(2U * MB, ptr3_bytes_allocated);
  EXPECT_LE(2U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  space->Free(self, ptr3.reset(nullptr));

  // Final clean up.
  free1 = space->AllocationSize(ptr1.get(), nullptr);
  space->Free(self, ptr1.reset(nullptr));
  EXPECT_LE(1U * MB, free1);
}

void SpaceTest::AllocAndFreeTestBody(CreateSpaceFn create_space) {
  size_t dummy = 0;
  MallocSpace* space(create_space("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size;
  SirtRef<mirror::Object> ptr1(self, Alloc(space, self, 1 * MB, &ptr1_bytes_allocated,
                                           &ptr1_usable_size));
  EXPECT_TRUE(ptr1.get() != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size;
  SirtRef<mirror::Object> ptr3(self, AllocWithGrowth(space, self, 8 * MB, &ptr3_bytes_allocated,
                                                     &ptr3_usable_size));
  EXPECT_TRUE(ptr3.get() != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = Alloc(space, self, 8 * MB, &dummy, nullptr);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = AllocWithGrowth(space, self, 8 * MB, &dummy, nullptr);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  space->Free(self, ptr3.reset(nullptr));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size;
  SirtRef<mirror::Object> ptr6(self, AllocWithGrowth(space, self, 9 * MB, &ptr6_bytes_allocated,
                                                     &ptr6_usable_size));
  EXPECT_TRUE(ptr6.get() != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.get(), nullptr);
  space->Free(self, ptr1.reset(nullptr));
  EXPECT_LE(1U * MB, free1);
}

void SpaceTest::AllocAndFreeListTestBody(CreateSpaceFn create_space) {
  MallocSpace* space(create_space("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Succeeds, fits without adjusting the max allowed footprint.
  mirror::Object* lots_of_objects[1024];
  for (size_t i = 0; i < arraysize(lots_of_objects); i++) {
    size_t allocation_size, usable_size;
    size_t size_of_zero_length_byte_array = SizeOfZeroLengthByteArray();
    lots_of_objects[i] = Alloc(space, self, size_of_zero_length_byte_array, &allocation_size,
                               &usable_size);
    EXPECT_TRUE(lots_of_objects[i] != nullptr);
    SirtRef<mirror::Object> obj(self, lots_of_objects[i]);
    lots_of_objects[i] = obj.get();
    size_t computed_usable_size;
    EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size));
    EXPECT_EQ(usable_size, computed_usable_size);
  }

  // Release memory.
  space->FreeList(self, arraysize(lots_of_objects), lots_of_objects);

  // Succeeds, fits by adjusting the max allowed footprint.
  for (size_t i = 0; i < arraysize(lots_of_objects); i++) {
    size_t allocation_size, usable_size;
    lots_of_objects[i] = AllocWithGrowth(space, self, 1024, &allocation_size, &usable_size);
    EXPECT_TRUE(lots_of_objects[i] != nullptr);
    SirtRef<mirror::Object> obj(self, lots_of_objects[i]);
    lots_of_objects[i] = obj.get();
    size_t computed_usable_size;
    EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size));
    EXPECT_EQ(usable_size, computed_usable_size);
  }

  // Release memory.
  space->FreeList(self, arraysize(lots_of_objects), lots_of_objects);
}

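// Fills the space with objects until allocation repeatedly fails, then frees them in waves while
// trimming. A positive object_size requests fixed-size allocations; a negative object_size
// requests random sizes up to -object_size. Round 1 allocates with Alloc() (no footprint growth);
// later rounds use AllocWithGrowth().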
void SpaceTest::SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space, intptr_t object_size,
                                                    int round, size_t growth_limit) {
  if (((object_size > 0 && object_size >= static_cast<intptr_t>(growth_limit))) ||
      ((object_size < 0 && -object_size >= static_cast<intptr_t>(growth_limit)))) {
    // No allocation can succeed.
    return;
  }

  // The space's footprint equals the amount of resources requested from the system.
  size_t footprint = space->GetFootprint();

  // The space must at least have its bookkeeping allocated.
  EXPECT_GT(footprint, 0u);

  // But it shouldn't exceed the growth limit.
  EXPECT_LE(footprint, growth_limit);

  // The space's size shouldn't exceed the growth limit either.
  EXPECT_LE(space->Size(), growth_limit);

  // This invariant should always hold, or else the space has grown to be larger than what the
  // space believes its size is (which will break invariants).
  EXPECT_GE(space->Size(), footprint);

  // Fill the space with lots of small objects up to the growth limit.
  size_t max_objects = (growth_limit / (object_size > 0 ? object_size : 8)) + 1;
  UniquePtr<mirror::Object*[]> lots_of_objects(new mirror::Object*[max_objects]);
  size_t last_object = 0;  // last object for which allocation succeeded
  size_t amount_allocated = 0;  // amount of space allocated
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  size_t rand_seed = 123456789;
  for (size_t i = 0; i < max_objects; i++) {
    size_t alloc_fails = 0;  // number of failed allocations
    size_t max_fails = 30;  // number of times we fail allocation before giving up
    for (; alloc_fails < max_fails; alloc_fails++) {
      size_t alloc_size;
      if (object_size > 0) {
        alloc_size = object_size;
      } else {
        alloc_size = test_rand(&rand_seed) % static_cast<size_t>(-object_size);
        // Note the minimum size, which is the size of a zero-length byte array.
        size_t size_of_zero_length_byte_array = SizeOfZeroLengthByteArray();
        if (alloc_size < size_of_zero_length_byte_array) {
          alloc_size = size_of_zero_length_byte_array;
        }
      }
      SirtRef<mirror::Object> object(self, nullptr);
      size_t bytes_allocated = 0;
      if (round <= 1) {
        object.reset(Alloc(space, self, alloc_size, &bytes_allocated, nullptr));
      } else {
        object.reset(AllocWithGrowth(space, self, alloc_size, &bytes_allocated, nullptr));
      }
      footprint = space->GetFootprint();
      EXPECT_GE(space->Size(), footprint);  // invariant
      if (object.get() != nullptr) {  // allocation succeeded
        lots_of_objects[i] = object.get();
        size_t allocation_size = space->AllocationSize(object.get(), nullptr);
        EXPECT_EQ(bytes_allocated, allocation_size);
        if (object_size > 0) {
          EXPECT_GE(allocation_size, static_cast<size_t>(object_size));
        } else {
          EXPECT_GE(allocation_size, 8u);
        }
        amount_allocated += allocation_size;
        break;
      }
    }
    if (alloc_fails == max_fails) {
      last_object = i;
      break;
    }
  }
  CHECK_NE(last_object, 0u);  // we should have filled the space
  EXPECT_GT(amount_allocated, 0u);

  // We shouldn't have gone past the growth_limit.
  EXPECT_LE(amount_allocated, growth_limit);
  EXPECT_LE(footprint, growth_limit);
  EXPECT_LE(space->Size(), growth_limit);

  // Footprint and size should agree with the amount allocated.
  EXPECT_GE(footprint, amount_allocated);
  EXPECT_GE(space->Size(), amount_allocated);

  // Release storage in a semi-ad-hoc manner.
  size_t free_increment = 96;
  while (true) {
    {
      ScopedThreadStateChange tsc(self, kNative);
      // Give the space a haircut.
      space->Trim();
    }

    // Bounds sanity checks.
    footprint = space->GetFootprint();
    EXPECT_LE(amount_allocated, growth_limit);
    EXPECT_GE(footprint, amount_allocated);
    EXPECT_LE(footprint, growth_limit);
    EXPECT_GE(space->Size(), amount_allocated);
    EXPECT_LE(space->Size(), growth_limit);

    if (free_increment == 0) {
      break;
    }

    // Free some objects.
    for (size_t i = 0; i < last_object; i += free_increment) {
      mirror::Object* object = lots_of_objects.get()[i];
      if (object == nullptr) {
        continue;
      }
      size_t allocation_size = space->AllocationSize(object, nullptr);
      if (object_size > 0) {
        EXPECT_GE(allocation_size, static_cast<size_t>(object_size));
      } else {
        EXPECT_GE(allocation_size, 8u);
      }
      space->Free(self, object);
      lots_of_objects.get()[i] = nullptr;
      amount_allocated -= allocation_size;
      footprint = space->GetFootprint();
      EXPECT_GE(space->Size(), footprint);  // invariant
    }

    free_increment >>= 1;
  }

  // The space has become empty here before allocating a large object
  // below. For RosAlloc, revoke thread-local runs, which are kept
  // even when empty for a performance reason, so that they won't
  // cause the following large object allocation to fail due to
  // potential fragmentation. Note they are normally revoked at each
  // GC (but there is no GC here).
  space->RevokeAllThreadLocalBuffers();

  // All memory was released; try a large allocation to check that freed memory is being coalesced.
  SirtRef<mirror::Object> large_object(self, nullptr);
  size_t three_quarters_space = (growth_limit / 2) + (growth_limit / 4);
  size_t bytes_allocated = 0;
  if (round <= 1) {
    large_object.reset(Alloc(space, self, three_quarters_space, &bytes_allocated, nullptr));
  } else {
    large_object.reset(AllocWithGrowth(space, self, three_quarters_space, &bytes_allocated,
                                       nullptr));
  }
  EXPECT_TRUE(large_object.get() != nullptr);

  // Sanity check the footprint.
  footprint = space->GetFootprint();
  EXPECT_LE(footprint, growth_limit);
  EXPECT_GE(space->Size(), footprint);
  EXPECT_LE(space->Size(), growth_limit);

  // Clean up.
  space->Free(self, large_object.reset(nullptr));

  // Sanity check the footprint.
  footprint = space->GetFootprint();
  EXPECT_LE(footprint, growth_limit);
  EXPECT_GE(space->Size(), footprint);
  EXPECT_LE(space->Size(), growth_limit);
}

void SpaceTest::SizeFootPrintGrowthLimitAndTrimDriver(size_t object_size, CreateSpaceFn create_space) {
  if (object_size < SizeOfZeroLengthByteArray()) {
    // Too small for the object layout/model.
    return;
  }
  size_t initial_size = 4 * MB;
  size_t growth_limit = 8 * MB;
  size_t capacity = 16 * MB;
  MallocSpace* space(create_space("test", initial_size, growth_limit, capacity, nullptr));
  ASSERT_TRUE(space != nullptr);

  // Basic sanity checks.
  EXPECT_EQ(space->Capacity(), growth_limit);
  EXPECT_EQ(space->NonGrowthLimitCapacity(), capacity);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space);

  // In this round we don't allocate with growth and therefore can't grow past the initial size.
  // This effectively makes the growth_limit the initial_size, so assert this.
  SizeFootPrintGrowthLimitAndTrimBody(space, object_size, 1, initial_size);
  SizeFootPrintGrowthLimitAndTrimBody(space, object_size, 2, growth_limit);
  // Remove the growth limit.
  space->ClearGrowthLimit();
  EXPECT_EQ(space->Capacity(), capacity);
  SizeFootPrintGrowthLimitAndTrimBody(space, object_size, 3, capacity);
}

#define TEST_SizeFootPrintGrowthLimitAndTrimStatic(name, spaceName, spaceFn, size) \
  TEST_F(spaceName##StaticTest, SizeFootPrintGrowthLimitAndTrim_AllocationsOf_##name) { \
    SizeFootPrintGrowthLimitAndTrimDriver(size, spaceFn); \
  }

#define TEST_SizeFootPrintGrowthLimitAndTrimRandom(name, spaceName, spaceFn, size) \
  TEST_F(spaceName##RandomTest, SizeFootPrintGrowthLimitAndTrim_RandomAllocationsWithMax_##name) { \
    SizeFootPrintGrowthLimitAndTrimDriver(-size, spaceFn); \
  }

#define TEST_SPACE_CREATE_FN_BASE(spaceName, spaceFn) \
  class spaceName##BaseTest : public SpaceTest { \
  }; \
  \
  TEST_F(spaceName##BaseTest, Init) { \
    InitTestBody(spaceFn); \
  } \
  TEST_F(spaceName##BaseTest, ZygoteSpace) { \
    ZygoteSpaceTestBody(spaceFn); \
  } \
  TEST_F(spaceName##BaseTest, AllocAndFree) { \
    AllocAndFreeTestBody(spaceFn); \
  } \
  TEST_F(spaceName##BaseTest, AllocAndFreeList) { \
    AllocAndFreeListTestBody(spaceFn); \
  }

#define TEST_SPACE_CREATE_FN_STATIC(spaceName, spaceFn) \
  class spaceName##StaticTest : public SpaceTest { \
  }; \
  \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(12B, spaceName, spaceFn, 12) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(16B, spaceName, spaceFn, 16) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(24B, spaceName, spaceFn, 24) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(32B, spaceName, spaceFn, 32) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(64B, spaceName, spaceFn, 64) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(128B, spaceName, spaceFn, 128) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(1KB, spaceName, spaceFn, 1 * KB) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(4KB, spaceName, spaceFn, 4 * KB) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(1MB, spaceName, spaceFn, 1 * MB) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(4MB, spaceName, spaceFn, 4 * MB) \
  TEST_SizeFootPrintGrowthLimitAndTrimStatic(8MB, spaceName, spaceFn, 8 * MB)

#define TEST_SPACE_CREATE_FN_RANDOM(spaceName, spaceFn) \
  class spaceName##RandomTest : public SpaceTest { \
  }; \
  \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(16B, spaceName, spaceFn, 16) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(24B, spaceName, spaceFn, 24) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(32B, spaceName, spaceFn, 32) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(64B, spaceName, spaceFn, 64) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(128B, spaceName, spaceFn, 128) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(1KB, spaceName, spaceFn, 1 * KB) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(4KB, spaceName, spaceFn, 4 * KB) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(1MB, spaceName, spaceFn, 1 * MB) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(4MB, spaceName, spaceFn, 4 * MB) \
  TEST_SizeFootPrintGrowthLimitAndTrimRandom(8MB, spaceName, spaceFn, 8 * MB)

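// Usage sketch (illustrative only, not compiled here): a concrete space test provides a factory
// matching SpaceTest::CreateSpaceFn and instantiates the suites with the macros above. The
// CreateDlMallocSpace name and the exact DlMallocSpace::Create() argument list below are
// assumptions and may differ from the real space test files:
//
//   MallocSpace* CreateDlMallocSpace(const std::string& name, size_t initial_size,
//                                    size_t growth_limit, size_t capacity,
//                                    byte* requested_begin) {
//     return DlMallocSpace::Create(name, initial_size, growth_limit, capacity, requested_begin,
//                                  false);
//   }
//
//   TEST_SPACE_CREATE_FN_BASE(DlMallocSpace, CreateDlMallocSpace)
//   TEST_SPACE_CREATE_FN_STATIC(DlMallocSpace, CreateDlMallocSpace)
//   TEST_SPACE_CREATE_FN_RANDOM(DlMallocSpace, CreateDlMallocSpace)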
}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_SPACE_TEST_H_