/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "bench/Benchmark.h"
#include "include/private/GrTypesPriv.h"
#include "include/utils/SkRandom.h"
#include "src/gpu/GrMemoryPool.h"

#include <type_traits>

namespace {

// sizeof(Aligned) is a multiple of GrMemoryPool::kAlignment whether that alignment is 4, 8, or
// 16 bytes.
using Aligned = std::aligned_storage<32, GrMemoryPool::kAlignment>::type;
static_assert(sizeof(Aligned) == 32);
static_assert(sizeof(Aligned) % GrMemoryPool::kAlignment == 0);

// sizeof(Unaligned) is not a multiple of GrMemoryPool::kAlignment (and will not be a multiple of
// max_align_t when that is 4, 8, or 16, as desired).
using Unaligned = std::aligned_storage<30, 2>::type;
static_assert(sizeof(Unaligned) == 30);
static_assert(sizeof(Unaligned) % GrMemoryPool::kAlignment != 0);

// Whether max_align_t is 16, 8, or 4, the padded Unaligned occupies the same 32 bytes as Aligned.
static_assert(GrAlignTo(sizeof(Unaligned), GrMemoryPool::kAlignment) == sizeof(Aligned));
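
// A minimal illustration of the padding arithmetic above, with the alignment written out as a
// literal instead of kAlignment (16 is assumed only for this example; it is the usual value of
// alignof(std::max_align_t) on 64-bit platforms): 30 bytes round up to 32.
static_assert(GrAlignTo(30, 16) == 32);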

// All benchmarks create and delete the same number of objects. The key differences are the order
// of operations, the size of the objects being allocated, and the size of the pool.
typedef void (*RunBenchProc)(GrMemoryPool*, int);

}  // namespace

// N objects are created, and then destroyed in reverse order (fully unwinding the cursor within
// each block of the memory pool).
template <typename T>
static void run_stack(GrMemoryPool* pool, int loops) {
    static const int kMaxObjects = 4 * (1 << 10);
    T* objs[kMaxObjects];
    for (int i = 0; i < loops; ++i) {
        // Push N objects into the pool (or heap if pool is null)
        for (int j = 0; j < kMaxObjects; ++j) {
            objs[j] = pool ? (T*) pool->allocate(sizeof(T)) : new T;
        }
        // Pop N objects off in LIFO order
        for (int j = kMaxObjects - 1; j >= 0; --j) {
            if (pool) {
                pool->release(objs[j]);
            } else {
                delete objs[j];
            }
        }

        // Everything has been cleaned up for the next loop
    }
}

// N objects are created, and then destroyed in creation order (the pool is not able to unwind
// the cursor within each block, but it can reclaim a block once everything in it is destroyed).
template <typename T>
static void run_queue(GrMemoryPool* pool, int loops) {
    static const int kMaxObjects = 4 * (1 << 10);
    T* objs[kMaxObjects];
    for (int i = 0; i < loops; ++i) {
        // Push N objects into the pool (or heap if pool is null)
        for (int j = 0; j < kMaxObjects; ++j) {
            objs[j] = pool ? (T*) pool->allocate(sizeof(T)) : new T;
        }
        // Pop N objects off in FIFO order
        for (int j = 0; j < kMaxObjects; ++j) {
            if (pool) {
                pool->release(objs[j]);
            } else {
                delete objs[j];
            }
        }

        // Everything has been cleaned up for the next loop
    }
}

// N objects are created and immediately destroyed, so space at the start of the pool should be
// immediately reclaimed.
template <typename T>
static void run_pushpop(GrMemoryPool* pool, int loops) {
    static const int kMaxObjects = 4 * (1 << 10);
    T* objs[kMaxObjects];
    for (int i = 0; i < loops; ++i) {
        // Push N objects into the pool (or heap if pool is null)
        for (int j = 0; j < kMaxObjects; ++j) {
            if (pool) {
                objs[j] = (T*) pool->allocate(sizeof(T));
                pool->release(objs[j]);
            } else {
                objs[j] = new T;
                delete objs[j];
            }
        }

        // Everything has been cleaned up for the next loop
    }
}

// N object creations and destructions are invoked in random order.
template <typename T>
static void run_random(GrMemoryPool* pool, int loops) {
    static const int kMaxObjects = 4 * (1 << 10);
    T* objs[kMaxObjects];
    for (int i = 0; i < kMaxObjects; ++i) {
        objs[i] = nullptr;
    }

    auto del = [&](int j) {
        // Delete
        if (pool) {
            pool->release(objs[j]);
        } else {
            delete objs[j];
        }
        objs[j] = nullptr;
    };

    SkRandom r;
    for (int i = 0; i < loops; ++i) {
        // Execute 2*kMaxObjects operations, which should average out to N creates and N destroys;
        // the sweep below then deletes whatever happens to remain allocated.
        for (int j = 0; j < 2 * kMaxObjects; ++j) {
            int k = r.nextRangeU(0, kMaxObjects-1);
            if (objs[k]) {
                del(k);
            } else {
                // Create
                objs[k] = pool ? (T*) pool->allocate(sizeof(T)) : new T;
            }
        }

        // Ensure everything is null for the next loop
        for (int j = 0; j < kMaxObjects; ++j) {
            if (objs[j]) {
                del(j);
            }
        }
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////

class GrMemoryPoolBench : public Benchmark {
public:
    GrMemoryPoolBench(const char* name, RunBenchProc proc, int poolSize)
            : fPoolSize(poolSize)
            , fProc(proc) {
        fName.printf("grmemorypool_%s", name);
    }

    bool isSuitableFor(Backend backend) override {
        return backend == kNonRendering_Backend;
    }

protected:
    const char* onGetName() override {
        return fName.c_str();
    }

    void onDraw(int loops, SkCanvas*) override {
        std::unique_ptr<GrMemoryPool> pool;
        if (fPoolSize > 0) {
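            // A note on the call below: both GrMemoryPool::Make() arguments are set to fPoolSize,
            // which (assuming the Make(preallocSize, minAllocSize) signature) sizes the
            // preallocated block and the minimum size of any follow-up blocks identically.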
            pool = GrMemoryPool::Make(fPoolSize, fPoolSize);
        } // else keep it null to test regular new/delete performance

        fProc(pool.get(), loops);
    }

    SkString fName;
    int fPoolSize;
    RunBenchProc fProc;

    typedef Benchmark INHERITED;
};

///////////////////////////////////////////////////////////////////////////////////////////////////

static const int kLargePool = 10 * (1 << 10);
static const int kSmallPool = GrMemoryPool::kMinAllocationSize;

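// Each DEF_BENCH below registers one cell of the pattern x alignment x pool-size grid, named
// grmemorypool_<pattern>_<aligned|unaligned>_<lg|sm|ref>. "lg" runs use kLargePool so each block
// holds many objects, "sm" runs pin the pool to GrMemoryPool::kMinAllocationSize so new blocks
// are needed far more often, and "ref" runs pass a pool size of 0, which onDraw() treats as
// "no pool" and times plain new/delete instead.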
DEF_BENCH( return new GrMemoryPoolBench("stack_aligned_lg", run_stack<Aligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("stack_aligned_sm", run_stack<Aligned>, kSmallPool); )
DEF_BENCH( return new GrMemoryPoolBench("stack_aligned_ref", run_stack<Aligned>, 0); )
DEF_BENCH( return new GrMemoryPoolBench("stack_unaligned_lg", run_stack<Unaligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("stack_unaligned_sm", run_stack<Unaligned>, kSmallPool); )
DEF_BENCH( return new GrMemoryPoolBench("stack_unaligned_ref", run_stack<Unaligned>, 0); )

DEF_BENCH( return new GrMemoryPoolBench("queue_aligned_lg", run_queue<Aligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("queue_aligned_sm", run_queue<Aligned>, kSmallPool); )
DEF_BENCH( return new GrMemoryPoolBench("queue_aligned_ref", run_queue<Aligned>, 0); )
DEF_BENCH( return new GrMemoryPoolBench("queue_unaligned_lg", run_queue<Unaligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("queue_unaligned_sm", run_queue<Unaligned>, kSmallPool); )
DEF_BENCH( return new GrMemoryPoolBench("queue_unaligned_ref", run_queue<Unaligned>, 0); )

DEF_BENCH( return new GrMemoryPoolBench("pushpop_aligned_lg", run_pushpop<Aligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("pushpop_aligned_sm", run_pushpop<Aligned>, kSmallPool); )
// DEF_BENCH( return new GrMemoryPoolBench("pushpop_aligned_ref", run_pushpop<Aligned>, 0); )
DEF_BENCH( return new GrMemoryPoolBench("pushpop_unaligned_lg", run_pushpop<Unaligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("pushpop_unaligned_sm", run_pushpop<Unaligned>, kSmallPool); )
// DEF_BENCH( return new GrMemoryPoolBench("pushpop_unaligned_ref", run_pushpop<Unaligned>, 0); )
// The pushpop_*_ref benchmarks are not meaningful because the compiler completely optimizes away
// the immediately paired new/delete.

DEF_BENCH( return new GrMemoryPoolBench("random_aligned_lg", run_random<Aligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("random_aligned_sm", run_random<Aligned>, kSmallPool); )
DEF_BENCH( return new GrMemoryPoolBench("random_aligned_ref", run_random<Aligned>, 0); )
DEF_BENCH( return new GrMemoryPoolBench("random_unaligned_lg", run_random<Unaligned>, kLargePool); )
DEF_BENCH( return new GrMemoryPoolBench("random_unaligned_sm", run_random<Unaligned>, kSmallPool); )
DEF_BENCH( return new GrMemoryPoolBench("random_unaligned_ref", run_random<Unaligned>, 0); )