// Copyright (c) Facebook, Inc. and its affiliates.
// All rights reserved.
//
// Copyright 2019 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <algorithm>
#include <cfloat>
#include <cmath>
#include <cstdint>
#include <functional>
#include <random>
#include <vector>

#include <cpuinfo.h>

#include <benchmark/benchmark.h>
#include "bench/utils.h"
#include <xnnpack/AlignedAllocator.h>
#include <xnnpack/common.h>
#include <xnnpack/requantization-stubs.h>

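// Small integer helpers: round-up division, rounding up to a multiple, and minimum.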
inline uint32_t divide_round_up(uint32_t x, uint32_t q) {
  return x / q + uint32_t(x % q != 0);
}

inline uint32_t round_up(uint32_t x, uint32_t q) {
  return q * divide_round_up(x, q);
}

inline uint32_t min(uint32_t a, uint32_t b) {
  return a < b ? a : b;
}

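// Benchmark fixture that sizes the working set so one int32 input element plus
// one uint8 output element per requantized value fits in the L1 data cache
// (minus a small reserve), with the element count rounded down to a multiple of 16.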
class Requantization : public benchmark::Fixture {
 public:
  inline Requantization()
  {
    cpuinfo_initialize();
    const size_t l1d_size = cpuinfo_get_l1d_cache(0)->size;
    const size_t l1d_reserve = 1024;
    n_ = (l1d_size - l1d_reserve) / (sizeof(int32_t) + sizeof(uint8_t));
    n_ = n_ / 16 * 16;
  }

  virtual void SetUp(const benchmark::State&) override
  {
    std::random_device random_device;
    auto rng = std::mt19937(random_device());
    auto s32rng = std::bind(std::uniform_int_distribution<int32_t>(), rng);

    input_.resize(n());
    std::generate(input_.begin(), input_.end(), std::ref(s32rng));
    output_.resize(n());
    std::fill(output_.begin(), output_.end(), 0xA5);
  }

  virtual void TearDown(benchmark::State& state) override
  {
    state.SetItemsProcessed(uint64_t(state.iterations()) * n());
    state.SetBytesProcessed(uint64_t(state.iterations()) * n() * (sizeof(int32_t) + sizeof(uint8_t)));
    input_.clear();
    output_.clear();
  }

  inline const int32_t* input() const
  {
    return input_.data();
  }

  inline uint8_t* output()
  {
    return output_.data();
  }

  inline size_t n() const
  {
    return n_;
  }

 protected:
  std::vector<int32_t, AlignedAllocator<int32_t, 32>> input_;
  std::vector<uint8_t> output_;
  size_t n_;
};

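// Each benchmark requantizes n() 32-bit accumulators to uint8 with the same
// parameters: scale = 2^-12 (0x1.0p-12f), zero point = 128, and output clamped
// to the [qmin = 1, qmax = 254] range.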
BENCHMARK_F(Requantization, precise__scalar_unsigned32)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__scalar_unsigned32(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, precise__scalar_unsigned64)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__scalar_unsigned64(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, precise__scalar_signed64)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__scalar_signed64(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, fp32__scalar_lrintf)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_fp32__scalar_lrintf(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, fp32__scalar_magic)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_fp32__scalar_magic(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, gemmlowp__scalar)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_gemmlowp__scalar(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

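// Portable SIMD (psimd) implementations; excluded on Asm.js and WebAssembly targets.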
#if !XNN_ARCH_ASMJS && !XNN_ARCH_WASM
BENCHMARK_F(Requantization, precise__psimd)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__psimd(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, fp32__psimd)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_fp32__psimd(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}
#endif  // !XNN_ARCH_ASMJS && !XNN_ARCH_WASM

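// ARM NEON implementations.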
#if XNN_ARCH_ARM || XNN_ARCH_ARM64
BENCHMARK_F(Requantization, precise__neon)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__neon(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, fp32__neon)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_fp32__neon(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, q31__neon)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_q31__neon(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, gemmlowp__neon)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_gemmlowp__neon(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}
#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64

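// x86/x86-64 SSE2, SSSE3, and SSE4 implementations.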
#if XNN_ARCH_X86 || XNN_ARCH_X86_64
BENCHMARK_F(Requantization, precise__sse2)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__sse2(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, precise__ssse3)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__ssse3(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, precise__sse4)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_precise__sse4(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, fp32__sse2)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_fp32__sse2(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, q31__sse2)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_q31__sse2(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, q31__ssse3)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_q31__ssse3(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, q31__sse4)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_q31__sse4(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, gemmlowp__sse2)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_gemmlowp__sse2(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, gemmlowp__ssse3)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_gemmlowp__ssse3(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}

BENCHMARK_F(Requantization, gemmlowp__sse4)(benchmark::State& state) {
  for (auto _ : state) {
    xnn_requantize_gemmlowp__sse4(
        n(), input(), 0x1.0p-12f /* scale */, 128 /* zero point */, 1 /* qmin */, 254 /* qmax */, output());
  }
}
#endif  // XNN_ARCH_X86 || XNN_ARCH_X86_64

#ifndef XNNPACK_BENCHMARK_NO_MAIN
BENCHMARK_MAIN();
#endif