// Copyright 2020 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.

#include "pw_ring_buffer/prefixed_entry_ring_buffer.h"

#include <array>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <span>

#include "pw_assert/assert.h"
#include "pw_containers/vector.h"
#include "pw_unit_test/framework.h"

using std::byte;

namespace pw {
namespace ring_buffer {
namespace {

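// Operations on a ring buffer with no backing buffer attached should fail
// cleanly: FailedPrecondition for data operations and InvalidArgument for bad
// SetBuffer() arguments.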
TEST(PrefixedEntryRingBuffer, NoBuffer) {
  PrefixedEntryRingBuffer ring(false);

  byte buf[32];
  size_t count;

  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(ring.SetBuffer(std::span<byte>(nullptr, 10u)),
            Status::InvalidArgument());
  EXPECT_EQ(ring.SetBuffer(std::span(buf, 0u)), Status::InvalidArgument());
  EXPECT_EQ(ring.FrontEntryDataSizeBytes(), 0u);

  EXPECT_EQ(ring.PushBack(buf), Status::FailedPrecondition());
  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(ring.PeekFront(buf, &count), Status::FailedPrecondition());
  EXPECT_EQ(count, 0u);
  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(ring.PeekFrontWithPreamble(buf, &count),
            Status::FailedPrecondition());
  EXPECT_EQ(count, 0u);
  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(ring.PopFront(), Status::FailedPrecondition());
  EXPECT_EQ(ring.EntryCount(), 0u);
}

// Single entry to write/read/pop over and over again.
constexpr byte single_entry_data[] = {byte(1),
                                      byte(2),
                                      byte(3),
                                      byte(4),
                                      byte(5),
                                      byte(6),
                                      byte(7),
                                      byte(8),
                                      byte(9)};
constexpr size_t single_entry_total_size = sizeof(single_entry_data) + 1;
constexpr size_t single_entry_test_buffer_size =
    (single_entry_total_size * 7) / 2;

// Make sure single_entry_total_size is even so single_entry_test_buffer_size
// gets the proper wrap/even behavior when getting to the end of the buffer.
static_assert((single_entry_total_size % 2) == 0u);
constexpr size_t kSingleEntryCycles = 300u;
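
// As exercised by the expect_buffer construction below, each entry is laid
// out as an optional single-byte user preamble (only when user-data mode is
// enabled), then a one-byte varint payload size, then the payload bytes.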

// Repeatedly write the same data, then read it back and pop it, over and over
// again.
void SingleEntryWriteReadTest(bool user_data) {
  PrefixedEntryRingBuffer ring(user_data);
  byte test_buffer[single_entry_test_buffer_size];

  byte read_buffer[single_entry_total_size];

  // Set read_size to an unexpected value to make sure result checks don't luck
  // out and happen to see a previous value.
  size_t read_size = 500U;

  EXPECT_EQ(ring.SetBuffer(test_buffer), Status::Ok());

  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(ring.PopFront(), Status::OutOfRange());
  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(ring.PushBack(std::span(single_entry_data, 0u)),
            Status::InvalidArgument());
  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(
      ring.PushBack(std::span(single_entry_data, sizeof(test_buffer) + 5)),
      Status::OutOfRange());
  EXPECT_EQ(ring.EntryCount(), 0u);
  EXPECT_EQ(ring.PeekFront(read_buffer, &read_size), Status::OutOfRange());
  EXPECT_EQ(read_size, 0u);
  read_size = 500U;
  EXPECT_EQ(ring.PeekFrontWithPreamble(read_buffer, &read_size),
            Status::OutOfRange());
  EXPECT_EQ(read_size, 0u);

  size_t user_preamble_bytes = (user_data ? 1 : 0);
  size_t data_size = sizeof(single_entry_data) - user_preamble_bytes;
  size_t data_offset = single_entry_total_size - data_size;

  byte expect_buffer[single_entry_total_size] = {};
  expect_buffer[user_preamble_bytes] = byte(data_size);
  memcpy(expect_buffer + data_offset, single_entry_data, data_size);

  for (size_t i = 0; i < kSingleEntryCycles; i++) {
    ASSERT_EQ(ring.FrontEntryDataSizeBytes(), 0u);
    ASSERT_EQ(ring.FrontEntryTotalSizeBytes(), 0u);

    ASSERT_EQ(ring.PushBack(std::span(single_entry_data, data_size), byte(i)),
              Status::Ok());
    ASSERT_EQ(ring.FrontEntryDataSizeBytes(), data_size);
    ASSERT_EQ(ring.FrontEntryTotalSizeBytes(), single_entry_total_size);

    read_size = 500U;
    ASSERT_EQ(ring.PeekFront(read_buffer, &read_size), Status::Ok());
    ASSERT_EQ(read_size, data_size);

    // ASSERT_THAT(std::span(expect_buffer).last(data_size),
    //             testing::ElementsAreArray(std::span(read_buffer, data_size)));
    ASSERT_EQ(memcmp(std::span(expect_buffer).last(data_size).data(),
                     read_buffer,
                     data_size),
              0);

    read_size = 500U;
    ASSERT_EQ(ring.PeekFrontWithPreamble(read_buffer, &read_size),
              Status::Ok());
    ASSERT_EQ(read_size, single_entry_total_size);
    ASSERT_EQ(ring.PopFront(), Status::Ok());

    if (user_data) {
      expect_buffer[0] = byte(i);
    }

    // ASSERT_THAT(std::span(expect_buffer),
    //             testing::ElementsAreArray(std::span(read_buffer)));
    ASSERT_EQ(memcmp(expect_buffer, read_buffer, single_entry_total_size), 0);
  }
}

TEST(PrefixedEntryRingBuffer, SingleEntryWriteReadNoUserData) {
  SingleEntryWriteReadTest(false);
}

TEST(PrefixedEntryRingBuffer, SingleEntryWriteReadYesUserData) {
  SingleEntryWriteReadTest(true);
}

// TODO(pwbug/196): Increase this to 5000 once we have a way to detect targets
// with more computation and memory oomph.
constexpr size_t kOuterCycles = 50u;
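// Maximum number of complete entries the test buffer can hold at once: with
// the sizes above, 35 / 10 = 3 entries (integer division).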
constexpr size_t kCountingUpMaxExpectedEntries =
    single_entry_test_buffer_size / single_entry_total_size;

// Write data filled with a byte value that increments on each write. Write
// many times without reading or popping, then check that the ring buffer
// holds the correct contents.
template <bool user_data>
void CountingUpWriteReadTest() {
  PrefixedEntryRingBuffer ring(user_data);
  byte test_buffer[single_entry_test_buffer_size];

  EXPECT_EQ(ring.SetBuffer(test_buffer), Status::Ok());
  EXPECT_EQ(ring.EntryCount(), 0u);

  constexpr size_t data_size = sizeof(single_entry_data) - (user_data ? 1 : 0);

  for (size_t i = 0; i < kOuterCycles; i++) {
    size_t seed = i;

    byte write_buffer[data_size];

    size_t j;
    for (j = 0; j < kSingleEntryCycles; j++) {
      memset(write_buffer, j + seed, sizeof(write_buffer));

      ASSERT_EQ(ring.PushBack(write_buffer), Status::Ok());

      size_t expected_count = (j < kCountingUpMaxExpectedEntries)
                                  ? j + 1
                                  : kCountingUpMaxExpectedEntries;
      ASSERT_EQ(ring.EntryCount(), expected_count);
    }
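    // Older entries were evicted as the buffer filled; compute the fill value
    // of the oldest entry still in the ring.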
    size_t final_write_j = j;
    size_t fill_val = seed + final_write_j - kCountingUpMaxExpectedEntries;

    for (j = 0; j < kCountingUpMaxExpectedEntries; j++) {
      byte read_buffer[sizeof(write_buffer)];
      size_t read_size;
      memset(write_buffer, fill_val + j, sizeof(write_buffer));
      ASSERT_EQ(ring.PeekFront(read_buffer, &read_size), Status::Ok());

      ASSERT_EQ(memcmp(write_buffer, read_buffer, data_size), 0);

      ASSERT_EQ(ring.PopFront(), Status::Ok());
    }
  }
}

TEST(PrefixedEntryRingBuffer, CountingUpWriteReadNoUserData) {
  CountingUpWriteReadTest<false>();
}

TEST(PrefixedEntryRingBuffer, CountingUpWriteReadYesUserData) {
  CountingUpWriteReadTest<true>();
}

// Create statically to prevent allocating a capture in the lambda below.
static pw::Vector<byte, single_entry_total_size> read_buffer;

// Repeatedly write the same data, then read it back and pop it, over and over
// again.
void SingleEntryWriteReadWithSectionWriterTest(bool user_data) {
  PrefixedEntryRingBuffer ring(user_data);
  byte test_buffer[single_entry_test_buffer_size];

  EXPECT_EQ(ring.SetBuffer(test_buffer), Status::Ok());

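  // The "section writer" output callback may be handed the entry in one or
  // more contiguous chunks; append whatever arrives to the static read_buffer.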
  auto output = [](std::span<const byte> src) -> Status {
    for (byte b : src) {
      read_buffer.push_back(b);
    }
    return Status::Ok();
  };

  size_t user_preamble_bytes = (user_data ? 1 : 0);
  size_t data_size = sizeof(single_entry_data) - user_preamble_bytes;
  size_t data_offset = single_entry_total_size - data_size;

  byte expect_buffer[single_entry_total_size] = {};
  expect_buffer[user_preamble_bytes] = byte(data_size);
  memcpy(expect_buffer + data_offset, single_entry_data, data_size);

  for (size_t i = 0; i < kSingleEntryCycles; i++) {
    ASSERT_EQ(ring.FrontEntryDataSizeBytes(), 0u);
    ASSERT_EQ(ring.FrontEntryTotalSizeBytes(), 0u);

    ASSERT_EQ(ring.PushBack(std::span(single_entry_data, data_size), byte(i)),
              Status::Ok());
    ASSERT_EQ(ring.FrontEntryDataSizeBytes(), data_size);
    ASSERT_EQ(ring.FrontEntryTotalSizeBytes(), single_entry_total_size);

    read_buffer.clear();
    ASSERT_EQ(ring.PeekFront(output), Status::Ok());
    ASSERT_EQ(read_buffer.size(), data_size);

    ASSERT_EQ(memcmp(std::span(expect_buffer).last(data_size).data(),
                     read_buffer.data(),
                     data_size),
              0);

    read_buffer.clear();
    ASSERT_EQ(ring.PeekFrontWithPreamble(output), Status::Ok());
    ASSERT_EQ(read_buffer.size(), single_entry_total_size);
    ASSERT_EQ(ring.PopFront(), Status::Ok());

    if (user_data) {
      expect_buffer[0] = byte(i);
    }

    ASSERT_EQ(
        memcmp(expect_buffer, read_buffer.data(), single_entry_total_size), 0);
  }
}

TEST(PrefixedEntryRingBuffer, SingleEntryWriteReadWithSectionWriterNoUserData) {
  SingleEntryWriteReadWithSectionWriterTest(false);
}

TEST(PrefixedEntryRingBuffer,
     SingleEntryWriteReadWithSectionWriterYesUserData) {
  SingleEntryWriteReadWithSectionWriterTest(true);
}

constexpr size_t kEntrySizeBytes = 8u;
constexpr size_t kTotalEntryCount = 20u;
constexpr size_t kBufferExtraBytes = 5u;
constexpr size_t kTestBufferSize =
    (kEntrySizeBytes * kTotalEntryCount) + kBufferExtraBytes;

// Create statically to prevent allocating a capture in the lambda below.
static pw::Vector<byte, kTestBufferSize> actual_result;

void DeringTest(bool preload) {
  PrefixedEntryRingBuffer ring;

  byte test_buffer[kTestBufferSize];
  EXPECT_EQ(ring.SetBuffer(test_buffer), Status::Ok());

  // Entry data is entry size - preamble (single byte in this case).
  byte single_entry_buffer[kEntrySizeBytes - 1u];
  auto entry_data = std::span(single_entry_buffer);
  size_t i;

  // TODO(pwbug/196): Increase this to 500 once we have a way to detect targets
  // with more computation and memory oomph.
  size_t loop_goal = preload ? 50 : 1;

  for (size_t main_loop_count = 0; main_loop_count < loop_goal;
       main_loop_count++) {
    if (preload) {
      // Prime the ring buffer with some junk data to get the buffer wrapped.
      for (i = 0; i < (kTotalEntryCount * (main_loop_count % 64u)); i++) {
        memset(single_entry_buffer, i, sizeof(single_entry_buffer));
        ring.PushBack(single_entry_buffer);
      }
    }

    // Build up the expected buffer and fill the ring buffer with the test
    // data.
    pw::Vector<byte, kTestBufferSize> expected_result;
    for (i = 0; i < kTotalEntryCount; i++) {
      // First component of the entry: the varint size.
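      // (A payload size below 128 fits in a single varint byte, which the
      // static_assert below guarantees.)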
      static_assert(sizeof(single_entry_buffer) < 127);
      expected_result.push_back(byte(sizeof(single_entry_buffer)));

      // Second component of the entry: the raw data.
      memset(single_entry_buffer, 'a' + i, sizeof(single_entry_buffer));
      for (byte b : entry_data) {
        expected_result.push_back(b);
      }

      // The ring buffer internally pushes the varint size byte.
      ring.PushBack(single_entry_buffer);
    }

    // Check values before doing the dering.
    EXPECT_EQ(ring.EntryCount(), kTotalEntryCount);
    EXPECT_EQ(expected_result.size(), ring.TotalUsedBytes());

    ASSERT_EQ(ring.Dering(), Status::Ok());

    // Check values after doing the dering.
    EXPECT_EQ(ring.EntryCount(), kTotalEntryCount);
    EXPECT_EQ(expected_result.size(), ring.TotalUsedBytes());

    // Read out the entries of the ring buffer.
    actual_result.clear();
    auto output = [](std::span<const byte> src) -> Status {
      for (byte b : src) {
        actual_result.push_back(b);
      }
      return Status::Ok();
    };
    while (ring.EntryCount()) {
      ASSERT_EQ(ring.PeekFrontWithPreamble(output), Status::Ok());
      ASSERT_EQ(ring.PopFront(), Status::Ok());
    }

    // Ensure the actual result out of the ring buffer matches our manually
    // computed result.
    EXPECT_EQ(expected_result.size(), actual_result.size());
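    // After Dering(), the oldest entry should sit at the start of the backing
    // buffer, so the raw backing memory is expected to match the logical
    // read-out byte for byte.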
    ASSERT_EQ(memcmp(test_buffer, actual_result.data(), actual_result.size()),
              0);
    ASSERT_EQ(
        memcmp(
            expected_result.data(), actual_result.data(), actual_result.size()),
        0);
  }
}

TEST(PrefixedEntryRingBuffer, Dering) { DeringTest(true); }
TEST(PrefixedEntryRingBuffer, DeringNoPreload) { DeringTest(false); }

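// Helpers that use a union to reinterpret a trivially copyable value of type
// T as an array of bytes, so typed values can be pushed through the
// byte-oriented ring buffer API without manual serialization.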
template <typename T>
Status PushBack(PrefixedEntryRingBuffer& ring, T element) {
  union {
    std::array<byte, sizeof(element)> buffer;
    T item;
  } aliased;
  aliased.item = element;
  return ring.PushBack(aliased.buffer);
}

template <typename T>
Status TryPushBack(PrefixedEntryRingBuffer& ring, T element) {
  union {
    std::array<byte, sizeof(element)> buffer;
    T item;
  } aliased;
  aliased.item = element;
  return ring.TryPushBack(aliased.buffer);
}

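// Reads the front entry and reinterprets its bytes as a T. PW_CHECKs that the
// read succeeds and that the entry is exactly sizeof(T) bytes.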
template <typename T>
T PeekFront(PrefixedEntryRingBuffer& ring) {
  union {
    std::array<byte, sizeof(T)> buffer;
    T item;
  } aliased;
  size_t bytes_read = 0;
  PW_CHECK_INT_EQ(ring.PeekFront(aliased.buffer, &bytes_read), Status::Ok());
  PW_CHECK_INT_EQ(bytes_read, sizeof(T));
  return aliased.item;
}

TEST(PrefixedEntryRingBuffer, TryPushBack) {
  PrefixedEntryRingBuffer ring;
  byte test_buffer[kTestBufferSize];
  EXPECT_EQ(ring.SetBuffer(test_buffer), Status::Ok());

  // Fill up the ring buffer with a constant.
  int total_items = 0;
  while (true) {
    Status status = TryPushBack<int>(ring, 5);
    if (status.ok()) {
      total_items++;
    } else {
      EXPECT_EQ(status, Status::ResourceExhausted());
      break;
    }
  }
  EXPECT_EQ(PeekFront<int>(ring), 5);

  // Should be unable to push more items.
  for (int i = 0; i < total_items; ++i) {
    EXPECT_EQ(TryPushBack<int>(ring, 100), Status::ResourceExhausted());
    EXPECT_EQ(PeekFront<int>(ring), 5);
  }

  // Fill up the ring buffer with a different constant; PushBack succeeds by
  // evicting the oldest entries to make room.
  for (int i = 0; i < total_items; ++i) {
    EXPECT_EQ(PushBack<int>(ring, 100), Status::Ok());
  }
  EXPECT_EQ(PeekFront<int>(ring), 100);
}

}  // namespace
}  // namespace ring_buffer
}  // namespace pw