/*
 * multiorder.c: Multi-order radix tree entry testing
 * Copyright (c) 2016 Intel Corporation
 * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
 * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
16#include <linux/radix-tree.h>
17#include <linux/slab.h>
18#include <linux/errno.h>
19
20#include "test.h"
21
/*
 * Iterate i over every index covered by a multi-order entry whose
 * canonical index is @base and whose order is @order, i.e. the range
 * [base, base + 2^order).
 *
 * Arguments are parenthesized so that expression arguments (e.g. a
 * bitwise-or, or a ternary) parse correctly inside the expansion.
 */
#define for_each_index(i, base, order) \
	for (i = (base); i < (base) + (1 << (order)); i++)
24
/*
 * Insert one multi-order entry of @order whose range covers @index, then
 * verify that tag set/get/clear on any covered index behaves as if applied
 * to the canonical entry.
 */
static void __multiorder_tag_test(int index, int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int base, err, i;
	unsigned long first = 0;

	/* our canonical entry: @index rounded down to its order boundary */
	base = index & ~((1 << order) - 1);

	printf("Multiorder tag test with index %d, canonical entry %d\n",
			index, base);

	err = item_insert_order(&tree, index, order);
	assert(!err);

	/*
	 * Verify we get collisions for covered indices.  We try and fail to
	 * insert an exceptional entry so we don't leak memory via
	 * item_insert_order().
	 */
	for_each_index(i, base, order) {
		err = __radix_tree_insert(&tree, i, order,
				(void *)(0xA0 | RADIX_TREE_EXCEPTIONAL_ENTRY));
		assert(err == -EEXIST);
	}

	/* freshly inserted entry must carry no tags, at any covered index */
	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	/* setting tag 0 via any one covered index tags the whole entry */
	assert(radix_tree_tag_set(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	/*
	 * Copy tag 0 -> tag 1 over the whole range; the single multi-order
	 * entry should count as exactly one tagged item.
	 */
	assert(radix_tree_range_tag_if_tagged(&tree, &first, ~0UL, 10, 0, 1) == 1);
	assert(radix_tree_tag_clear(&tree, index, 0));

	/* tag 0 cleared everywhere, copied tag 1 visible everywhere */
	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_clear(&tree, index, 1));

	/* no residual tags anywhere in the tree */
	assert(!radix_tree_tagged(&tree, 0));
	assert(!radix_tree_tagged(&tree, 1));

	item_kill_tree(&tree);
}
78
/*
 * Run __multiorder_tag_test() over a spread of (index, order) pairs that
 * exercise canonical entries and their sibling slots at several tree heights.
 */
static void multiorder_tag_tests(void)
{
	static const struct {
		unsigned long index;
		int order;
	} cases[] = {
		/* multi-order entry for indices 0-7, no sibling pointers */
		{ 0, 3 },
		{ 5, 3 },
		/* multi-order entry for indices 8-15, no sibling pointers */
		{ 8, 3 },
		{ 15, 3 },
		/*
		 * An order 5 entry covers indices 0-31 in a tree with
		 * height=2, broken up as: 0-7 canonical entry, 8-15
		 * sibling 1, 16-23 sibling 2, 24-31 sibling 3.
		 */
		{ 0, 5 },
		{ 29, 5 },
		/* same layout, shifted to indices 32-63 */
		{ 32, 5 },
		{ 44, 5 },
		/*
		 * An order 8 entry covers indices 0-255 in a tree with
		 * height=3, broken up as: 0-63 canonical entry, 64-127
		 * sibling 1, 128-191 sibling 2, 192-255 sibling 3.
		 */
		{ 0, 8 },
		{ 190, 8 },
		/* same layout, shifted to indices 256-511 */
		{ 256, 8 },
		{ 300, 8 },
		/* a large index, deep in the tree */
		{ 0x12345678UL, 8 },
	};
	unsigned int n;

	for (n = 0; n < sizeof(cases) / sizeof(cases[0]); n++)
		__multiorder_tag_test(cases[n].index, cases[n].order);
}
121
Matthew Wilcox4f3755d2016-05-20 17:02:14 -0700122static void multiorder_check(unsigned long index, int order)
123{
124 unsigned long i;
125 unsigned long min = index & ~((1UL << order) - 1);
126 unsigned long max = min + (1UL << order);
Matthew Wilcox62fd5252016-09-22 11:53:34 -0700127 void **slot;
Matthew Wilcox101d9602016-12-14 15:08:23 -0800128 struct item *item2 = item_create(min, order);
Matthew Wilcox4f3755d2016-05-20 17:02:14 -0700129 RADIX_TREE(tree, GFP_KERNEL);
130
131 printf("Multiorder index %ld, order %d\n", index, order);
132
133 assert(item_insert_order(&tree, index, order) == 0);
134
135 for (i = min; i < max; i++) {
136 struct item *item = item_lookup(&tree, i);
137 assert(item != 0);
138 assert(item->index == index);
139 }
140 for (i = 0; i < min; i++)
141 item_check_absent(&tree, i);
142 for (i = max; i < 2*max; i++)
143 item_check_absent(&tree, i);
Matthew Wilcox62fd5252016-09-22 11:53:34 -0700144 for (i = min; i < max; i++)
145 assert(radix_tree_insert(&tree, i, item2) == -EEXIST);
146
147 slot = radix_tree_lookup_slot(&tree, index);
148 free(*slot);
Johannes Weiner6d75f362016-12-12 16:43:43 -0800149 radix_tree_replace_slot(&tree, slot, item2);
Matthew Wilcox8a14f4d2016-05-20 17:02:44 -0700150 for (i = min; i < max; i++) {
Matthew Wilcox62fd5252016-09-22 11:53:34 -0700151 struct item *item = item_lookup(&tree, i);
152 assert(item != 0);
153 assert(item->index == min);
Matthew Wilcox8a14f4d2016-05-20 17:02:44 -0700154 }
Matthew Wilcox4f3755d2016-05-20 17:02:14 -0700155
Matthew Wilcox62fd5252016-09-22 11:53:34 -0700156 assert(item_delete(&tree, min) != 0);
Matthew Wilcox4f3755d2016-05-20 17:02:14 -0700157
158 for (i = 0; i < 2*max; i++)
159 item_check_absent(&tree, i);
160}
161
Matthew Wilcoxafe0e392016-05-20 17:02:17 -0700162static void multiorder_shrink(unsigned long index, int order)
163{
164 unsigned long i;
165 unsigned long max = 1 << order;
166 RADIX_TREE(tree, GFP_KERNEL);
167 struct radix_tree_node *node;
168
169 printf("Multiorder shrink index %ld, order %d\n", index, order);
170
171 assert(item_insert_order(&tree, 0, order) == 0);
172
173 node = tree.rnode;
174
175 assert(item_insert(&tree, index) == 0);
176 assert(node != tree.rnode);
177
178 assert(item_delete(&tree, index) != 0);
179 assert(node == tree.rnode);
180
181 for (i = 0; i < max; i++) {
182 struct item *item = item_lookup(&tree, i);
183 assert(item != 0);
184 assert(item->index == 0);
185 }
186 for (i = max; i < 2*max; i++)
187 item_check_absent(&tree, i);
188
189 if (!item_delete(&tree, 0)) {
190 printf("failed to delete index %ld (order %d)\n", index, order); abort();
191 }
192
193 for (i = 0; i < 2*max; i++)
194 item_check_absent(&tree, i);
195}
196
/*
 * Regression test: insert a tagged order-0 entry at index 0, then a
 * multi-order entry elsewhere in the tree.  NOTE(review): the exact call
 * sequence appears to reproduce a historical insert-with-tags bug — the
 * test passes iff it does not crash; return values are deliberately
 * ignored.
 */
static void multiorder_insert_bug(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 0);
	radix_tree_tag_set(&tree, 0, 0);
	item_insert_order(&tree, 3 << 6, 6);

	item_kill_tree(&tree);
}
207
/*
 * Populate the tree with a mix of entry orders, then start iteration from
 * every index 0..255 and verify each visited slot: index, shift, and item
 * contents must match the multi-order entry that covers that position.
 */
void multiorder_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j, err;

	printf("Multiorder iteration test\n");

#define NUM_ENTRIES 11
	/* index[i] is canonical start; order[i] its order; entries don't overlap */
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(&tree, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		/* find the first entry whose covered range ends at or after j */
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		radix_tree_for_each_slot(slot, &tree, &iter, j) {
			/* expected node level/shift for an entry of this order */
			int height = order[i] / RADIX_TREE_MAP_SHIFT;
			int shift = height * RADIX_TREE_MAP_SHIFT;
			unsigned long mask = (1UL << order[i]) - 1;
			struct item *item = *slot;

			/* compare modulo the entry's range, not exact index */
			assert((iter.index | mask) == (index[i] | mask));
			assert(iter.shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(&tree);
}
248
/*
 * Like multiorder_iteration(), but for tagged iteration: tag a subset of
 * the multi-order entries with tag 1, iterate from every start index and
 * check only tagged entries are visited; then exercise tag copying
 * (1 -> 2 and 1 -> 0 via radix_tree_range_tag_if_tagged) and re-verify.
 */
void multiorder_tagged_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	unsigned long first = 0;
	int i, j;

	printf("Multiorder tagged iteration test\n");

#define MT_NUM_ENTRIES 9
	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
	int order[MT_NUM_ENTRIES] = {1, 0, 2, 4, 3, 1, 3, 0, 7};

	/* subset of index[] that will carry tag 1 */
#define TAG_ENTRIES 7
	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};

	for (i = 0; i < MT_NUM_ENTRIES; i++)
		assert(!item_insert_order(&tree, index[i], order[i]));

	assert(!radix_tree_tagged(&tree, 1));

	for (i = 0; i < TAG_ENTRIES; i++)
		assert(radix_tree_tag_set(&tree, tag_index[i], 1));

	for (j = 0; j < 256; j++) {
		int k;

		/*
		 * Find the first tagged entry (i) whose covered range ends at
		 * or after start index j; k maps tag_index[i] back to its
		 * position in index[]/order[].
		 */
		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
			unsigned long mask;
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			/* compare modulo the entry's range, not exact index */
			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/* copy tag 1 -> tag 2 over the whole tree, then repeat on tag 2 */
	radix_tree_range_tag_if_tagged(&tree, &first, ~0UL,
					MT_NUM_ENTRIES, 1, 2);

	for (j = 0; j < 256; j++) {
		int mask, k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1 << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/*
	 * Copy tag 1 -> tag 0 starting from index 1: entry 0 covers index 1,
	 * so it is still included and every tagged entry shows up on tag 0.
	 */
	first = 1;
	radix_tree_range_tag_if_tagged(&tree, &first, ~0UL,
					MT_NUM_ENTRIES, 1, 0);
	i = 0;
	radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
		assert(iter.index == tag_index[i]);
		i++;
	}

	item_kill_tree(&tree);
}
337
Matthew Wilcox4f3755d2016-05-20 17:02:14 -0700338void multiorder_checks(void)
339{
340 int i;
341
342 for (i = 0; i < 20; i++) {
343 multiorder_check(200, i);
344 multiorder_check(0, i);
345 multiorder_check((1UL << i) + 1, i);
346 }
Matthew Wilcoxafe0e392016-05-20 17:02:17 -0700347
348 for (i = 0; i < 15; i++)
349 multiorder_shrink((1UL << (i + RADIX_TREE_MAP_SHIFT)), i);
350
Matthew Wilcox7b60e9a2016-05-20 17:02:23 -0700351 multiorder_insert_bug();
Ross Zwisler0fc9b8c2016-05-20 17:02:41 -0700352 multiorder_tag_tests();
Ross Zwisler643b57d2016-05-20 17:02:29 -0700353 multiorder_iteration();
354 multiorder_tagged_iteration();
Matthew Wilcox4f3755d2016-05-20 17:02:14 -0700355}