/*
 * multiorder.c: Multi-order radix tree entry testing
 * Copyright (c) 2016 Intel Corporation
 * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
 * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
#include <linux/radix-tree.h>
#include <linux/slab.h>
#include <linux/errno.h>

#include "test.h"

#define for_each_index(i, base, order) \
	for (i = base; i < base + (1 << order); i++)

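/*
 * Insert a single entry of the given order and check the tag behaviour
 * that the multiorder design implies: one canonical entry represents
 * the whole range, so a tag set through any covered index must be
 * visible at every covered index, and tag_tagged_items() must see the
 * entry exactly once.
 */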
static void __multiorder_tag_test(int index, int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int base, err, i;

	/* our canonical entry */
	base = index & ~((1 << order) - 1);

	printv(2, "Multiorder tag test with index %d, canonical entry %d\n",
			index, base);

	err = item_insert_order(&tree, index, order);
	assert(!err);

	/*
	 * Verify that the covered indices collide.  We attempt each insert
	 * with an exceptional entry, which needs no allocation, so the
	 * expected failure cannot leak memory the way a failed
	 * item_insert_order() would.
	 */
	for_each_index(i, base, order) {
		err = __radix_tree_insert(&tree, i, order,
				(void *)(0xA0 | RADIX_TREE_EXCEPTIONAL_ENTRY));
		assert(err == -EEXIST);
	}

	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	/* Tagging one covered index must tag the entire entry. */
	assert(radix_tree_tag_set(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(radix_tree_tag_get(&tree, i, 0));
		assert(!radix_tree_tag_get(&tree, i, 1));
	}

	/* Copy tag 0 to tag 1; the single entry counts once. */
	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
	assert(radix_tree_tag_clear(&tree, index, 0));

	for_each_index(i, base, order) {
		assert(!radix_tree_tag_get(&tree, i, 0));
		assert(radix_tree_tag_get(&tree, i, 1));
	}

	assert(radix_tree_tag_clear(&tree, index, 1));

	assert(!radix_tree_tagged(&tree, 0));
	assert(!radix_tree_tagged(&tree, 1));

	item_kill_tree(&tree);
}

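/*
 * Pair one multiorder entry with one single-order entry at a varying
 * distance and check that tag_tagged_items() counts exactly two tagged
 * entries regardless of their spacing.
 */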
static void __multiorder_tag_test2(unsigned order, unsigned long index2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long index = (1 << order);
	index2 += index;

	assert(item_insert_order(&tree, 0, order) == 0);
	assert(item_insert(&tree, index2) == 0);

	assert(radix_tree_tag_set(&tree, 0, 0));
	assert(radix_tree_tag_set(&tree, index2, 0));

	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);

	item_kill_tree(&tree);
}

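/*
 * Driver for the tag tests above.  The index breakdowns in the comments
 * below assume a RADIX_TREE_MAP_SHIFT of 3 (eight slots per node), as
 * the test harness is normally built.
 */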
static void multiorder_tag_tests(void)
{
	int i, j;

	/* test multi-order entry for indices 0-7 with no sibling pointers */
	__multiorder_tag_test(0, 3);
	__multiorder_tag_test(5, 3);

	/* test multi-order entry for indices 8-15 with no sibling pointers */
	__multiorder_tag_test(8, 3);
	__multiorder_tag_test(15, 3);

	/*
	 * Our order 5 entry covers indices 0-31 in a tree with height=2.
	 * This is broken up as follows:
	 * 0-7: canonical entry
	 * 8-15: sibling 1
	 * 16-23: sibling 2
	 * 24-31: sibling 3
	 */
	__multiorder_tag_test(0, 5);
	__multiorder_tag_test(29, 5);

	/* same test, but with indices 32-63 */
	__multiorder_tag_test(32, 5);
	__multiorder_tag_test(44, 5);

	/*
	 * Our order 8 entry covers indices 0-255 in a tree with height=3.
	 * This is broken up as follows:
	 * 0-63: canonical entry
	 * 64-127: sibling 1
	 * 128-191: sibling 2
	 * 192-255: sibling 3
	 */
	__multiorder_tag_test(0, 8);
	__multiorder_tag_test(190, 8);

	/* same test, but with indices 256-511 */
	__multiorder_tag_test(256, 8);
	__multiorder_tag_test(300, 8);

	__multiorder_tag_test(0x12345678UL, 8);

	for (i = 1; i < 10; i++)
		for (j = 0; j < (10 << i); j++)
			__multiorder_tag_test2(i, j);
}

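/*
 * Exhaustive lookup checks around a single entry: every covered index
 * finds it, neighbouring indices find nothing, colliding inserts fail
 * with -EEXIST, and the entry can be replaced through its slot and then
 * deleted.
 */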
static void multiorder_check(unsigned long index, int order)
{
	unsigned long i;
	unsigned long min = index & ~((1UL << order) - 1);
	unsigned long max = min + (1UL << order);
	void **slot;
	struct item *item2 = item_create(min, order);
	RADIX_TREE(tree, GFP_KERNEL);

	printv(2, "Multiorder index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, index, order) == 0);

	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == index);
	}
	for (i = 0; i < min; i++)
		item_check_absent(&tree, i);
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);
	for (i = min; i < max; i++)
		assert(radix_tree_insert(&tree, i, item2) == -EEXIST);

	/* Replace the entry through its slot and re-check the covered range. */
	slot = radix_tree_lookup_slot(&tree, index);
	free(*slot);
	radix_tree_replace_slot(&tree, slot, item2);
	for (i = min; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == min);
	}

	assert(item_delete(&tree, min) != 0);

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}

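/*
 * Inserting a distant single-order entry next to a multiorder entry at
 * index 0 grows the tree; deleting it must shrink the tree back to the
 * original root node with the multiorder entry still fully visible.
 */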
static void multiorder_shrink(unsigned long index, int order)
{
	unsigned long i;
	unsigned long max = 1 << order;
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;

	printv(2, "Multiorder shrink index %ld, order %d\n", index, order);

	assert(item_insert_order(&tree, 0, order) == 0);

	node = tree.rnode;

	assert(item_insert(&tree, index) == 0);
	assert(node != tree.rnode);

	assert(item_delete(&tree, index) != 0);
	assert(node == tree.rnode);

	for (i = 0; i < max; i++) {
		struct item *item = item_lookup(&tree, i);
		assert(item != 0);
		assert(item->index == 0);
	}
	for (i = max; i < 2*max; i++)
		item_check_absent(&tree, i);

	if (!item_delete(&tree, 0)) {
		printv(2, "failed to delete index %ld (order %d)\n", index, order);
		abort();
	}

	for (i = 0; i < 2*max; i++)
		item_check_absent(&tree, i);
}

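/*
 * Judging by its name, a regression test: insert a multiorder entry
 * into a tree whose existing entry already carries a tag.  There are no
 * asserts; the test passes if the sequence completes without crashing.
 */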
static void multiorder_insert_bug(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 0);
	radix_tree_tag_set(&tree, 0, 0);
	item_insert_order(&tree, 3 << 6, 6);

	item_kill_tree(&tree);
}

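/*
 * Iterate from every starting index in [0, 255] and check that the walk
 * reports each multiorder entry once, at a position and iterator shift
 * consistent with the entry's index range and order.
 */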
void multiorder_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j, err;

	printv(1, "Multiorder iteration test\n");

#define NUM_ENTRIES 11
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(&tree, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		/* Find the first entry whose range ends at or after j. */
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		radix_tree_for_each_slot(slot, &tree, &iter, j) {
			int height = order[i] / RADIX_TREE_MAP_SHIFT;
			int shift = height * RADIX_TREE_MAP_SHIFT;
			unsigned long mask = (1UL << order[i]) - 1;
			struct item *item = *slot;

			assert((iter.index | mask) == (index[i] | mask));
			assert(iter.shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(&tree);
}

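/*
 * The same walk restricted to tagged entries, performed once on the tag
 * set directly and once after copying it with tag_tagged_items(), to
 * check tag propagation across multiorder entries.
 */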
void multiorder_tagged_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j;

	printv(1, "Multiorder tagged iteration test\n");

#define MT_NUM_ENTRIES 9
	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
	int order[MT_NUM_ENTRIES] = {1, 0, 2, 4, 3, 1, 3, 0, 7};

#define TAG_ENTRIES 7
	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};

	for (i = 0; i < MT_NUM_ENTRIES; i++)
		assert(!item_insert_order(&tree, index[i], order[i]));

	assert(!radix_tree_tagged(&tree, 1));

	for (i = 0; i < TAG_ENTRIES; i++)
		assert(radix_tree_tag_set(&tree, tag_index[i], 1));

	for (j = 0; j < 256; j++) {
		int k;

		/* Find the first tagged entry whose range ends at or after j. */
		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
			unsigned long mask;
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/* Copy tag 1 to tag 2 and repeat the walk on tag 2. */
	assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
				TAG_ENTRIES);

	for (j = 0; j < 256; j++) {
		int mask, k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1 << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/*
	 * Copy tag 1 to tag 0, starting the scan at index 1.  The order-1
	 * entry at index 0 covers index 1, so all TAG_ENTRIES are still
	 * found.
	 */
	assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
			== TAG_ENTRIES);
	i = 0;
	radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
		assert(iter.index == tag_index[i]);
		i++;
	}

	item_kill_tree(&tree);
}

/*
 * Basic join checks: make sure we can't find an entry in the tree after
 * a larger entry has replaced it.
 */
static void multiorder_join1(unsigned long index,
				unsigned order1, unsigned order2)
{
	unsigned long loc;
	void *item, *item2 = item_create(index + 1, order1);
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert_order(&tree, index, order2);
	item = radix_tree_lookup(&tree, index);
	radix_tree_join(&tree, index + 1, order1, item2);
	/* The old item should no longer be in the tree; free it if so. */
	loc = find_item(&tree, item);
	if (loc == -1)
		free(item);
	item = radix_tree_lookup(&tree, index + 1);
	assert(item == item2);
	item_kill_tree(&tree);
}

/*
 * Check that the accounting of exceptional entries is handled correctly
 * by joining an exceptional entry to a normal pointer.
 */
static void multiorder_join2(unsigned order1, unsigned order2)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void *item1 = item_create(0, order1);
	void *item2;

	item_insert_order(&tree, 0, order2);
	radix_tree_insert(&tree, 1 << order2, (void *)0x12UL);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == (void *)0x12UL);
	assert(node->exceptional == 1);

	/* Free the item that the join is about to overwrite. */
	item2 = radix_tree_lookup(&tree, 0);
	free(item2);

	radix_tree_join(&tree, 0, order1, item1);
	item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
	assert(item2 == item1);
	assert(node->exceptional == 0);
	item_kill_tree(&tree);
}

/*
 * This test revealed an accounting bug for exceptional entries at one
 * point.  Nodes were being freed back into the pool with an elevated
 * exception count by radix_tree_join() and then radix_tree_split() was
 * failing to zero the count of exceptional entries.
 */
static void multiorder_join3(unsigned int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;
	struct radix_tree_iter iter;
	unsigned long i;

	for (i = 0; i < (1 << order); i++) {
		radix_tree_insert(&tree, i, (void *)0x12UL);
	}

	radix_tree_join(&tree, 0, order, (void *)0x16UL);
	rcu_barrier();

	radix_tree_split(&tree, 0, 0);

	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x12UL);
	}

	__radix_tree_lookup(&tree, 0, &node, NULL);
	assert(node->exceptional == node->count);

	item_kill_tree(&tree);
}

static void multiorder_join(void)
{
	int i, j, idx;

	for (idx = 0; idx < 1024; idx = idx * 2 + 3) {
		for (i = 1; i < 15; i++) {
			for (j = 0; j < i; j++) {
				multiorder_join1(idx, i, j);
			}
		}
	}

	for (i = 1; i < 15; i++) {
		for (j = 0; j < i; j++) {
			multiorder_join2(i, j);
		}
	}

	for (i = 3; i < 10; i++) {
		multiorder_join3(i);
	}
}

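/*
 * Sanity-check node accounting after a split: report any preloaded
 * nodes left unused and complain if more nodes ended up allocated than
 * the preload set aside.
 */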
static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
{
	struct radix_tree_preload *rtp = &radix_tree_preloads;
	if (rtp->nr != 0)
		printv(2, "split(%u %u) remaining %u\n", old_order, new_order,
							rtp->nr);
	/*
	 * Can't check for equality here as some nodes may have been
	 * RCU-freed while we ran.  But we should never finish with more
	 * nodes allocated since they should have all been preloaded.
	 */
	if (nr_allocated > alloc)
		printv(2, "split(%u %u) allocated %u %u\n", old_order, new_order,
							alloc, nr_allocated);
}

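/*
 * Split one multiorder entry into entries of a smaller order, replacing
 * each resulting slot with a fresh item, and verify via check_mem()
 * that the split consumed only preallocated nodes.
 */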
static void __multiorder_split(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_ATOMIC);
	void **slot;
	struct radix_tree_iter iter;
	unsigned alloc;
	struct item *item;

	radix_tree_preload(GFP_KERNEL);
	assert(item_insert_order(&tree, 0, old_order) == 0);
	radix_tree_preload_end();

	/* Wipe out the preloaded cache or it'll confuse check_mem() */
	radix_tree_cpu_dead(0);

	item = radix_tree_tag_set(&tree, 0, 2);

	radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
	alloc = nr_allocated;
	radix_tree_split(&tree, 0, new_order);
	check_mem(old_order, new_order, alloc);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}
	radix_tree_preload_end();

	item_kill_tree(&tree);
	free(item);
}

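/*
 * Splitting an exceptional entry and replacing the pieces with normal
 * pointers must drop the node's exceptional count to zero.
 */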
static void __multiorder_split2(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot,
					item_create(iter.index, new_order));
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item != (void *)0x12);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}

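/*
 * Splitting an exceptional entry and replacing the pieces with other
 * exceptional entries must keep the exceptional accounting consistent,
 * both when every slot is refilled and when all but one are cleared.
 */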
static void __multiorder_split3(int old_order, int new_order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	void **slot;
	struct radix_tree_iter iter;
	struct radix_tree_node *node;
	void *item;

	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
	}

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->exceptional > 0);

	item_kill_tree(&tree);

	/* Now clear all but one of the pieces to NULL. */
	__radix_tree_insert(&tree, 0, old_order, (void *)0x12);

	item = __radix_tree_lookup(&tree, 0, &node, NULL);
	assert(item == (void *)0x12);
	assert(node->exceptional > 0);

	radix_tree_split(&tree, 0, new_order);
	radix_tree_for_each_slot(slot, &tree, &iter, 0) {
		if (iter.index == (1 << new_order))
			radix_tree_iter_replace(&tree, &iter, slot,
						(void *)0x16);
		else
			radix_tree_iter_replace(&tree, &iter, slot, NULL);
	}

	item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
	assert(item == (void *)0x16);
	assert(node->count == node->exceptional);
	do {
		node = node->parent;
		if (!node)
			break;
		assert(node->count == 1);
		assert(node->exceptional == 0);
	} while (1);

	item_kill_tree(&tree);
}

static void multiorder_split(void)
{
	int i, j;

	for (i = 3; i < 11; i++)
		for (j = 0; j < i; j++) {
			__multiorder_split(i, j);
			__multiorder_split2(i, j);
			__multiorder_split3(i, j);
		}
}

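/*
 * Exceptional-entry accounting in a node shared between a normal
 * multiorder item and a multiorder exceptional entry: deleting the
 * exceptional entry, or replacing it with NULL, must return the node's
 * exceptional count to zero.
 */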
static void multiorder_account(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_node *node;
	void **slot;

	item_insert_order(&tree, 0, 5);

	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 0, &node, NULL);
	assert(node->count == node->exceptional * 2);
	radix_tree_delete(&tree, 1 << 5);
	assert(node->exceptional == 0);

	__radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
	__radix_tree_lookup(&tree, 1 << 5, &node, &slot);
	assert(node->count == node->exceptional * 2);
	__radix_tree_replace(&tree, node, slot, NULL, NULL);
	assert(node->exceptional == 0);

	item_kill_tree(&tree);
}

void multiorder_checks(void)
{
	int i;

	for (i = 0; i < 20; i++) {
		multiorder_check(200, i);
		multiorder_check(0, i);
		multiorder_check((1UL << i) + 1, i);
	}

	for (i = 0; i < 15; i++)
		multiorder_shrink((1UL << (i + RADIX_TREE_MAP_SHIFT)), i);

	multiorder_insert_bug();
	multiorder_tag_tests();
	multiorder_iteration();
	multiorder_tagged_iteration();
	multiorder_join();
	multiorder_split();
	multiorder_account();

	radix_tree_cpu_dead(0);
}

int __weak main(void)
{
	radix_tree_init();
	multiorder_checks();
	return 0;
}