/*
 * multiorder.c: Multi-order radix tree entry testing
 * Copyright (c) 2016 Intel Corporation
 * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
 * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
#include <linux/radix-tree.h>
#include <linux/slab.h>
#include <linux/errno.h>
#include <pthread.h>

#include "test.h"

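/*
 * Insert an item at @index that occupies 2^@order contiguous indices.
 * xas_nomem() allocates memory and asks us to retry the store, so the
 * only way out of the loop with an error is an allocation failure.
 */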
static int item_insert_order(struct xarray *xa, unsigned long index,
			unsigned order)
{
	XA_STATE_ORDER(xas, xa, index, order);
	struct item *item = item_create(index, order);

	do {
		xas_lock(&xas);
		xas_store(&xas, item);
		xas_unlock(&xas);
	} while (xas_nomem(&xas, GFP_KERNEL));

	if (!xas_error(&xas))
		return 0;

	free(item);
	return xas_error(&xas);
}

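/*
 * Insert entries of assorted orders, then check that iteration started
 * from every possible index visits exactly the entries covering that
 * index onwards, reporting each entry's index and order correctly.
 */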
void multiorder_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j, err;

	printv(1, "Multiorder iteration test\n");

#define NUM_ENTRIES 11
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(&tree, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		/*
		 * Find the first entry that ends at or after index j:
		 * index[i] | ((1 << order[i]) - 1) is the last index
		 * covered by the entry at index[i].
		 */
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		radix_tree_for_each_slot(slot, &tree, &iter, j) {
			int height = order[i] / RADIX_TREE_MAP_SHIFT;
			int shift = height * RADIX_TREE_MAP_SHIFT;
			unsigned long mask = (1UL << order[i]) - 1;
			struct item *item = *slot;

			/* Iterator and item must agree up to the order mask */
			assert((iter.index | mask) == (index[i] | mask));
			assert(iter.shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(&tree);
}

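/*
 * As above, but iterate over only the tagged entries.  A tag set on a
 * multi-order entry applies to the whole range of indices it covers.
 */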
void multiorder_tagged_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j;

	printv(1, "Multiorder tagged iteration test\n");

#define MT_NUM_ENTRIES 9
	int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
	int order[MT_NUM_ENTRIES] = {1, 0, 2, 4, 3, 1, 3, 0, 7};

#define TAG_ENTRIES 7
	int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};

	for (i = 0; i < MT_NUM_ENTRIES; i++)
		assert(!item_insert_order(&tree, index[i], order[i]));

	assert(!radix_tree_tagged(&tree, 1));

	for (i = 0; i < TAG_ENTRIES; i++)
		assert(radix_tree_tag_set(&tree, tag_index[i], 1));

	for (j = 0; j < 256; j++) {
		int k;

		/*
		 * Find the first tagged entry that ends at or after j.
		 * The k scan locates tag_index[i] within index[] so that
		 * order[k] is that entry's order.
		 */
		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
			unsigned long mask;
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/* Tag every XA_MARK_1 entry with XA_MARK_2 as well */
	assert(tag_tagged_items(&tree, 0, ~0UL, TAG_ENTRIES, XA_MARK_1,
				XA_MARK_2) == TAG_ENTRIES);

	for (j = 0; j < 256; j++) {
		unsigned long mask;
		int k;

		for (i = 0; i < TAG_ENTRIES; i++) {
			for (k = i; index[k] < tag_index[i]; k++)
				;
			if (j <= (index[k] | ((1 << order[k]) - 1)))
				break;
		}

		radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
			struct item *item = *slot;
			for (k = i; index[k] < tag_index[i]; k++)
				;
			mask = (1UL << order[k]) - 1;

			assert((iter.index | mask) == (tag_index[i] | mask));
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (tag_index[i] | mask));
			assert(item->order == order[k]);
			i++;
		}
	}

	/*
	 * Start the scan at index 1: the order-1 entry at index 0 still
	 * covers index 1, so all TAG_ENTRIES are found and retagged.
	 */
	assert(tag_tagged_items(&tree, 1, ~0UL, MT_NUM_ENTRIES * 2, XA_MARK_1,
				XA_MARK_0) == TAG_ENTRIES);
	i = 0;
	radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
		assert(iter.index == tag_index[i]);
		i++;
	}

	item_kill_tree(&tree);
}

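/*
 * One thread repeatedly inserts and deletes a multi-order entry while the
 * other threads iterate over the tree under rcu_read_lock().  The iterators
 * must cope with entries appearing, moving and disappearing beneath them.
 */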
bool stop_iteration = false;

static void *creator_func(void *ptr)
{
	/* 'order' is set up to ensure we have sibling entries */
	unsigned int order = RADIX_TREE_MAP_SHIFT - 1;
	struct radix_tree_root *tree = ptr;
	int i;

	for (i = 0; i < 10000; i++) {
		item_insert_order(tree, 0, order);
		item_delete_rcu(tree, 0);
	}

	stop_iteration = true;
	return NULL;
}

static void *iterator_func(void *ptr)
{
	struct radix_tree_root *tree = ptr;
	struct radix_tree_iter iter;
	struct item *item;
	void **slot;

	while (!stop_iteration) {
		rcu_read_lock();
		radix_tree_for_each_slot(slot, tree, &iter, 0) {
			item = radix_tree_deref_slot(slot);

			if (!item)
				continue;
			/* The tree changed under us; restart this lookup */
			if (radix_tree_deref_retry(item)) {
				slot = radix_tree_iter_retry(&iter);
				continue;
			}

			item_sanity(item, iter.index);
		}
		rcu_read_unlock();
	}
	return NULL;
}

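/* Run one creator thread and iterator threads on all remaining CPUs. */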
static void multiorder_iteration_race(void)
{
	const int num_threads = sysconf(_SC_NPROCESSORS_ONLN);
	pthread_t worker_thread[num_threads];
	RADIX_TREE(tree, GFP_KERNEL);
	int i;

	pthread_create(&worker_thread[0], NULL, &creator_func, &tree);
	for (i = 1; i < num_threads; i++)
		pthread_create(&worker_thread[i], NULL, &iterator_func, &tree);

	for (i = 0; i < num_threads; i++)
		pthread_join(worker_thread[i], NULL);

	item_kill_tree(&tree);
}

void multiorder_checks(void)
{
	multiorder_iteration();
	multiorder_tagged_iteration();
	multiorder_iteration_race();

	/* Exercise the CPU-hotplug callback, which frees preloaded nodes */
	radix_tree_cpu_dead(0);
}

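/*
 * main() is __weak, so a stronger definition (e.g. the combined test
 * suite's main()) takes precedence when this file is linked into a
 * larger binary; built standalone, this entry point is used.
 */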
int __weak main(void)
{
	radix_tree_init();
	multiorder_checks();
	return 0;
}