/*
 * multiorder.c: Multi-order radix tree entry testing
 * Copyright (c) 2016 Intel Corporation
 * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
 * Author: Matthew Wilcox <matthew.r.wilcox@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 */
#include <linux/radix-tree.h>
#include <linux/slab.h>
#include <linux/errno.h>
#include <pthread.h>
#include <stdatomic.h>

#include "test.h"
22
Matthew Wilcox4bb53bd2018-09-12 23:29:32 -040023static int item_insert_order(struct xarray *xa, unsigned long index,
24 unsigned order)
25{
26 XA_STATE_ORDER(xas, xa, index, order);
27 struct item *item = item_create(index, order);
28
29 do {
30 xas_lock(&xas);
31 xas_store(&xas, item);
32 xas_unlock(&xas);
33 } while (xas_nomem(&xas, GFP_KERNEL));
34
35 if (!xas_error(&xas))
36 return 0;
37
38 free(item);
39 return xas_error(&xas);
40}
41
/*
 * Verify that plain slot iteration sees every multi-order entry exactly
 * once, starting from every possible index 0..255, and that the iterator
 * reports a consistent index/shift for each entry.
 */
void multiorder_iteration(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct radix_tree_iter iter;
	void **slot;
	int i, j, err;

	printv(1, "Multiorder iteration test\n");

#define NUM_ENTRIES 11
	int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128};
	int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7};

	for (i = 0; i < NUM_ENTRIES; i++) {
		err = item_insert_order(&tree, index[i], order[i]);
		assert(!err);
	}

	for (j = 0; j < 256; j++) {
		/*
		 * Find the first entry whose covered range [index, index +
		 * 2^order) ends at or after j; iteration from j must start
		 * there.
		 */
		for (i = 0; i < NUM_ENTRIES; i++)
			if (j <= (index[i] | ((1 << order[i]) - 1)))
				break;

		radix_tree_for_each_slot(slot, &tree, &iter, j) {
			/* shift the iterator should report for this order */
			int height = order[i] / RADIX_TREE_MAP_SHIFT;
			int shift = height * RADIX_TREE_MAP_SHIFT;
			/* low bits that may differ within a multi-order entry */
			unsigned long mask = (1UL << order[i]) - 1;
			struct item *item = *slot;

			assert((iter.index | mask) == (index[i] | mask));
			assert(iter.shift == shift);
			assert(!radix_tree_is_internal_node(item));
			assert((item->index | mask) == (index[i] | mask));
			assert(item->order == order[i]);
			i++;
		}
	}

	item_kill_tree(&tree);
}
82
83void multiorder_tagged_iteration(void)
84{
85 RADIX_TREE(tree, GFP_KERNEL);
86 struct radix_tree_iter iter;
87 void **slot;
Matthew Wilcox8c1244d2016-05-20 17:03:36 -070088 int i, j;
Ross Zwisler643b57d2016-05-20 17:02:29 -070089
Rehas Sachdeva73bc0292017-01-04 11:55:00 -050090 printv(1, "Multiorder tagged iteration test\n");
Ross Zwisler643b57d2016-05-20 17:02:29 -070091
92#define MT_NUM_ENTRIES 9
93 int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128};
94 int order[MT_NUM_ENTRIES] = {1, 0, 2, 4, 3, 1, 3, 0, 7};
95
96#define TAG_ENTRIES 7
97 int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128};
98
99 for (i = 0; i < MT_NUM_ENTRIES; i++)
100 assert(!item_insert_order(&tree, index[i], order[i]));
101
102 assert(!radix_tree_tagged(&tree, 1));
103
104 for (i = 0; i < TAG_ENTRIES; i++)
105 assert(radix_tree_tag_set(&tree, tag_index[i], 1));
106
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700107 for (j = 0; j < 256; j++) {
Matthew Wilcox148deab2016-12-14 15:08:49 -0800108 int k;
Ross Zwisler643b57d2016-05-20 17:02:29 -0700109
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700110 for (i = 0; i < TAG_ENTRIES; i++) {
111 for (k = i; index[k] < tag_index[i]; k++)
112 ;
113 if (j <= (index[k] | ((1 << order[k]) - 1)))
114 break;
115 }
116
117 radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
Matthew Wilcox148deab2016-12-14 15:08:49 -0800118 unsigned long mask;
119 struct item *item = *slot;
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700120 for (k = i; index[k] < tag_index[i]; k++)
121 ;
Matthew Wilcox148deab2016-12-14 15:08:49 -0800122 mask = (1UL << order[k]) - 1;
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700123
Matthew Wilcox148deab2016-12-14 15:08:49 -0800124 assert((iter.index | mask) == (tag_index[i] | mask));
125 assert(!radix_tree_is_internal_node(item));
126 assert((item->index | mask) == (tag_index[i] | mask));
127 assert(item->order == order[k]);
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700128 i++;
129 }
Ross Zwisler643b57d2016-05-20 17:02:29 -0700130 }
131
Matthew Wilcox372266b2018-08-18 07:09:22 -0400132 assert(tag_tagged_items(&tree, 0, ~0UL, TAG_ENTRIES, XA_MARK_1,
133 XA_MARK_2) == TAG_ENTRIES);
Matthew Wilcox070c5ac2016-05-20 17:02:52 -0700134
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700135 for (j = 0; j < 256; j++) {
136 int mask, k;
137
138 for (i = 0; i < TAG_ENTRIES; i++) {
139 for (k = i; index[k] < tag_index[i]; k++)
140 ;
141 if (j <= (index[k] | ((1 << order[k]) - 1)))
142 break;
143 }
144
145 radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
Matthew Wilcox148deab2016-12-14 15:08:49 -0800146 struct item *item = *slot;
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700147 for (k = i; index[k] < tag_index[i]; k++)
148 ;
149 mask = (1 << order[k]) - 1;
150
Matthew Wilcox148deab2016-12-14 15:08:49 -0800151 assert((iter.index | mask) == (tag_index[i] | mask));
152 assert(!radix_tree_is_internal_node(item));
153 assert((item->index | mask) == (tag_index[i] | mask));
154 assert(item->order == order[k]);
Matthew Wilcox8c1244d2016-05-20 17:03:36 -0700155 i++;
156 }
Matthew Wilcox070c5ac2016-05-20 17:02:52 -0700157 }
158
Matthew Wilcox372266b2018-08-18 07:09:22 -0400159 assert(tag_tagged_items(&tree, 1, ~0UL, MT_NUM_ENTRIES * 2, XA_MARK_1,
160 XA_MARK_0) == TAG_ENTRIES);
Matthew Wilcox070c5ac2016-05-20 17:02:52 -0700161 i = 0;
162 radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
163 assert(iter.index == tag_index[i]);
164 i++;
165 }
166
Ross Zwisler643b57d2016-05-20 17:02:29 -0700167 item_kill_tree(&tree);
168}
169
Ross Zwislerfd8f58c2018-05-18 16:09:01 -0700170bool stop_iteration = false;
171
172static void *creator_func(void *ptr)
173{
174 /* 'order' is set up to ensure we have sibling entries */
175 unsigned int order = RADIX_TREE_MAP_SHIFT - 1;
176 struct radix_tree_root *tree = ptr;
177 int i;
178
179 for (i = 0; i < 10000; i++) {
180 item_insert_order(tree, 0, order);
181 item_delete_rcu(tree, 0);
182 }
183
184 stop_iteration = true;
185 return NULL;
186}
187
/*
 * Worker that iterates the tree under rcu_read_lock() until the creator
 * thread sets stop_iteration, racing against concurrent insert/delete.
 */
static void *iterator_func(void *ptr)
{
	struct radix_tree_root *tree = ptr;
	struct radix_tree_iter iter;
	struct item *item;
	void **slot;

	while (!stop_iteration) {
		rcu_read_lock();
		radix_tree_for_each_slot(slot, tree, &iter, 0) {
			item = radix_tree_deref_slot(slot);

			/* entry deleted under us - skip */
			if (!item)
				continue;
			/* entry moved by a concurrent modification - restart */
			if (radix_tree_deref_retry(item)) {
				slot = radix_tree_iter_retry(&iter);
				continue;
			}

			item_sanity(item, iter.index);
		}
		rcu_read_unlock();
	}
	return NULL;
}
213
214static void multiorder_iteration_race(void)
215{
216 const int num_threads = sysconf(_SC_NPROCESSORS_ONLN);
217 pthread_t worker_thread[num_threads];
218 RADIX_TREE(tree, GFP_KERNEL);
219 int i;
220
221 pthread_create(&worker_thread[0], NULL, &creator_func, &tree);
222 for (i = 1; i < num_threads; i++)
223 pthread_create(&worker_thread[i], NULL, &iterator_func, &tree);
224
225 for (i = 0; i < num_threads; i++)
226 pthread_join(worker_thread[i], NULL);
227
228 item_kill_tree(&tree);
229}
230
/* Entry point: run all multi-order radix tree tests, then exercise the
 * CPU-dead callback to free per-CPU preallocated nodes.
 */
void multiorder_checks(void)
{
	multiorder_iteration();
	multiorder_tagged_iteration();
	multiorder_iteration_race();

	radix_tree_cpu_dead(0);
}
Matthew Wilcox8ac04862016-12-18 22:56:05 -0500239
/*
 * Standalone entry point; __weak so a combined test runner providing its
 * own main() overrides this one at link time.
 */
int __weak main(void)
{
	radix_tree_init();
	multiorder_checks();
	return 0;
}