blob: 3fe401067e20ba81c762fb00ae00730de6b9502f [file] [log] [blame]
Andrew Mortond0da23b2014-08-06 16:09:46 -07001
2#define pr_fmt(fmt) "list_sort_test: " fmt
3
Dave Chinner2c761272010-01-12 17:39:16 +11004#include <linux/kernel.h>
Rasmus Villemoes7259fa02015-02-12 15:02:48 -08005#include <linux/bug.h>
6#include <linux/compiler.h>
7#include <linux/export.h>
8#include <linux/string.h>
Dave Chinner2c761272010-01-12 17:39:16 +11009#include <linux/list_sort.h>
Dave Chinner2c761272010-01-12 17:39:16 +110010#include <linux/list.h>
11
Don Mullis835cc0c2010-03-05 13:43:15 -080012#define MAX_LIST_LENGTH_BITS 20
13
14/*
15 * Returns a list organized in an intermediate format suited
16 * to chaining of merge() calls: null-terminated, no reserved or
17 * sentinel head node, "prev" links not maintained.
18 */
19static struct list_head *merge(void *priv,
20 int (*cmp)(void *priv, struct list_head *a,
21 struct list_head *b),
22 struct list_head *a, struct list_head *b)
23{
24 struct list_head head, *tail = &head;
25
26 while (a && b) {
27 /* if equal, take 'a' -- important for sort stability */
28 if ((*cmp)(priv, a, b) <= 0) {
29 tail->next = a;
30 a = a->next;
31 } else {
32 tail->next = b;
33 b = b->next;
34 }
35 tail = tail->next;
36 }
37 tail->next = a?:b;
38 return head.next;
39}
40
/*
 * Combine final list merge with restoration of standard doubly-linked
 * list structure. This approach duplicates code from merge(), but
 * runs faster than the tidier alternatives of either a separate final
 * prev-link restoration pass, or maintaining the prev links
 * throughout.
 */
static void merge_and_restore_back_links(void *priv,
				int (*cmp)(void *priv, struct list_head *a,
					struct list_head *b),
				struct list_head *head,
				struct list_head *a, struct list_head *b)
{
	struct list_head *tail = head;
	u8 count = 0;	/* u8 on purpose: wraps to 0 every 256 iterations */

	while (a && b) {
		/* if equal, take 'a' -- important for sort stability */
		if ((*cmp)(priv, a, b) <= 0) {
			tail->next = a;
			a->prev = tail;
			a = a->next;
		} else {
			tail->next = b;
			b->prev = tail;
			b = b->next;
		}
		tail = tail->next;
	}
	/* Exactly one of a/b is non-NULL; append the remainder. */
	tail->next = a ? : b;

	/* Walk the leftover chain, fixing up the prev links. */
	do {
		/*
		 * In worst cases this loop may run many iterations.
		 * Continue callbacks to the client even though no
		 * element comparison is needed, so the client's cmp()
		 * routine can invoke cond_resched() periodically.
		 * (count wraps every 256 steps; the element is
		 * compared against itself, so the result is unused.)
		 */
		if (unlikely(!(++count)))
			(*cmp)(priv, tail->next, tail->next);

		tail->next->prev = tail;
		tail = tail->next;
	} while (tail->next);

	/* Close the circle back through the sentinel head node. */
	tail->next = head;
	head->prev = tail;
}
89
/**
 * list_sort - sort a list
 * @priv: private data, opaque to list_sort(), passed to @cmp
 * @head: the list to sort
 * @cmp: the elements comparison function
 *
 * This function implements "merge sort", which has O(nlog(n))
 * complexity.  It is a bottom-up merge: part[lev] holds an
 * already-sorted sublist, and inserting each new element works like
 * incrementing a binary counter, merging (carrying) up through the
 * occupied slots.
 *
 * The comparison function @cmp must return a negative value if @a
 * should sort before @b, and a positive value if @a should sort after
 * @b. If @a and @b are equivalent, and their original relative
 * ordering is to be preserved, @cmp must return 0.
 */
void list_sort(void *priv, struct list_head *head,
		int (*cmp)(void *priv, struct list_head *a,
			struct list_head *b))
{
	struct list_head *part[MAX_LIST_LENGTH_BITS+1]; /* sorted partial lists
						-- last slot is a sentinel */
	int lev;  /* index into part[] */
	int max_lev = 0;
	struct list_head *list;

	if (list_empty(head))
		return;

	memset(part, 0, sizeof(part));

	/* Detach into a NULL-terminated singly linked list. */
	head->prev->next = NULL;
	list = head->next;

	while (list) {
		struct list_head *cur = list;
		list = list->next;
		cur->next = NULL;

		/* Carry: merge with every occupied slot below the first hole. */
		for (lev = 0; part[lev]; lev++) {
			cur = merge(priv, cmp, part[lev], cur);
			part[lev] = NULL;
		}
		if (lev > max_lev) {
			if (unlikely(lev >= ARRAY_SIZE(part)-1)) {
				/*
				 * List longer than 2^MAX_LIST_LENGTH_BITS:
				 * clamp to the top slot; still sorts
				 * correctly, just less efficiently.
				 */
				printk_once(KERN_DEBUG "list too long for efficiency\n");
				lev--;
			}
			max_lev = lev;
		}
		part[lev] = cur;
	}

	/* Merge all remaining partial lists into one. */
	for (lev = 0; lev < max_lev; lev++)
		if (part[lev])
			list = merge(priv, cmp, part[lev], list);

	/* Final merge also rebuilds the prev links and the cycle. */
	merge_and_restore_back_links(priv, cmp, head, part[max_lev], list);
}
EXPORT_SYMBOL(list_sort);
148
Artem Bityutskiy6d411e62010-10-26 14:23:05 -0700149#ifdef CONFIG_TEST_LIST_SORT
Artem Bityutskiyeeee9eb2010-10-26 14:23:06 -0700150
Rasmus Villemoes7259fa02015-02-12 15:02:48 -0800151#include <linux/slab.h>
Artem Bityutskiyeeee9eb2010-10-26 14:23:06 -0700152#include <linux/random.h>
153
Don Mullis835cc0c2010-03-05 13:43:15 -0800154/*
155 * The pattern of set bits in the list length determines which cases
156 * are hit in list_sort().
157 */
Artem Bityutskiyeeee9eb2010-10-26 14:23:06 -0700158#define TEST_LIST_LEN (512+128+2) /* not including head */
Don Mullis835cc0c2010-03-05 13:43:15 -0800159
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700160#define TEST_POISON1 0xDEADBEEF
161#define TEST_POISON2 0xA324354C
162
/* Test list element: payload bracketed by corruption canaries. */
struct debug_el {
	unsigned int poison1;	/* canary before 'list'; must stay TEST_POISON1 */
	struct list_head list;
	unsigned int poison2;	/* canary after 'list'; must stay TEST_POISON2 */
	int value;		/* sort key compared by cmp() */
	unsigned serial;	/* original position; used to verify stability */
};
170
171/* Array, containing pointers to all elements in the test list */
172static struct debug_el **elts __initdata;
173
174static int __init check(struct debug_el *ela, struct debug_el *elb)
175{
176 if (ela->serial >= TEST_LIST_LEN) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700177 pr_err("error: incorrect serial %d\n", ela->serial);
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700178 return -EINVAL;
179 }
180 if (elb->serial >= TEST_LIST_LEN) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700181 pr_err("error: incorrect serial %d\n", elb->serial);
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700182 return -EINVAL;
183 }
184 if (elts[ela->serial] != ela || elts[elb->serial] != elb) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700185 pr_err("error: phantom element\n");
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700186 return -EINVAL;
187 }
188 if (ela->poison1 != TEST_POISON1 || ela->poison2 != TEST_POISON2) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700189 pr_err("error: bad poison: %#x/%#x\n",
190 ela->poison1, ela->poison2);
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700191 return -EINVAL;
192 }
193 if (elb->poison1 != TEST_POISON1 || elb->poison2 != TEST_POISON2) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700194 pr_err("error: bad poison: %#x/%#x\n",
195 elb->poison1, elb->poison2);
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700196 return -EINVAL;
197 }
198 return 0;
199}
200
201static int __init cmp(void *priv, struct list_head *a, struct list_head *b)
202{
203 struct debug_el *ela, *elb;
204
205 ela = container_of(a, struct debug_el, list);
206 elb = container_of(b, struct debug_el, list);
207
208 check(ela, elb);
209 return ela->value - elb->value;
210}
211
Don Mullis835cc0c2010-03-05 13:43:15 -0800212static int __init list_sort_test(void)
213{
Rasmus Villemoes27d555d2014-08-06 16:09:38 -0700214 int i, count = 1, err = -ENOMEM;
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700215 struct debug_el *el;
Rasmus Villemoes69412302014-08-06 16:09:42 -0700216 struct list_head *cur;
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700217 LIST_HEAD(head);
Don Mullis835cc0c2010-03-05 13:43:15 -0800218
Andrew Mortond0da23b2014-08-06 16:09:46 -0700219 pr_debug("start testing list_sort()\n");
Don Mullis835cc0c2010-03-05 13:43:15 -0800220
Rasmus Villemoes69412302014-08-06 16:09:42 -0700221 elts = kcalloc(TEST_LIST_LEN, sizeof(*elts), GFP_KERNEL);
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700222 if (!elts) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700223 pr_err("error: cannot allocate memory\n");
Rasmus Villemoes69412302014-08-06 16:09:42 -0700224 return err;
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700225 }
226
Artem Bityutskiyeeee9eb2010-10-26 14:23:06 -0700227 for (i = 0; i < TEST_LIST_LEN; i++) {
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700228 el = kmalloc(sizeof(*el), GFP_KERNEL);
229 if (!el) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700230 pr_err("error: cannot allocate memory\n");
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700231 goto exit;
232 }
Don Mullis835cc0c2010-03-05 13:43:15 -0800233 /* force some equivalencies */
Akinobu Mitaf39fee52013-04-29 16:21:28 -0700234 el->value = prandom_u32() % (TEST_LIST_LEN / 3);
Don Mullis835cc0c2010-03-05 13:43:15 -0800235 el->serial = i;
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700236 el->poison1 = TEST_POISON1;
237 el->poison2 = TEST_POISON2;
238 elts[i] = el;
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700239 list_add_tail(&el->list, &head);
Don Mullis835cc0c2010-03-05 13:43:15 -0800240 }
Don Mullis835cc0c2010-03-05 13:43:15 -0800241
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700242 list_sort(NULL, &head, cmp);
Don Mullis835cc0c2010-03-05 13:43:15 -0800243
Rasmus Villemoes27d555d2014-08-06 16:09:38 -0700244 err = -EINVAL;
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700245 for (cur = head.next; cur->next != &head; cur = cur->next) {
246 struct debug_el *el1;
247 int cmp_result;
248
Don Mullis835cc0c2010-03-05 13:43:15 -0800249 if (cur->next->prev != cur) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700250 pr_err("error: list is corrupted\n");
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700251 goto exit;
252 }
253
254 cmp_result = cmp(NULL, cur, cur->next);
255 if (cmp_result > 0) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700256 pr_err("error: list is not sorted\n");
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700257 goto exit;
258 }
259
260 el = container_of(cur, struct debug_el, list);
261 el1 = container_of(cur->next, struct debug_el, list);
262 if (cmp_result == 0 && el->serial >= el1->serial) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700263 pr_err("error: order of equivalent elements not "
264 "preserved\n");
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700265 goto exit;
Don Mullis835cc0c2010-03-05 13:43:15 -0800266 }
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700267
268 if (check(el, el1)) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700269 pr_err("error: element check failed\n");
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700270 goto exit;
271 }
Don Mullis835cc0c2010-03-05 13:43:15 -0800272 count++;
273 }
Rasmus Villemoes9d418dc2014-08-06 16:09:40 -0700274 if (head.prev != cur) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700275 pr_err("error: list is corrupted\n");
Rasmus Villemoes9d418dc2014-08-06 16:09:40 -0700276 goto exit;
277 }
278
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700279
Artem Bityutskiyeeee9eb2010-10-26 14:23:06 -0700280 if (count != TEST_LIST_LEN) {
Andrew Mortond0da23b2014-08-06 16:09:46 -0700281 pr_err("error: bad list length %d", count);
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700282 goto exit;
Don Mullis835cc0c2010-03-05 13:43:15 -0800283 }
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700284
285 err = 0;
286exit:
Rasmus Villemoes69412302014-08-06 16:09:42 -0700287 for (i = 0; i < TEST_LIST_LEN; i++)
288 kfree(elts[i]);
Artem Bityutskiy041b78f2010-10-26 14:23:08 -0700289 kfree(elts);
Artem Bityutskiyf3dc0e32010-10-26 14:23:07 -0700290 return err;
Don Mullis835cc0c2010-03-05 13:43:15 -0800291}
Paul Gortmaker4c7217f2015-05-01 21:57:34 -0400292late_initcall(list_sort_test);
Artem Bityutskiy6d411e62010-10-26 14:23:05 -0700293#endif /* CONFIG_TEST_LIST_SORT */