Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0 |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 2 | /* |
| 3 | * A fast, small, non-recursive O(nlog n) sort for the Linux kernel |
| 4 | * |
| 5 | * Jan 23 2005 Matt Mackall <mpm@selenic.com> |
| 6 | */ |
| 7 | |
Kostenzer Felix | c5adae9 | 2017-02-24 15:01:07 -0800 | [diff] [blame] | 8 | #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt |
| 9 | |
Rasmus Villemoes | 42cf809 | 2015-02-12 15:02:35 -0800 | [diff] [blame] | 10 | #include <linux/types.h> |
| 11 | #include <linux/export.h> |
Adrian Bunk | ecec4cb | 2005-09-10 00:26:59 -0700 | [diff] [blame] | 12 | #include <linux/sort.h> |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 13 | |
Daniel Wagner | ca96ab8 | 2015-06-25 15:02:14 -0700 | [diff] [blame] | 14 | static int alignment_ok(const void *base, int align) |
| 15 | { |
| 16 | return IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || |
| 17 | ((unsigned long)base & (align - 1)) == 0; |
| 18 | } |
| 19 | |
Adrian Bunk | ecec4cb | 2005-09-10 00:26:59 -0700 | [diff] [blame] | 20 | static void u32_swap(void *a, void *b, int size) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 21 | { |
| 22 | u32 t = *(u32 *)a; |
| 23 | *(u32 *)a = *(u32 *)b; |
| 24 | *(u32 *)b = t; |
| 25 | } |
| 26 | |
Daniel Wagner | ca96ab8 | 2015-06-25 15:02:14 -0700 | [diff] [blame] | 27 | static void u64_swap(void *a, void *b, int size) |
| 28 | { |
| 29 | u64 t = *(u64 *)a; |
| 30 | *(u64 *)a = *(u64 *)b; |
| 31 | *(u64 *)b = t; |
| 32 | } |
| 33 | |
/*
 * generic_swap - byte-by-byte swap of two elements of arbitrary size
 * @a: pointer to the first element
 * @b: pointer to the second element
 * @size: element size in bytes; callers always pass size >= 1 (the
 *        do-while body executes once even for size <= 0)
 *
 * Fallback used when no size/alignment-specific helper applies.  Works
 * through char pointers: arithmetic on void * (as in the previous
 * "*(char *)a++" form) is a GNU extension, not ISO C, and the
 * cast-then-increment was easy to misread.
 */
static void generic_swap(void *a, void *b, int size)
{
	char *x = a;
	char *y = b;
	char t;

	do {
		t = *x;
		*x++ = *y;
		*y++ = t;
	} while (--size > 0);
}
| 44 | |
Robert P. J. Day | 72fd4a3 | 2007-02-10 01:45:59 -0800 | [diff] [blame] | 45 | /** |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 46 | * sort - sort an array of elements |
| 47 | * @base: pointer to data to sort |
| 48 | * @num: number of elements |
| 49 | * @size: size of each element |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 50 | * @cmp_func: pointer to comparison function |
| 51 | * @swap_func: pointer to swap function or NULL |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 52 | * |
| 53 | * This function does a heapsort on the given array. You may provide a |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 54 | * swap_func function optimized to your element type. |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 55 | * |
| 56 | * Sorting time is O(n log n) both on average and worst-case. While |
| 57 | * qsort is about 20% faster on average, it suffers from exploitable |
| 58 | * O(n*n) worst-case behavior and extra memory requirements that make |
| 59 | * it less suitable for kernel use. |
| 60 | */ |
| 61 | |
| 62 | void sort(void *base, size_t num, size_t size, |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 63 | int (*cmp_func)(const void *, const void *), |
| 64 | void (*swap_func)(void *, void *, int size)) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 65 | { |
| 66 | /* pre-scale counters for performance */ |
keios | d3717bd | 2006-10-03 01:13:49 -0700 | [diff] [blame] | 67 | int i = (num/2 - 1) * size, n = num * size, c, r; |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 68 | |
Daniel Wagner | ca96ab8 | 2015-06-25 15:02:14 -0700 | [diff] [blame] | 69 | if (!swap_func) { |
| 70 | if (size == 4 && alignment_ok(base, 4)) |
| 71 | swap_func = u32_swap; |
| 72 | else if (size == 8 && alignment_ok(base, 8)) |
| 73 | swap_func = u64_swap; |
| 74 | else |
| 75 | swap_func = generic_swap; |
| 76 | } |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 77 | |
| 78 | /* heapify */ |
| 79 | for ( ; i >= 0; i -= size) { |
keios | d3717bd | 2006-10-03 01:13:49 -0700 | [diff] [blame] | 80 | for (r = i; r * 2 + size < n; r = c) { |
| 81 | c = r * 2 + size; |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 82 | if (c < n - size && |
| 83 | cmp_func(base + c, base + c + size) < 0) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 84 | c += size; |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 85 | if (cmp_func(base + r, base + c) >= 0) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 86 | break; |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 87 | swap_func(base + r, base + c, size); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 88 | } |
| 89 | } |
| 90 | |
| 91 | /* sort */ |
Subbaiah Venkata | 995e428 | 2007-10-16 23:27:06 -0700 | [diff] [blame] | 92 | for (i = n - size; i > 0; i -= size) { |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 93 | swap_func(base, base + i, size); |
keios | d3717bd | 2006-10-03 01:13:49 -0700 | [diff] [blame] | 94 | for (r = 0; r * 2 + size < i; r = c) { |
| 95 | c = r * 2 + size; |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 96 | if (c < i - size && |
| 97 | cmp_func(base + c, base + c + size) < 0) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 98 | c += size; |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 99 | if (cmp_func(base + r, base + c) >= 0) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 100 | break; |
Wu Fengguang | b53907c | 2009-01-07 18:09:11 -0800 | [diff] [blame] | 101 | swap_func(base + r, base + c, size); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 102 | } |
| 103 | } |
| 104 | } |
| 105 | |
| 106 | EXPORT_SYMBOL(sort); |