/*
 * taken from https://github.com/swenson/sort
 * Kept as is for the moment to be able to apply upstream patches for that
 * code, currently used only to speed up XPath node sorting, see xpath.c
 */

/*
 * All code in this header, unless otherwise specified, is hereby licensed under the MIT Public License:

Copyright (c) 2010 Christopher Swenson

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifdef HAVE_STDINT_H
#include <stdint.h>
#else
#ifdef HAVE_INTTYPES_H
#include <inttypes.h>
#elif defined(WIN32)
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
#endif
#endif

#ifndef MK_UINT64
#if defined(WIN32) && defined(_MSC_VER) && _MSC_VER < 1300
#define MK_UINT64(x) ((uint64_t)(x))
#else
#define MK_UINT64(x) x##ULL
#endif
#endif

#ifndef MAX
#define MAX(x,y) (((x) > (y) ? (x) : (y)))
#endif
#ifndef MIN
#define MIN(x,y) (((x) < (y) ? (x) : (y)))
#endif

int compute_minrun(uint64_t);

#ifndef CLZ
#if defined(__GNUC__) && ((__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || (__GNUC__ > 3))
#define CLZ __builtin_clzll
#else

int clzll(uint64_t);

/* adapted from Hacker's Delight */
int clzll(uint64_t x) /* {{{ */
{
  int n;

  if (x == 0) return(64);
  n = 0;
  if (x <= MK_UINT64(0x00000000FFFFFFFF)) {n = n + 32; x = x << 32;}
  if (x <= MK_UINT64(0x0000FFFFFFFFFFFF)) {n = n + 16; x = x << 16;}
  if (x <= MK_UINT64(0x00FFFFFFFFFFFFFF)) {n = n + 8; x = x << 8;}
  if (x <= MK_UINT64(0x0FFFFFFFFFFFFFFF)) {n = n + 4; x = x << 4;}
  if (x <= MK_UINT64(0x3FFFFFFFFFFFFFFF)) {n = n + 2; x = x << 2;}
  if (x <= MK_UINT64(0x7FFFFFFFFFFFFFFF)) {n = n + 1;}
  return n;
}
/* }}} */

#define CLZ clzll
#endif
#endif
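/*
 * compute_minrun: pick the minimum run length for a tim sort of "size"
 * elements. As in Tim Peters' listsort.txt, this keeps the six most
 * significant bits of size and adds one if any of the remaining bits are
 * set, so that size / minrun is a power of two, or slightly less than one.
 */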
int compute_minrun(uint64_t size) /* {{{ */
{
  const int top_bit = 64 - CLZ(size);
  const int shift = MAX(top_bit, 6) - 6;
  const int minrun = size >> shift;
  const uint64_t mask = (MK_UINT64(1) << shift) - 1;
  if (mask & size) return minrun + 1;
  return minrun;
}
/* }}} */

#ifndef SORT_NAME
#error "Must declare SORT_NAME"
#endif

#ifndef SORT_TYPE
#error "Must declare SORT_TYPE"
#endif

#ifndef SORT_CMP
#define SORT_CMP(x, y) ((x) < (y) ? -1 : ((x) == (y) ? 0 : 1))
#endif
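/*
 * This header is a macro template: the includer defines SORT_NAME,
 * SORT_TYPE and optionally SORT_CMP before including it, and gets
 * <SORT_NAME>_binary_insertion_sort() and <SORT_NAME>_tim_sort() in
 * return. A minimal sketch of an instantiation (the names below are
 * illustrative only, not the ones used by xpath.c):
 *
 *   #define SORT_NAME mysort
 *   #define SORT_TYPE int
 *   #define SORT_CMP(x, y) ((x) < (y) ? -1 : ((x) == (y) ? 0 : 1))
 *   #include "timsort.h"
 *
 *   int values[4] = { 3, 1, 4, 1 };
 *   mysort_tim_sort(values, 4);
 */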
#define SORT_SWAP(x,y) {SORT_TYPE __SORT_SWAP_t = (x); (x) = (y); (y) = __SORT_SWAP_t;}

#define SORT_CONCAT(x, y) x ## _ ## y
#define SORT_MAKE_STR1(x, y) SORT_CONCAT(x,y)
#define SORT_MAKE_STR(x) SORT_MAKE_STR1(SORT_NAME,x)

#define BINARY_INSERTION_FIND SORT_MAKE_STR(binary_insertion_find)
#define BINARY_INSERTION_SORT_START SORT_MAKE_STR(binary_insertion_sort_start)
#define BINARY_INSERTION_SORT SORT_MAKE_STR(binary_insertion_sort)
#define REVERSE_ELEMENTS SORT_MAKE_STR(reverse_elements)
#define COUNT_RUN SORT_MAKE_STR(count_run)
#define CHECK_INVARIANT SORT_MAKE_STR(check_invariant)
#define TIM_SORT SORT_MAKE_STR(tim_sort)
#define TIM_SORT_RESIZE SORT_MAKE_STR(tim_sort_resize)
#define TIM_SORT_MERGE SORT_MAKE_STR(tim_sort_merge)
#define TIM_SORT_COLLAPSE SORT_MAKE_STR(tim_sort_collapse)

#define TIM_SORT_RUN_T SORT_MAKE_STR(tim_sort_run_t)
#define TEMP_STORAGE_T SORT_MAKE_STR(temp_storage_t)

typedef struct {
  int64_t start;
  int64_t length;
} TIM_SORT_RUN_T;

void BINARY_INSERTION_SORT(SORT_TYPE *dst, const size_t size);
void TIM_SORT(SORT_TYPE *dst, const size_t size);

/* Function used to do a binary search for binary insertion sort */
static int64_t BINARY_INSERTION_FIND(SORT_TYPE *dst, const SORT_TYPE x, const size_t size)
{
  int64_t l, c, r;
  SORT_TYPE lx;
  SORT_TYPE cx;
  l = 0;
  r = size - 1;
  c = r >> 1;
  lx = dst[l];

  /* check for beginning conditions */
  if (SORT_CMP(x, lx) < 0)
    return 0;
  else if (SORT_CMP(x, lx) == 0)
  {
    int64_t i = 1;
    while (SORT_CMP(x, dst[i]) == 0) i++;
    return i;
  }

  cx = dst[c];
  while (1)
  {
    const int val = SORT_CMP(x, cx);
    if (val < 0)
    {
      if (c - l <= 1) return c;
      r = c;
    }
    else if (val > 0)
    {
      if (r - c <= 1) return c + 1;
      l = c;
      lx = cx;
    }
    else
    {
      do
      {
        cx = dst[++c];
      } while (SORT_CMP(x, cx) == 0);
      return c;
    }
    c = l + ((r - l) >> 1);
    cx = dst[c];
  }
}

/* Binary insertion sort, but knowing that the first "start" entries are sorted. Used in timsort. */
static void BINARY_INSERTION_SORT_START(SORT_TYPE *dst, const size_t start, const size_t size)
{
  int64_t i;
  for (i = start; i < (int64_t) size; i++)
  {
    int64_t j;
    SORT_TYPE x;
    int64_t location;
    /* If this entry is already correct, just move along */
    if (SORT_CMP(dst[i - 1], dst[i]) <= 0) continue;

    /* Else we need to find the right place, shift everything over, and squeeze in */
    x = dst[i];
    location = BINARY_INSERTION_FIND(dst, x, i);
    for (j = i - 1; j >= location; j--)
    {
      dst[j + 1] = dst[j];
    }
    dst[location] = x;
  }
}

/* Binary insertion sort */
void BINARY_INSERTION_SORT(SORT_TYPE *dst, const size_t size)
{
  BINARY_INSERTION_SORT_START(dst, 1, size);
}

/* timsort implementation, based on timsort.txt */

static void REVERSE_ELEMENTS(SORT_TYPE *dst, int64_t start, int64_t end)
{
  while (1)
  {
    if (start >= end) return;
    SORT_SWAP(dst[start], dst[end]);
    start++;
    end--;
  }
}
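/*
 * COUNT_RUN: length of the natural run starting at "start": either a
 * non-decreasing run, or a strictly decreasing run which is reversed in
 * place, so every run handed back to the caller is ascending.
 */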
static int64_t COUNT_RUN(SORT_TYPE *dst, const int64_t start, const size_t size)
{
  int64_t curr;
  if (size - start == 1) return 1;
  if (start >= (int64_t) size - 2)
  {
    if (SORT_CMP(dst[size - 2], dst[size - 1]) > 0)
      SORT_SWAP(dst[size - 2], dst[size - 1]);
    return 2;
  }

  curr = start + 2;

  if (SORT_CMP(dst[start], dst[start + 1]) <= 0)
  {
    /* increasing run */
    while (1)
    {
      if (curr == (int64_t) size - 1) break;
      if (SORT_CMP(dst[curr - 1], dst[curr]) > 0) break;
      curr++;
    }
    return curr - start;
  }
  else
  {
    /* decreasing run */
    while (1)
    {
      if (curr == (int64_t) size - 1) break;
      if (SORT_CMP(dst[curr - 1], dst[curr]) <= 0) break;
      curr++;
    }
    /* reverse in-place */
    REVERSE_ELEMENTS(dst, start, curr - 1);
    return curr - start;
  }
}
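/*
 * PUSH_NEXT: grab the next natural run, extend it to at least minrun
 * elements with binary insertion sort, and push it on the run stack.
 * When the end of the array is reached, the remaining runs are merged,
 * the temporary storage is released and the sort returns.
 */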
#define PUSH_NEXT() do {\
len = COUNT_RUN(dst, curr, size);\
run = minrun;\
if (run < minrun) run = minrun;\
if (run > (int64_t) size - curr) run = size - curr;\
if (run > len)\
{\
  BINARY_INSERTION_SORT_START(&dst[curr], len, run);\
  len = run;\
}\
{\
run_stack[stack_curr].start = curr;\
run_stack[stack_curr].length = len;\
stack_curr++;\
}\
curr += len;\
if (curr == (int64_t) size)\
{\
  /* finish up */ \
  while (stack_curr > 1) \
  { \
    TIM_SORT_MERGE(dst, run_stack, stack_curr, store); \
    run_stack[stack_curr - 2].length += run_stack[stack_curr - 1].length; \
    stack_curr--; \
  } \
  if (store->storage != NULL)\
  {\
    free(store->storage);\
    store->storage = NULL;\
  }\
  return;\
}\
}\
while (0)
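/*
 * CHECK_INVARIANT: returns 1 when the top of the run stack satisfies the
 * tim sort invariants, 0 when a merge is needed to restore them. For the
 * three most recently pushed run lengths A, B, C (C newest) the invariants
 * are A > B + C and B > C; with only two runs the older one must be longer.
 */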
static int CHECK_INVARIANT(TIM_SORT_RUN_T *stack, const int stack_curr)
{
  int64_t A, B, C;
  if (stack_curr < 2) return 1;
  if (stack_curr == 2)
  {
    const int64_t A1 = stack[stack_curr - 2].length;
    const int64_t B1 = stack[stack_curr - 1].length;
    if (A1 <= B1) return 0;
    return 1;
  }
  A = stack[stack_curr - 3].length;
  B = stack[stack_curr - 2].length;
  C = stack[stack_curr - 1].length;
  if ((A <= B + C) || (B <= C)) return 0;
  return 1;
}

typedef struct {
  size_t alloc;
  SORT_TYPE *storage;
} TEMP_STORAGE_T;
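/*
 * TIM_SORT_RESIZE: grow the scratch buffer used by merges so it can hold
 * at least new_size elements; on allocation failure the error is reported
 * on stderr and the process exits.
 */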
static void TIM_SORT_RESIZE(TEMP_STORAGE_T *store, const size_t new_size)
{
  if (store->alloc < new_size)
  {
    SORT_TYPE *tempstore = (SORT_TYPE *)realloc(store->storage, new_size * sizeof(SORT_TYPE));
    if (tempstore == NULL)
    {
      fprintf(stderr, "Error allocating temporary storage for tim sort: need %lu bytes", (unsigned long) (sizeof(SORT_TYPE) * new_size));
      exit(1);
    }
    store->storage = tempstore;
    store->alloc = new_size;
  }
}
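/*
 * TIM_SORT_MERGE: merge the two topmost runs A and B (A preceding B) in
 * place, copying only the smaller of the two into the scratch buffer:
 * a forward merge when A is the smaller run, a backward merge when B is.
 */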
static void TIM_SORT_MERGE(SORT_TYPE *dst, const TIM_SORT_RUN_T *stack, const int stack_curr, TEMP_STORAGE_T *store)
{
  const int64_t A = stack[stack_curr - 2].length;
  const int64_t B = stack[stack_curr - 1].length;
  const int64_t curr = stack[stack_curr - 2].start;
  SORT_TYPE *storage;
  int64_t i, j, k;

  TIM_SORT_RESIZE(store, MIN(A, B));
  storage = store->storage;

  /* left merge */
  if (A < B)
  {
    memcpy(storage, &dst[curr], A * sizeof(SORT_TYPE));
    i = 0;
    j = curr + A;

    for (k = curr; k < curr + A + B; k++)
    {
      if ((i < A) && (j < curr + A + B))
      {
        if (SORT_CMP(storage[i], dst[j]) <= 0)
          dst[k] = storage[i++];
        else
          dst[k] = dst[j++];
      }
      else if (i < A)
      {
        dst[k] = storage[i++];
      }
      else
        dst[k] = dst[j++];
    }
  }
  /* right merge */
  else
  {
    memcpy(storage, &dst[curr + A], B * sizeof(SORT_TYPE));
    i = B - 1;
    j = curr + A - 1;

    for (k = curr + A + B - 1; k >= curr; k--)
    {
      if ((i >= 0) && (j >= curr))
      {
        if (SORT_CMP(dst[j], storage[i]) > 0)
          dst[k] = dst[j--];
        else
          dst[k] = storage[i--];
      }
      else if (i >= 0)
        dst[k] = storage[i--];
      else
        dst[k] = dst[j--];
    }
  }
}
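/*
 * TIM_SORT_COLLAPSE: merge runs off the top of the stack until the
 * invariants checked by CHECK_INVARIANT hold again (or only one run is
 * left), and return the new stack depth.
 */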
static int TIM_SORT_COLLAPSE(SORT_TYPE *dst, TIM_SORT_RUN_T *stack, int stack_curr, TEMP_STORAGE_T *store, const size_t size)
{
  while (1) {
    int64_t A, B, C, D;
    int ABC, BCD, CD;

    /* if the stack only has one thing on it, we are done with the collapse */
    if (stack_curr <= 1) {
      break;
    }

    /* if this is the last merge, just do it */
    if ((stack_curr == 2) && (stack[0].length + stack[1].length == size)) {
      TIM_SORT_MERGE(dst, stack, stack_curr, store);
      stack[0].length += stack[1].length;
      stack_curr--;
      break;
    }
    /* check if the invariant is off for a stack of 2 elements */
    else if ((stack_curr == 2) && (stack[0].length <= stack[1].length)) {
      TIM_SORT_MERGE(dst, stack, stack_curr, store);
      stack[0].length += stack[1].length;
      stack_curr--;
      break;
    } else if (stack_curr == 2) {
      break;
    }

    B = stack[stack_curr - 3].length;
    C = stack[stack_curr - 2].length;
    D = stack[stack_curr - 1].length;

    if (stack_curr >= 4) {
      A = stack[stack_curr - 4].length;
      ABC = (A <= B + C);
    } else {
      ABC = 0;
    }

    BCD = (B <= C + D) || ABC;
    CD = (C <= D);

    /* Both invariants are good */
    if (!BCD && !CD) {
      break;
    }

    /* left merge */
    if (BCD && !CD) {
      TIM_SORT_MERGE(dst, stack, stack_curr - 1, store);
      stack[stack_curr - 3].length += stack[stack_curr - 2].length;
      stack[stack_curr - 2] = stack[stack_curr - 1];
      stack_curr--;
    } else {
      /* right merge */
      TIM_SORT_MERGE(dst, stack, stack_curr, store);
      stack[stack_curr - 2].length += stack[stack_curr - 1].length;
      stack_curr--;
    }
  }

  return stack_curr;
}
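/*
 * TIM_SORT: entry point. Arrays shorter than 64 elements are handled with
 * plain binary insertion sort; otherwise runs of at least minrun elements
 * are pushed onto a stack and merged whenever the stack invariants are
 * violated, until PUSH_NEXT reaches the end of the array and returns.
 */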
void TIM_SORT(SORT_TYPE *dst, const size_t size)
{
  int minrun;
  TEMP_STORAGE_T _store, *store;
  TIM_SORT_RUN_T run_stack[128];
  int stack_curr = 0;
  int64_t len, run;
  int64_t curr = 0;

  if (size < 64)
  {
    BINARY_INSERTION_SORT(dst, size);
    return;
  }

  /* compute the minimum run length */
  minrun = compute_minrun(size);

  /* temporary storage for merges */
  store = &_store;
  store->alloc = 0;
  store->storage = NULL;

  PUSH_NEXT();
  PUSH_NEXT();
  PUSH_NEXT();

  while (1)
  {
    if (!CHECK_INVARIANT(run_stack, stack_curr))
    {
      stack_curr = TIM_SORT_COLLAPSE(dst, run_stack, stack_curr, store, size);
      continue;
    }
    PUSH_NEXT();
  }
}
#undef SORT_CONCAT
#undef SORT_MAKE_STR1
#undef SORT_MAKE_STR
#undef SORT_NAME
#undef SORT_TYPE
#undef SORT_CMP
#undef TEMP_STORAGE_T
#undef TIM_SORT_RUN_T
#undef PUSH_NEXT
#undef SORT_SWAP
#undef SORT_CONCAT
#undef SORT_MAKE_STR1
#undef SORT_MAKE_STR
#undef BINARY_INSERTION_FIND
#undef BINARY_INSERTION_SORT_START
#undef BINARY_INSERTION_SORT
#undef REVERSE_ELEMENTS
#undef COUNT_RUN
#undef CHECK_INVARIANT
#undef TIM_SORT
#undef TIM_SORT_RESIZE
#undef TIM_SORT_MERGE
#undef TIM_SORT_COLLAPSE
#undef TIM_SORT_RUN_T
#undef TEMP_STORAGE_T