Diffstat (limited to 'lib')
-rw-r--r--  lib/sort.c  30  ++++++++++++++++--------------
1 file changed, 16 insertions, 14 deletions
diff --git a/lib/sort.c b/lib/sort.c
index 6abbaf3d5858..926d00429ed2 100644
--- a/lib/sort.c
+++ b/lib/sort.c
@@ -32,11 +32,11 @@ static void generic_swap(void *a, void *b, int size)
  * @base: pointer to data to sort
  * @num: number of elements
  * @size: size of each element
- * @cmp: pointer to comparison function
- * @swap: pointer to swap function or NULL
+ * @cmp_func: pointer to comparison function
+ * @swap_func: pointer to swap function or NULL
  *
  * This function does a heapsort on the given array. You may provide a
- * swap function optimized to your element type.
+ * swap_func function optimized to your element type.
  *
  * Sorting time is O(n log n) both on average and worst-case. While
  * qsort is about 20% faster on average, it suffers from exploitable
@@ -45,37 +45,39 @@ static void generic_swap(void *a, void *b, int size)
  */
 
 void sort(void *base, size_t num, size_t size,
-          int (*cmp)(const void *, const void *),
-          void (*swap)(void *, void *, int size))
+          int (*cmp_func)(const void *, const void *),
+          void (*swap_func)(void *, void *, int size))
 {
         /* pre-scale counters for performance */
         int i = (num/2 - 1) * size, n = num * size, c, r;
 
-        if (!swap)
-                swap = (size == 4 ? u32_swap : generic_swap);
+        if (!swap_func)
+                swap_func = (size == 4 ? u32_swap : generic_swap);
 
         /* heapify */
         for ( ; i >= 0; i -= size) {
                 for (r = i; r * 2 + size < n; r = c) {
                         c = r * 2 + size;
-                        if (c < n - size && cmp(base + c, base + c + size) < 0)
+                        if (c < n - size &&
+                            cmp_func(base + c, base + c + size) < 0)
                                 c += size;
-                        if (cmp(base + r, base + c) >= 0)
+                        if (cmp_func(base + r, base + c) >= 0)
                                 break;
-                        swap(base + r, base + c, size);
+                        swap_func(base + r, base + c, size);
                 }
         }
 
         /* sort */
         for (i = n - size; i > 0; i -= size) {
-                swap(base, base + i, size);
+                swap_func(base, base + i, size);
                 for (r = 0; r * 2 + size < i; r = c) {
                         c = r * 2 + size;
-                        if (c < i - size && cmp(base + c, base + c + size) < 0)
+                        if (c < i - size &&
+                            cmp_func(base + c, base + c + size) < 0)
                                 c += size;
-                        if (cmp(base + r, base + c) >= 0)
+                        if (cmp_func(base + r, base + c) >= 0)
                                 break;
-                        swap(base + r, base + c, size);
+                        swap_func(base + r, base + c, size);
                 }
         }
 }
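
For reference, a minimal usage sketch of sort() after this rename (not part of the patch). It assumes kernel context with <linux/sort.h>; the element type and the helper names cmp_u32 and sort_example are illustrative, not taken from the commit.

/*
 * Usage sketch (illustrative): a caller supplies its own cmp_func and
 * passes NULL for swap_func, so sort() falls back to u32_swap for
 * 4-byte elements.  Helper names here are hypothetical.
 */
#include <linux/sort.h>
#include <linux/types.h>

static int cmp_u32(const void *a, const void *b)
{
        const u32 x = *(const u32 *)a;
        const u32 y = *(const u32 *)b;

        return x < y ? -1 : (x > y ? 1 : 0);
}

static void sort_example(u32 *vals, size_t n)
{
        /* NULL swap_func: sort() selects u32_swap since size == 4 */
        sort(vals, n, sizeof(*vals), cmp_u32, NULL);
}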