// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/bug.h>
#include <linux/compiler.h>
#include <linux/export.h>
#include <linux/string.h>
#include <linux/list_sort.h>
#include <linux/list.h>

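/*
 * MAX_LIST_LENGTH_BITS bounds the part[] array in list_sort() below:
 * lists of up to roughly 2^20 elements are sorted with fully balanced
 * merges, while longer lists are still sorted correctly, just less
 * efficiently (see the "list too long for efficiency" warning in
 * list_sort()).
 */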
#define MAX_LIST_LENGTH_BITS 20

/*
 * Returns a list organized in an intermediate format suited
 * to chaining of merge() calls: null-terminated, no reserved or
 * sentinel head node, "prev" links not maintained.
 */
static struct list_head *merge(void *priv,
				int (*cmp)(void *priv, struct list_head *a,
					struct list_head *b),
				struct list_head *a, struct list_head *b)
{
	struct list_head head, *tail = &head;

	while (a && b) {
		/* if equal, take 'a' -- important for sort stability */
		if ((*cmp)(priv, a, b) <= 0) {
			tail->next = a;
			a = a->next;
		} else {
			tail->next = b;
			b = b->next;
		}
		tail = tail->next;
	}
	tail->next = a ? : b;
	return head.next;
}

/*
 * Combine final list merge with restoration of standard doubly-linked
 * list structure. This approach duplicates code from merge(), but
 * runs faster than the tidier alternatives of either a separate final
 * prev-link restoration pass, or maintaining the prev links
 * throughout.
 */
static void merge_and_restore_back_links(void *priv,
				int (*cmp)(void *priv, struct list_head *a,
					struct list_head *b),
				struct list_head *head,
				struct list_head *a, struct list_head *b)
{
	struct list_head *tail = head;
	u8 count = 0;

	while (a && b) {
		/* if equal, take 'a' -- important for sort stability */
		if ((*cmp)(priv, a, b) <= 0) {
			tail->next = a;
			a->prev = tail;
			a = a->next;
		} else {
			tail->next = b;
			b->prev = tail;
			b = b->next;
		}
		tail = tail->next;
	}
	tail->next = a ? : b;

	do {
		/*
		 * In worst cases this loop may run many iterations.
		 * Continue callbacks to the client even though no
		 * element comparison is needed, so the client's cmp()
		 * routine can invoke cond_resched() periodically.
		 */
		if (unlikely(!(++count)))
			(*cmp)(priv, tail->next, tail->next);

		tail->next->prev = tail;
		tail = tail->next;
	} while (tail->next);

	tail->next = head;
	head->prev = tail;
}

/**
 * list_sort - sort a list
 * @priv: private data, opaque to list_sort(), passed to @cmp
 * @head: the list to sort
 * @cmp: the elements comparison function
 *
 * This function implements "merge sort", which has O(n log(n))
 * complexity.
 *
 * The comparison function @cmp must return a negative value if @a
 * should sort before @b, and a positive value if @a should sort after
 * @b. If @a and @b are equivalent, and their original relative
 * ordering is to be preserved, @cmp must return 0.
 */
void list_sort(void *priv, struct list_head *head,
		int (*cmp)(void *priv, struct list_head *a,
			struct list_head *b))
{
	struct list_head *part[MAX_LIST_LENGTH_BITS+1]; /* sorted partial lists
						-- last slot is a sentinel */
	int lev;  /* index into part[] */
	int max_lev = 0;
	struct list_head *list;

	if (list_empty(head))
		return;

	memset(part, 0, sizeof(part));

	head->prev->next = NULL;
	list = head->next;

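	/*
	 * Bottom-up merge: each element enters as a one-node sublist and
	 * part[] behaves like a binary counter -- a non-NULL part[lev]
	 * normally holds a sorted sublist of 2^lev elements, and filling
	 * a slot "carries" a merge up to the next level.
	 */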
	while (list) {
		struct list_head *cur = list;
		list = list->next;
		cur->next = NULL;

		for (lev = 0; part[lev]; lev++) {
			cur = merge(priv, cmp, part[lev], cur);
			part[lev] = NULL;
		}
		if (lev > max_lev) {
			if (unlikely(lev >= ARRAY_SIZE(part)-1)) {
				printk_once(KERN_DEBUG "list too long for efficiency\n");
				lev--;
			}
			max_lev = lev;
		}
		part[lev] = cur;
	}

	for (lev = 0; lev < max_lev; lev++)
		if (part[lev])
			list = merge(priv, cmp, part[lev], list);

	merge_and_restore_back_links(priv, cmp, head, part[max_lev], list);
}
EXPORT_SYMBOL(list_sort);
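
/*
 * Minimal usage sketch for the @cmp contract documented at list_sort()
 * above. "struct foo", foo_cmp() and my_list are purely illustrative
 * and are not part of this file or of any kernel API:
 *
 *	struct foo {
 *		int key;
 *		struct list_head list;
 *	};
 *
 *	static int foo_cmp(void *priv, struct list_head *a,
 *			   struct list_head *b)
 *	{
 *		struct foo *fa = list_entry(a, struct foo, list);
 *		struct foo *fb = list_entry(b, struct foo, list);
 *
 *		if (fa->key < fb->key)
 *			return -1;
 *		if (fa->key > fb->key)
 *			return 1;
 *		return 0;
 *	}
 *
 * Returning 0 for equal keys keeps their original relative order, since
 * the sort is stable. Sorting a list_head-headed list of struct foo
 * nodes by key is then just:
 *
 *	list_sort(NULL, &my_list, foo_cmp);
 */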