#include <asm/bug.h>
#include <linux/rbtree_augmented.h>
#include "drbd_interval.h"

/**
 * interval_end - return end of @node
 */
static inline
sector_t interval_end(struct rb_node *node)
{
	struct drbd_interval *this = rb_entry(node, struct drbd_interval, rb);
	return this->end;
}

/**
 * compute_subtree_last - compute end of @node
 *
 * The end of an interval is the highest (start + (size >> 9)) value of this
 * node and of its children.  Called for @node and its parents whenever the end
 * may have changed.
 */
static inline sector_t
compute_subtree_last(struct drbd_interval *node)
{
	sector_t max = node->sector + (node->size >> 9);

	if (node->rb.rb_left) {
		sector_t left = interval_end(node->rb.rb_left);
		if (left > max)
			max = left;
	}
	if (node->rb.rb_right) {
		sector_t right = interval_end(node->rb.rb_right);
		if (right > max)
			max = right;
	}
	return max;
}

RB_DECLARE_CALLBACKS(static, augment_callbacks, struct drbd_interval, rb,
		     sector_t, end, compute_subtree_last);
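
/*
 * Worked example of the augmented value (illustrative numbers, not taken
 * from this file): an interval with ->sector = 8 and ->size = 4096 bytes
 * covers sectors [8, 8 + (4096 >> 9)) = [8, 16), so its own end candidate
 * is 16.  If one of its children has ->end = 24, compute_subtree_last()
 * returns 24, and the callbacks declared above store that subtree maximum
 * in ->end whenever insertions, removals or rotations may have changed it.
 */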

/**
 * drbd_insert_interval - insert a new interval into a tree
 */
bool
drbd_insert_interval(struct rb_root *root, struct drbd_interval *this)
{
	struct rb_node **new = &root->rb_node, *parent = NULL;
	sector_t this_end = this->sector + (this->size >> 9);

	BUG_ON(!IS_ALIGNED(this->size, 512));

	while (*new) {
		struct drbd_interval *here =
			rb_entry(*new, struct drbd_interval, rb);

		parent = *new;
		if (here->end < this_end)
			here->end = this_end;
		if (this->sector < here->sector)
			new = &(*new)->rb_left;
		else if (this->sector > here->sector)
			new = &(*new)->rb_right;
		else if (this < here)
			new = &(*new)->rb_left;
		else if (this > here)
			new = &(*new)->rb_right;
		else
			return false;
	}

	this->end = this_end;
	rb_link_node(&this->rb, parent, new);
	rb_insert_augmented(&this->rb, root, &augment_callbacks);
	return true;
}
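
/*
 * Usage sketch (caller side, not part of this file; field names follow
 * struct drbd_interval, everything else is assumed): the caller embeds a
 * struct drbd_interval in its own request object, fills in ->sector and
 * ->size (size in bytes, a multiple of 512), and inserts it.  Inserting
 * the same object at the same sector a second time returns false:
 *
 *	i->sector = sector;
 *	i->size = size;
 *	if (!drbd_insert_interval(tree, i))
 *		pr_warn("interval already in tree\n");
 */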

/**
 * drbd_contains_interval - check if a tree contains a given interval
 * @sector: start sector of @interval
 * @interval: may not be a valid pointer
 *
 * Returns whether the tree contains the node @interval with start sector
 * @sector.  Does not dereference @interval until @interval is known to be a
 * valid object in @root.  Returns %false if @interval is in the tree but with
 * a different sector number.
 */
bool
drbd_contains_interval(struct rb_root *root, sector_t sector,
		       struct drbd_interval *interval)
{
	struct rb_node *node = root->rb_node;

	while (node) {
		struct drbd_interval *here =
			rb_entry(node, struct drbd_interval, rb);

		if (sector < here->sector)
			node = node->rb_left;
		else if (sector > here->sector)
			node = node->rb_right;
		else if (interval < here)
			node = node->rb_left;
		else if (interval > here)
			node = node->rb_right;
		else
			return true;
	}
	return false;
}

/**
 * drbd_remove_interval - remove an interval from a tree
 */
void
drbd_remove_interval(struct rb_root *root, struct drbd_interval *this)
{
	rb_erase_augmented(&this->rb, root, &augment_callbacks);
}

/**
 * drbd_find_overlap - search for an interval overlapping with [sector, sector + size)
 * @sector: start sector
 * @size: size, aligned to 512 bytes
 *
 * Returns an interval overlapping with [sector, sector + size), or NULL if
 * there is none.  When there is more than one overlapping interval in the
 * tree, the interval with the lowest start sector is returned, and all other
 * overlapping intervals will be on the right side of the tree, reachable with
 * rb_next().
 */
struct drbd_interval *
drbd_find_overlap(struct rb_root *root, sector_t sector, unsigned int size)
{
	struct rb_node *node = root->rb_node;
	struct drbd_interval *overlap = NULL;
	sector_t end = sector + (size >> 9);

	BUG_ON(!IS_ALIGNED(size, 512));

	while (node) {
		struct drbd_interval *here =
			rb_entry(node, struct drbd_interval, rb);

		if (node->rb_left &&
		    sector < interval_end(node->rb_left)) {
			/* Overlap if any must be on left side */
			node = node->rb_left;
		} else if (here->sector < end &&
			   sector < here->sector + (here->size >> 9)) {
			overlap = here;
			break;
		} else if (sector >= here->sector) {
			/* Overlap if any must be on right side */
			node = node->rb_right;
		} else
			break;
	}
	return overlap;
}

struct drbd_interval *
drbd_next_overlap(struct drbd_interval *i, sector_t sector, unsigned int size)
{
	sector_t end = sector + (size >> 9);
	struct rb_node *node;

	for (;;) {
		node = rb_next(&i->rb);
		if (!node)
			return NULL;
		i = rb_entry(node, struct drbd_interval, rb);
		if (i->sector >= end)
			return NULL;
		if (sector < i->sector + (i->size >> 9))
			return i;
	}
}
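
/*
 * Usage sketch (caller side, not part of this file): iterating over all
 * intervals that conflict with a request covering [sector, sector + size).
 * "tree" and handle_conflict() are assumed names for the caller's interval
 * tree and conflict handler:
 *
 *	struct drbd_interval *i;
 *
 *	for (i = drbd_find_overlap(tree, sector, size); i;
 *	     i = drbd_next_overlap(i, sector, size))
 *		handle_conflict(i);
 */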