Jason Evans | 0657f12 | 2011-03-18 17:56:14 -0700 | [diff] [blame] | 1 | #define JEMALLOC_RTREE_C_ |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 2 | #include "jemalloc/internal/jemalloc_internal.h" |
| 3 | |
/* Return the smaller of two unsigned values. */
static unsigned
hmin(unsigned ha, unsigned hb)
{

	if (hb < ha)
		return (hb);
	return (ha);
}
| 10 | |
Jason Evans | 2d2b4e9 | 2016-03-28 03:06:35 -0700 | [diff] [blame] | 11 | /* |
| 12 | * Only the most significant bits of keys passed to rtree_{read,write}() are |
| 13 | * used. |
| 14 | */ |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 15 | bool |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 16 | rtree_new(rtree_t *rtree, unsigned bits) |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 17 | { |
| 18 | unsigned bits_in_leaf, height, i; |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 19 | |
Jason Evans | 6c460ad | 2016-03-22 17:54:35 -0700 | [diff] [blame] | 20 | assert(RTREE_HEIGHT_MAX == ((ZU(1) << (LG_SIZEOF_PTR+3)) / |
| 21 | RTREE_BITS_PER_LEVEL)); |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 22 | assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3)); |
| 23 | |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 24 | bits_in_leaf = (bits % RTREE_BITS_PER_LEVEL) == 0 ? RTREE_BITS_PER_LEVEL |
| 25 | : (bits % RTREE_BITS_PER_LEVEL); |
Jason Evans | b954bc5 | 2014-01-02 17:36:38 -0800 | [diff] [blame] | 26 | if (bits > bits_in_leaf) { |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 27 | height = 1 + (bits - bits_in_leaf) / RTREE_BITS_PER_LEVEL; |
| 28 | if ((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf != bits) |
Jason Evans | b954bc5 | 2014-01-02 17:36:38 -0800 | [diff] [blame] | 29 | height++; |
Jason Evans | b954bc5 | 2014-01-02 17:36:38 -0800 | [diff] [blame] | 30 | } else |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 31 | height = 1; |
| 32 | assert((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf == bits); |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 33 | |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 34 | rtree->height = height; |
| 35 | |
| 36 | /* Root level. */ |
| 37 | rtree->levels[0].subtree = NULL; |
| 38 | rtree->levels[0].bits = (height > 1) ? RTREE_BITS_PER_LEVEL : |
| 39 | bits_in_leaf; |
| 40 | rtree->levels[0].cumbits = rtree->levels[0].bits; |
| 41 | /* Interior levels. */ |
| 42 | for (i = 1; i < height-1; i++) { |
| 43 | rtree->levels[i].subtree = NULL; |
| 44 | rtree->levels[i].bits = RTREE_BITS_PER_LEVEL; |
| 45 | rtree->levels[i].cumbits = rtree->levels[i-1].cumbits + |
| 46 | RTREE_BITS_PER_LEVEL; |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 47 | } |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 48 | /* Leaf level. */ |
| 49 | if (height > 1) { |
| 50 | rtree->levels[height-1].subtree = NULL; |
| 51 | rtree->levels[height-1].bits = bits_in_leaf; |
| 52 | rtree->levels[height-1].cumbits = bits; |
| 53 | } |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 54 | |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 55 | /* Compute lookup table to be used by rtree_start_level(). */ |
| 56 | for (i = 0; i < RTREE_HEIGHT_MAX; i++) { |
| 57 | rtree->start_level[i] = hmin(RTREE_HEIGHT_MAX - 1 - i, height - |
| 58 | 1); |
| 59 | } |
| 60 | |
| 61 | return (false); |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 62 | } |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 63 | |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 64 | #ifdef JEMALLOC_JET |
| 65 | #undef rtree_node_alloc |
| 66 | #define rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl) |
| 67 | #endif |
| 68 | static rtree_elm_t * |
| 69 | rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) |
| 70 | { |
| 71 | |
| 72 | return ((rtree_elm_t *)base_alloc(tsdn, nelms * sizeof(rtree_elm_t))); |
| 73 | } |
| 74 | #ifdef JEMALLOC_JET |
| 75 | #undef rtree_node_alloc |
| 76 | #define rtree_node_alloc JEMALLOC_N(rtree_node_alloc) |
| 77 | rtree_node_alloc_t *rtree_node_alloc = JEMALLOC_N(rtree_node_alloc_impl); |
| 78 | #endif |
| 79 | |
#ifdef JEMALLOC_JET
#undef rtree_node_dalloc
#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
#endif
/*
 * Deallocation stub: base-allocated rtree nodes are never freed during
 * normal operation, so reaching this function indicates a logic error.
 * Under JEMALLOC_JET, tests can interpose a real deallocator through the
 * function pointer defined below.
 */
UNUSED static void
rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node)
{

	/* Nodes are never deleted during normal operation. */
	not_reached();
}
#ifdef JEMALLOC_JET
#undef rtree_node_dalloc
#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc)
/* Mutable hook so tests can swap in a tracking deallocator. */
rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
#endif
| 96 | |
| 97 | #ifdef JEMALLOC_JET |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 98 | static void |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 99 | rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node, |
| 100 | unsigned level) |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 101 | { |
| 102 | |
Jason Evans | fbd8d77 | 2015-03-11 23:14:50 -0700 | [diff] [blame] | 103 | if (level + 1 < rtree->height) { |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 104 | size_t nchildren, i; |
| 105 | |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 106 | nchildren = ZU(1) << rtree->levels[level].bits; |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 107 | for (i = 0; i < nchildren; i++) { |
Jason Evans | 2d2b4e9 | 2016-03-28 03:06:35 -0700 | [diff] [blame] | 108 | rtree_elm_t *child = node[i].child; |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 109 | if (child != NULL) { |
| 110 | rtree_delete_subtree(tsdn, rtree, child, level + |
| 111 | 1); |
| 112 | } |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 113 | } |
| 114 | } |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 115 | rtree_node_dalloc(tsdn, rtree, node); |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 116 | } |
| 117 | |
| 118 | void |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 119 | rtree_delete(tsdn_t *tsdn, rtree_t *rtree) |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 120 | { |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 121 | unsigned i; |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 122 | |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 123 | for (i = 0; i < rtree->height; i++) { |
Jason Evans | 2d2b4e9 | 2016-03-28 03:06:35 -0700 | [diff] [blame] | 124 | rtree_elm_t *subtree = rtree->levels[i].subtree; |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 125 | if (subtree != NULL) |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 126 | rtree_delete_subtree(tsdn, rtree, subtree, i); |
Jason Evans | 8d0e04d | 2015-01-30 22:54:08 -0800 | [diff] [blame] | 127 | } |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 128 | } |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 129 | #endif |
Jason Evans | b980cc7 | 2014-01-02 16:08:28 -0800 | [diff] [blame] | 130 | |
/*
 * Lazily initialize the node slot *elmp, racing against other threads via
 * compare-and-swap.  Exactly one thread wins the CAS and allocates the
 * node; losers spin until the winner publishes it.  Returns the
 * initialized node, or NULL on allocation failure.
 */
static rtree_elm_t *
rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
    rtree_elm_t **elmp)
{
	rtree_elm_t *node;

	if (atomic_cas_p((void **)elmp, NULL, RTREE_NODE_INITIALIZING)) {
		/*
		 * Another thread is already in the process of initializing.
		 * Spin-wait until initialization is complete.
		 */
		do {
			CPU_SPINWAIT;
			node = atomic_read_p((void **)elmp);
		} while (node == RTREE_NODE_INITIALIZING);
	} else {
		/* CAS won: this thread owns initialization of the slot. */
		node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
		    rtree->levels[level].bits);
		/*
		 * NOTE(review): on allocation failure *elmp is left set to
		 * RTREE_NODE_INITIALIZING, so concurrent spinners would never
		 * terminate — presumably acceptable because base allocation
		 * failure is effectively fatal here; confirm upstream.
		 */
		if (node == NULL)
			return (NULL);
		atomic_write_p((void **)elmp, node);
	}

	return (node);
}
| 156 | |
Jason Evans | 2d2b4e9 | 2016-03-28 03:06:35 -0700 | [diff] [blame] | 157 | rtree_elm_t * |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 158 | rtree_subtree_read_hard(tsdn_t *tsdn, rtree_t *rtree, unsigned level) |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 159 | { |
| 160 | |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 161 | return (rtree_node_init(tsdn, rtree, level, |
| 162 | &rtree->levels[level].subtree)); |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 163 | } |
| 164 | |
Jason Evans | 2d2b4e9 | 2016-03-28 03:06:35 -0700 | [diff] [blame] | 165 | rtree_elm_t * |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 166 | rtree_child_read_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm, |
| 167 | unsigned level) |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 168 | { |
| 169 | |
Jason Evans | 8c9be3e | 2016-04-16 00:36:11 -0700 | [diff] [blame] | 170 | return (rtree_node_init(tsdn, rtree, level, &elm->child)); |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 171 | } |
Jason Evans | e75e9be | 2016-04-17 12:55:10 -0700 | [diff] [blame] | 172 | |
| 173 | static int |
| 174 | rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b, |
| 175 | void *ob) |
| 176 | { |
| 177 | uintptr_t ka = (uintptr_t)oa; |
| 178 | uintptr_t kb = (uintptr_t)ob; |
| 179 | |
| 180 | assert(ka != 0); |
| 181 | assert(kb != 0); |
| 182 | |
| 183 | return ((ka > kb) - (ka < kb)); |
| 184 | } |
| 185 | |
| 186 | static witness_t * |
| 187 | rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm) |
| 188 | { |
| 189 | witness_t *witness; |
| 190 | size_t i; |
| 191 | rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd); |
| 192 | |
| 193 | /* Iterate over entire array to detect double allocation attempts. */ |
| 194 | witness = NULL; |
| 195 | for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t); |
| 196 | i++) { |
| 197 | rtree_elm_witness_t *rew = &witnesses->witnesses[i]; |
| 198 | |
| 199 | assert(rew->elm != elm); |
| 200 | if (rew->elm == NULL && witness == NULL) { |
| 201 | rew->elm = elm; |
| 202 | witness = &rew->witness; |
| 203 | witness_init(witness, "rtree_elm", |
| 204 | WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp, |
| 205 | (void *)key); |
| 206 | } |
| 207 | } |
| 208 | assert(witness != NULL); |
| 209 | return (witness); |
| 210 | } |
| 211 | |
| 212 | static witness_t * |
| 213 | rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm) |
| 214 | { |
| 215 | size_t i; |
| 216 | rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd); |
| 217 | |
| 218 | for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t); |
| 219 | i++) { |
| 220 | rtree_elm_witness_t *rew = &witnesses->witnesses[i]; |
| 221 | |
| 222 | if (rew->elm == elm) |
| 223 | return (&rew->witness); |
| 224 | } |
| 225 | not_reached(); |
| 226 | } |
| 227 | |
| 228 | static void |
| 229 | rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness, const rtree_elm_t *elm) |
| 230 | { |
| 231 | size_t i; |
| 232 | rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd); |
| 233 | |
| 234 | for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) / sizeof(witness_t); |
| 235 | i++) { |
| 236 | rtree_elm_witness_t *rew = &witnesses->witnesses[i]; |
| 237 | |
| 238 | if (rew->elm == elm) { |
| 239 | rew->elm = NULL; |
| 240 | witness_init(&rew->witness, "rtree_elm", |
| 241 | WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp, |
| 242 | NULL); |
| 243 | return; |
| 244 | } |
| 245 | } |
| 246 | not_reached(); |
| 247 | } |
| 248 | |
| 249 | void |
| 250 | rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree, uintptr_t key, |
| 251 | const rtree_elm_t *elm) |
| 252 | { |
| 253 | witness_t *witness; |
| 254 | |
| 255 | if (tsdn_null(tsdn)) |
| 256 | return; |
| 257 | |
| 258 | witness = rtree_elm_witness_alloc(tsdn_tsd(tsdn), key, elm); |
| 259 | witness_lock(tsdn, witness); |
| 260 | } |
| 261 | |
| 262 | void |
| 263 | rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree, |
| 264 | const rtree_elm_t *elm) |
| 265 | { |
| 266 | witness_t *witness; |
| 267 | |
| 268 | if (tsdn_null(tsdn)) |
| 269 | return; |
| 270 | |
| 271 | witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm); |
| 272 | witness_assert_owner(tsdn, witness); |
| 273 | } |
| 274 | |
| 275 | void |
| 276 | rtree_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree, |
| 277 | const rtree_elm_t *elm) |
| 278 | { |
| 279 | witness_t *witness; |
| 280 | |
| 281 | if (tsdn_null(tsdn)) |
| 282 | return; |
| 283 | |
| 284 | witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm); |
| 285 | witness_unlock(tsdn, witness); |
| 286 | rtree_elm_witness_dalloc(tsdn_tsd(tsdn), witness, elm); |
| 287 | } |