#define	JEMALLOC_RTREE_C_
#include "jemalloc/internal/jemalloc_internal.h"

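/* Return the smaller of two heights; helper for the start_level table below. */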
static unsigned
hmin(unsigned ha, unsigned hb)
{

	return (ha < hb ? ha : hb);
}

/*
 * Only the most significant bits of keys passed to rtree_{read,write}() are
 * used.
 */
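/*
 * Initialize an rtree that can map keys of the given width (in bits).  Every
 * level resolves RTREE_BITS_PER_LEVEL bits except possibly the leaf level,
 * which absorbs the remainder.  Returns false on success, per jemalloc's
 * *_new() convention.
 */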
bool
rtree_new(rtree_t *rtree, unsigned bits)
{
	unsigned bits_in_leaf, height, i;

	assert(RTREE_HEIGHT_MAX == ((ZU(1) << (LG_SIZEOF_PTR+3)) /
	    RTREE_BITS_PER_LEVEL));
	assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3));

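	/*
	 * The leaf level holds bits % RTREE_BITS_PER_LEVEL bits (a full
	 * level's worth if bits is an exact multiple); the remaining bits
	 * determine the height.
	 */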
	bits_in_leaf = (bits % RTREE_BITS_PER_LEVEL) == 0 ? RTREE_BITS_PER_LEVEL
	    : (bits % RTREE_BITS_PER_LEVEL);
	if (bits > bits_in_leaf) {
		height = 1 + (bits - bits_in_leaf) / RTREE_BITS_PER_LEVEL;
		if ((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf != bits)
			height++;
	} else
		height = 1;
	assert((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf == bits);

	rtree->height = height;

	/* Root level. */
	rtree->levels[0].subtree = NULL;
	rtree->levels[0].bits = (height > 1) ? RTREE_BITS_PER_LEVEL :
	    bits_in_leaf;
	rtree->levels[0].cumbits = rtree->levels[0].bits;
	/* Interior levels. */
	for (i = 1; i < height-1; i++) {
		rtree->levels[i].subtree = NULL;
		rtree->levels[i].bits = RTREE_BITS_PER_LEVEL;
		rtree->levels[i].cumbits = rtree->levels[i-1].cumbits +
		    RTREE_BITS_PER_LEVEL;
	}
	/* Leaf level. */
	if (height > 1) {
		rtree->levels[height-1].subtree = NULL;
		rtree->levels[height-1].bits = bits_in_leaf;
		rtree->levels[height-1].cumbits = bits;
	}

	/* Compute lookup table to be used by rtree_[ctx_]start_level(). */
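	/*
	 * Entry i caps the level at which a lookup can start for a key with i
	 * significant RTREE_BITS_PER_LEVEL-bit digits; keys whose upper
	 * digits are all zero start below the root, via the per-level subtree
	 * roots.
	 */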
	for (i = 0; i < RTREE_HEIGHT_MAX; i++) {
		rtree->start_level[i] = hmin(RTREE_HEIGHT_MAX - 1 - i, height -
		    1);
	}
	rtree->start_level[RTREE_HEIGHT_MAX] = 0;

	return (false);
}

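/*
 * Node allocation/deallocation are routed through replaceable function
 * pointers in JEMALLOC_JET builds so that tests can interpose on them.
 */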
#ifdef JEMALLOC_JET
#undef rtree_node_alloc
#define	rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl)
#endif
static rtree_elm_t *
rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms)
{

	return ((rtree_elm_t *)base_alloc(tsdn, nelms * sizeof(rtree_elm_t)));
}
#ifdef JEMALLOC_JET
#undef rtree_node_alloc
#define	rtree_node_alloc JEMALLOC_N(rtree_node_alloc)
rtree_node_alloc_t *rtree_node_alloc = JEMALLOC_N(rtree_node_alloc_impl);
#endif

#ifdef JEMALLOC_JET
#undef rtree_node_dalloc
#define	rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
#endif
UNUSED static void
rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node)
{

	/* Nodes are never deleted during normal operation. */
	not_reached();
}
#ifdef JEMALLOC_JET
#undef rtree_node_dalloc
#define	rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc)
rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
#endif

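/* Recursive teardown; rtrees are only deleted by tests, hence JEMALLOC_JET. */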
#ifdef JEMALLOC_JET
static void
rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
    unsigned level)
{

	if (level + 1 < rtree->height) {
		size_t nchildren, i;

		nchildren = ZU(1) << rtree->levels[level].bits;
		for (i = 0; i < nchildren; i++) {
			rtree_elm_t *child = node[i].child;
			if (child != NULL) {
				rtree_delete_subtree(tsdn, rtree, child, level +
				    1);
			}
		}
	}
	rtree_node_dalloc(tsdn, rtree, node);
}

void
rtree_delete(tsdn_t *tsdn, rtree_t *rtree)
{
	unsigned i;

	for (i = 0; i < rtree->height; i++) {
		rtree_elm_t *subtree = rtree->levels[i].subtree;
		if (subtree != NULL)
			rtree_delete_subtree(tsdn, rtree, subtree, i);
	}
}
#endif

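/*
 * Lazily create the node that *elmp should point to.  A CAS from NULL to
 * RTREE_NODE_INITIALIZING claims the right to allocate; losing threads
 * spin-wait until the winner publishes the node via atomic_write_p().
 */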
static rtree_elm_t *
rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
    rtree_elm_t **elmp)
{
	rtree_elm_t *node;

	if (atomic_cas_p((void **)elmp, NULL, RTREE_NODE_INITIALIZING)) {
		/*
		 * Another thread is already in the process of initializing.
		 * Spin-wait until initialization is complete.
		 */
		do {
			CPU_SPINWAIT;
			node = atomic_read_p((void **)elmp);
		} while (node == RTREE_NODE_INITIALIZING);
	} else {
		node = rtree_node_alloc(tsdn, rtree, ZU(1) <<
		    rtree->levels[level].bits);
		if (node == NULL)
			return (NULL);
		atomic_write_p((void **)elmp, node);
	}

	return (node);
}

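/*
 * Slow paths for reading the per-level subtree roots and child pointers;
 * reached when a NULL pointer is observed and the node must be created.
 */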
rtree_elm_t *
rtree_subtree_read_hard(tsdn_t *tsdn, rtree_t *rtree, unsigned level)
{

	return (rtree_node_init(tsdn, rtree, level,
	    &rtree->levels[level].subtree));
}

rtree_elm_t *
rtree_child_read_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm,
    unsigned level)
{

	return (rtree_node_init(tsdn, rtree, level, &elm->child));
}

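/*
 * Element witnesses support lock-order checking for rtree_elm_t locks when
 * witnesses are enabled.  All element witnesses share
 * WITNESS_RANK_RTREE_ELM and are ordered by element key, so elements must be
 * acquired in ascending key order.
 */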
static int
rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
    void *ob)
{
	uintptr_t ka = (uintptr_t)oa;
	uintptr_t kb = (uintptr_t)ob;

	assert(ka != 0);
	assert(kb != 0);

	return ((ka > kb) - (ka < kb));
}

static witness_t *
rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm)
{
	witness_t *witness;
	size_t i;
	rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);

	/* Iterate over entire array to detect double allocation attempts. */
	witness = NULL;
	for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) /
	    sizeof(rtree_elm_witness_t); i++) {
		rtree_elm_witness_t *rew = &witnesses->witnesses[i];

		assert(rew->elm != elm);
		if (rew->elm == NULL && witness == NULL) {
			rew->elm = elm;
			witness = &rew->witness;
			witness_init(witness, "rtree_elm",
			    WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
			    (void *)key);
		}
	}
	assert(witness != NULL);
	return (witness);
}

static witness_t *
rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm)
{
	size_t i;
	rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);

	for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) /
	    sizeof(rtree_elm_witness_t); i++) {
		rtree_elm_witness_t *rew = &witnesses->witnesses[i];

		if (rew->elm == elm)
			return (&rew->witness);
	}
	not_reached();
}

static void
rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness, const rtree_elm_t *elm)
{
	size_t i;
	rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);

	for (i = 0; i < sizeof(rtree_elm_witness_tsd_t) /
	    sizeof(rtree_elm_witness_t); i++) {
		rtree_elm_witness_t *rew = &witnesses->witnesses[i];

		if (rew->elm == elm) {
			rew->elm = NULL;
			witness_init(&rew->witness, "rtree_elm",
			    WITNESS_RANK_RTREE_ELM, rtree_elm_witness_comp,
			    NULL);
			return;
		}
	}
	not_reached();
}

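/*
 * Public witness hooks.  A NULL tsdn means no thread-specific data is
 * available (e.g. during early bootstrap), in which case witness checking is
 * skipped.
 */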
void
rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree, uintptr_t key,
    const rtree_elm_t *elm)
{
	witness_t *witness;

	if (tsdn_null(tsdn))
		return;

	witness = rtree_elm_witness_alloc(tsdn_tsd(tsdn), key, elm);
	witness_lock(tsdn, witness);
}

void
rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
    const rtree_elm_t *elm)
{
	witness_t *witness;

	if (tsdn_null(tsdn))
		return;

	witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
	witness_assert_owner(tsdn, witness);
}

void
rtree_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
    const rtree_elm_t *elm)
{
	witness_t *witness;

	if (tsdn_null(tsdn))
		return;

	witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
	witness_unlock(tsdn, witness);
	rtree_elm_witness_dalloc(tsdn_tsd(tsdn), witness, elm);
}