blob: af0d97e753018d2da0dba40a73bc06e4ffee4499 [file] [log] [blame]
Jason Evans0657f122011-03-18 17:56:14 -07001#define JEMALLOC_RTREE_C_
Jason Evans2dbecf12010-09-05 10:35:13 -07002#include "jemalloc/internal/jemalloc_internal.h"
3
/* Return the smaller of two unsigned height values. */
static unsigned
hmin(unsigned ha, unsigned hb)
{

	if (hb < ha)
		return (hb);
	return (ha);
}
10
11/* Only the most significant bits of keys passed to rtree_[gs]et() are used. */
12bool
13rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
14 rtree_node_dalloc_t *dalloc)
15{
16 unsigned bits_in_leaf, height, i;
Jason Evans2dbecf12010-09-05 10:35:13 -070017
Jason Evansb980cc72014-01-02 16:08:28 -080018 assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3));
19
Christopher Ferris83e57672015-04-22 06:59:28 +000020 bits_in_leaf = (bits % RTREE_BITS_PER_LEVEL) == 0 ? RTREE_BITS_PER_LEVEL
21 : (bits % RTREE_BITS_PER_LEVEL);
Jason Evansb954bc52014-01-02 17:36:38 -080022 if (bits > bits_in_leaf) {
Christopher Ferris83e57672015-04-22 06:59:28 +000023 height = 1 + (bits - bits_in_leaf) / RTREE_BITS_PER_LEVEL;
24 if ((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf != bits)
Jason Evansb954bc52014-01-02 17:36:38 -080025 height++;
Nicolas Geoffray75929a92015-04-16 11:10:55 +000026 } else
Christopher Ferris83e57672015-04-22 06:59:28 +000027 height = 1;
28 assert((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf == bits);
Jason Evans2dbecf12010-09-05 10:35:13 -070029
Christopher Ferris83e57672015-04-22 06:59:28 +000030 rtree->alloc = alloc;
31 rtree->dalloc = dalloc;
32 rtree->height = height;
33
34 /* Root level. */
35 rtree->levels[0].subtree = NULL;
36 rtree->levels[0].bits = (height > 1) ? RTREE_BITS_PER_LEVEL :
37 bits_in_leaf;
38 rtree->levels[0].cumbits = rtree->levels[0].bits;
39 /* Interior levels. */
40 for (i = 1; i < height-1; i++) {
41 rtree->levels[i].subtree = NULL;
42 rtree->levels[i].bits = RTREE_BITS_PER_LEVEL;
43 rtree->levels[i].cumbits = rtree->levels[i-1].cumbits +
44 RTREE_BITS_PER_LEVEL;
Jason Evans8d0e04d2015-01-30 22:54:08 -080045 }
Christopher Ferris83e57672015-04-22 06:59:28 +000046 /* Leaf level. */
47 if (height > 1) {
48 rtree->levels[height-1].subtree = NULL;
49 rtree->levels[height-1].bits = bits_in_leaf;
50 rtree->levels[height-1].cumbits = bits;
51 }
Jason Evans8d0e04d2015-01-30 22:54:08 -080052
Christopher Ferris83e57672015-04-22 06:59:28 +000053 /* Compute lookup table to be used by rtree_start_level(). */
54 for (i = 0; i < RTREE_HEIGHT_MAX; i++) {
55 rtree->start_level[i] = hmin(RTREE_HEIGHT_MAX - 1 - i, height -
56 1);
57 }
58
59 return (false);
Jason Evans2dbecf12010-09-05 10:35:13 -070060}
Jason Evans20f1fc92012-10-09 14:46:22 -070061
Jason Evansb980cc72014-01-02 16:08:28 -080062static void
Christopher Ferris83e57672015-04-22 06:59:28 +000063rtree_delete_subtree(rtree_t *rtree, rtree_node_elm_t *node, unsigned level)
Jason Evansb980cc72014-01-02 16:08:28 -080064{
65
Christopher Ferris83e57672015-04-22 06:59:28 +000066 if (level + 1 < rtree->height) {
Jason Evansb980cc72014-01-02 16:08:28 -080067 size_t nchildren, i;
68
Christopher Ferris83e57672015-04-22 06:59:28 +000069 nchildren = ZU(1) << rtree->levels[level].bits;
Jason Evansb980cc72014-01-02 16:08:28 -080070 for (i = 0; i < nchildren; i++) {
Christopher Ferris83e57672015-04-22 06:59:28 +000071 rtree_node_elm_t *child = node[i].child;
Jason Evansb980cc72014-01-02 16:08:28 -080072 if (child != NULL)
73 rtree_delete_subtree(rtree, child, level + 1);
74 }
75 }
76 rtree->dalloc(node);
77}
78
79void
80rtree_delete(rtree_t *rtree)
81{
Christopher Ferris83e57672015-04-22 06:59:28 +000082 unsigned i;
Jason Evansb980cc72014-01-02 16:08:28 -080083
Christopher Ferris83e57672015-04-22 06:59:28 +000084 for (i = 0; i < rtree->height; i++) {
85 rtree_node_elm_t *subtree = rtree->levels[i].subtree;
86 if (subtree != NULL)
87 rtree_delete_subtree(rtree, subtree, i);
88 }
Jason Evansb980cc72014-01-02 16:08:28 -080089}
90
Christopher Ferris83e57672015-04-22 06:59:28 +000091static rtree_node_elm_t *
92rtree_node_init(rtree_t *rtree, unsigned level, rtree_node_elm_t **elmp)
Jason Evans20f1fc92012-10-09 14:46:22 -070093{
Christopher Ferris83e57672015-04-22 06:59:28 +000094 rtree_node_elm_t *node;
Jason Evans20f1fc92012-10-09 14:46:22 -070095
Christopher Ferris83e57672015-04-22 06:59:28 +000096 if (atomic_cas_p((void **)elmp, NULL, RTREE_NODE_INITIALIZING)) {
97 /*
98 * Another thread is already in the process of initializing.
99 * Spin-wait until initialization is complete.
100 */
101 do {
102 CPU_SPINWAIT;
103 node = atomic_read_p((void **)elmp);
104 } while (node == RTREE_NODE_INITIALIZING);
105 } else {
106 node = rtree->alloc(ZU(1) << rtree->levels[level].bits);
107 if (node == NULL)
108 return (NULL);
109 atomic_write_p((void **)elmp, node);
110 }
111
112 return (node);
Jason Evans20f1fc92012-10-09 14:46:22 -0700113}
114
Christopher Ferris83e57672015-04-22 06:59:28 +0000115rtree_node_elm_t *
116rtree_subtree_read_hard(rtree_t *rtree, unsigned level)
Jason Evans20f1fc92012-10-09 14:46:22 -0700117{
118
Christopher Ferris83e57672015-04-22 06:59:28 +0000119 return (rtree_node_init(rtree, level, &rtree->levels[level].subtree));
Jason Evans20f1fc92012-10-09 14:46:22 -0700120}
121
Christopher Ferris83e57672015-04-22 06:59:28 +0000122rtree_node_elm_t *
123rtree_child_read_hard(rtree_t *rtree, rtree_node_elm_t *elm, unsigned level)
Jason Evans20f1fc92012-10-09 14:46:22 -0700124{
125
Christopher Ferris83e57672015-04-22 06:59:28 +0000126 return (rtree_node_init(rtree, level, &elm->child));
Jason Evans20f1fc92012-10-09 14:46:22 -0700127}