/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

typedef struct extent_node_s extent_node_t;

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

/* Tree of extents.  Use accessor functions for en_* fields. */
struct extent_node_s {
	/* Arena from which this extent came, if any. */
	arena_t			*en_arena;

	/* Pointer to the extent that this tree node is responsible for. */
	void			*en_addr;

	/* Total region size. */
	size_t			en_size;

	/*
	 * The zeroed flag is used by chunk recycling code to track whether
	 * memory is zero-filled.
	 */
	bool			en_zeroed;

	/*
	 * The achunk flag is used to validate that huge allocation lookups
	 * don't return arena chunks.
	 */
	bool			en_achunk;

	/* Profile counters, used for huge objects. */
	prof_tctx_t		*en_prof_tctx;

	/* Linkage for arena's runs_dirty and chunks_cache rings. */
	arena_runs_dirty_link_t	rd;
	qr(extent_node_t)	cc_link;

	union {
		/* Linkage for the size/address-ordered tree. */
		rb_node(extent_node_t)	szad_link;

		/* Linkage for arena's huge and node_cache lists. */
		ql_elm(extent_node_t)	ql_link;
	};

	/* Linkage for the address-ordered tree. */
	rb_node(extent_node_t)	ad_link;
};
typedef rb_tree(extent_node_t) extent_tree_t;
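/*
 * Illustrative note (not part of the original header): because szad_link and
 * ql_link share a union, a node is linked into either the size/address tree
 * or one of the arena's lists at any given time, never both.  A hypothetical
 * sketch, assuming a tree and a list head exist (names are illustrative):
 *
 *	extent_tree_t tree;		// e.g. a chunks_szad tree
 *	ql_head(extent_node_t) list;	// e.g. an arena's huge list
 *
 *	extent_tree_szad_insert(&tree, node);	// uses szad_link
 *	// ...or, alternatively...
 *	ql_elm_new(node, ql_link);		// uses ql_link
 *	ql_tail_insert(&list, node, ql_link);
 */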

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

rb_proto(, extent_tree_szad_, extent_tree_t, extent_node_t)

rb_proto(, extent_tree_ad_, extent_tree_t, extent_node_t)
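/*
 * Illustrative note (not in the original): rb_proto() declares the red-black
 * tree operations for the two trees; the matching rb_gen() in extent.c
 * defines them.  Following jemalloc's rb.h naming conventions, the generated
 * functions include, for example:
 *
 *	void		extent_tree_szad_new(extent_tree_t *tree);
 *	void		extent_tree_szad_insert(extent_tree_t *tree,
 *			    extent_node_t *node);
 *	void		extent_tree_szad_remove(extent_tree_t *tree,
 *			    extent_node_t *node);
 *	extent_node_t	*extent_tree_szad_search(extent_tree_t *tree,
 *			    extent_node_t *key);
 *	extent_node_t	*extent_tree_szad_nsearch(extent_tree_t *tree,
 *			    extent_node_t *key);
 *
 * plus the analogous extent_tree_ad_* set for the address-ordered tree.
 */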

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
arena_t	*extent_node_arena_get(const extent_node_t *node);
void	*extent_node_addr_get(const extent_node_t *node);
size_t	extent_node_size_get(const extent_node_t *node);
bool	extent_node_zeroed_get(const extent_node_t *node);
bool	extent_node_achunk_get(const extent_node_t *node);
prof_tctx_t	*extent_node_prof_tctx_get(const extent_node_t *node);
void	extent_node_arena_set(extent_node_t *node, arena_t *arena);
void	extent_node_addr_set(extent_node_t *node, void *addr);
void	extent_node_size_set(extent_node_t *node, size_t size);
void	extent_node_zeroed_set(extent_node_t *node, bool zeroed);
void	extent_node_achunk_set(extent_node_t *node, bool achunk);
void	extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx);
void	extent_node_init(extent_node_t *node, arena_t *arena, void *addr,
    size_t size, bool zeroed);
void	extent_node_dirty_linkage_init(extent_node_t *node);
void	extent_node_dirty_insert(extent_node_t *node,
    arena_runs_dirty_link_t *runs_dirty, extent_node_t *chunks_dirty);
void	extent_node_dirty_remove(extent_node_t *node);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_EXTENT_C_))
JEMALLOC_INLINE arena_t *
extent_node_arena_get(const extent_node_t *node)
{

	return (node->en_arena);
}

JEMALLOC_INLINE void *
extent_node_addr_get(const extent_node_t *node)
{

	return (node->en_addr);
}

JEMALLOC_INLINE size_t
extent_node_size_get(const extent_node_t *node)
{

	return (node->en_size);
}

JEMALLOC_INLINE bool
extent_node_zeroed_get(const extent_node_t *node)
{

	return (node->en_zeroed);
}

JEMALLOC_INLINE bool
extent_node_achunk_get(const extent_node_t *node)
{

	return (node->en_achunk);
}

JEMALLOC_INLINE prof_tctx_t *
extent_node_prof_tctx_get(const extent_node_t *node)
{

	return (node->en_prof_tctx);
}

JEMALLOC_INLINE void
extent_node_arena_set(extent_node_t *node, arena_t *arena)
{

	node->en_arena = arena;
}

JEMALLOC_INLINE void
extent_node_addr_set(extent_node_t *node, void *addr)
{

	node->en_addr = addr;
}

JEMALLOC_INLINE void
extent_node_size_set(extent_node_t *node, size_t size)
{

	node->en_size = size;
}

JEMALLOC_INLINE void
extent_node_zeroed_set(extent_node_t *node, bool zeroed)
{

	node->en_zeroed = zeroed;
}

JEMALLOC_INLINE void
extent_node_achunk_set(extent_node_t *node, bool achunk)
{

	node->en_achunk = achunk;
}

JEMALLOC_INLINE void
extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx)
{

	node->en_prof_tctx = tctx;
}

JEMALLOC_INLINE void
extent_node_init(extent_node_t *node, arena_t *arena, void *addr, size_t size,
    bool zeroed)
{

	extent_node_arena_set(node, arena);
	extent_node_addr_set(node, addr);
	extent_node_size_set(node, size);
	extent_node_zeroed_set(node, zeroed);
	extent_node_achunk_set(node, false);
	if (config_prof)
		extent_node_prof_tctx_set(node, NULL);
}
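/*
 * Illustrative usage sketch (not in the original header); the values passed
 * below are hypothetical:
 *
 *	extent_node_t node;
 *	extent_node_init(&node, arena, chunk, chunksize, false);
 *	extent_node_achunk_set(&node, true);	// mark as an arena chunk
 */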

JEMALLOC_INLINE void
extent_node_dirty_linkage_init(extent_node_t *node)
{

	qr_new(&node->rd, rd_link);
	qr_new(node, cc_link);
}

JEMALLOC_INLINE void
extent_node_dirty_insert(extent_node_t *node,
    arena_runs_dirty_link_t *runs_dirty, extent_node_t *chunks_dirty)
{

	qr_meld(runs_dirty, &node->rd, rd_link);
	qr_meld(chunks_dirty, node, cc_link);
}

JEMALLOC_INLINE void
extent_node_dirty_remove(extent_node_t *node)
{

	qr_remove(&node->rd, rd_link);
	qr_remove(node, cc_link);
}
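/*
 * Illustrative sketch (not in the original): a node tracking a dirty chunk is
 * threaded onto two rings at once, the arena's runs_dirty ring (via rd) and
 * its chunks_cache ring (via cc_link).  Assuming an arena_t with those
 * fields, the expected call sequence is roughly:
 *
 *	extent_node_dirty_linkage_init(node);
 *	extent_node_dirty_insert(node, &arena->runs_dirty,
 *	    &arena->chunks_cache);
 *	// ...later, when the chunk is purged or reused:
 *	extent_node_dirty_remove(node);
 */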

#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/