/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

typedef struct extent_node_s extent_node_t;

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

/* Tree of extents.  Use accessor functions for en_* fields. */
struct extent_node_s {
	/* Arena from which this extent came, if any. */
	arena_t			*en_arena;

	/* Pointer to the extent that this tree node is responsible for. */
	void			*en_addr;

	/* Total region size. */
	size_t			en_size;

	/*
	 * The zeroed flag is used by chunk recycling code to track whether
	 * memory is zero-filled.
	 */
	bool			en_zeroed;

	/*
	 * The achunk flag is used to validate that huge allocation lookups
	 * don't return arena chunks.
	 */
	bool			en_achunk;

	union {
		/* Profile counters, used for huge objects. */
		prof_tctx_t	*en_prof_tctx;

		struct {
			/*
			 * Linkage for arena's runs_dirty and chunks_dirty
			 * rings.
			 */
			qr(extent_node_t)	cd_link;
			arena_chunk_map_misc_t	runs_dirty;
		};
	};

	union {
		/* Linkage for the size/address-ordered tree. */
		rb_node(extent_node_t)	szad_link;

		/* Linkage for arena's huge and node_cache lists. */
		ql_elm(extent_node_t)	ql_link;
	};

	/* Linkage for the address-ordered tree. */
	rb_node(extent_node_t)	ad_link;
};
typedef rb_tree(extent_node_t) extent_tree_t;
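
/*
 * Illustrative sketch (not part of the header proper): a node tracking a
 * cached chunk is typically filled in field by field via the accessors
 * declared below.  The variable names (arena, chunk, chunksize) are
 * placeholders for whatever the caller has in scope.
 *
 *	extent_node_t node;
 *	extent_node_arena_set(&node, arena);
 *	extent_node_addr_set(&node, chunk);
 *	extent_node_size_set(&node, chunksize);
 *	extent_node_zeroed_set(&node, true);
 *	extent_node_achunk_set(&node, false);
 */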

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

rb_proto(, extent_tree_szad_, extent_tree_t, extent_node_t)

rb_proto(, extent_tree_ad_, extent_tree_t, extent_node_t)
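
/*
 * rb_proto() only declares the tree operations (extent_tree_szad_insert(),
 * extent_tree_szad_nsearch(), and so on); the matching rb_gen() definitions
 * live in extent.c.  A simplified sketch of the first-fit lookup pattern used
 * when recycling chunks, assuming a chunks_szad tree and a requested size are
 * in scope:
 *
 *	extent_node_t key, *node;
 *	extent_node_addr_set(&key, NULL);
 *	extent_node_size_set(&key, size);
 *	node = extent_tree_szad_nsearch(chunks_szad, &key);
 */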

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
arena_t	*extent_node_arena_get(const extent_node_t *node);
void	*extent_node_addr_get(const extent_node_t *node);
size_t	extent_node_size_get(const extent_node_t *node);
bool	extent_node_zeroed_get(const extent_node_t *node);
bool	extent_node_achunk_get(const extent_node_t *node);
prof_tctx_t	*extent_node_prof_tctx_get(const extent_node_t *node);
void	extent_node_arena_set(extent_node_t *node, arena_t *arena);
void	extent_node_addr_set(extent_node_t *node, void *addr);
void	extent_node_size_set(extent_node_t *node, size_t size);
void	extent_node_zeroed_set(extent_node_t *node, bool zeroed);
void	extent_node_achunk_set(extent_node_t *node, bool achunk);
void	extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_EXTENT_C_))
JEMALLOC_INLINE arena_t *
extent_node_arena_get(const extent_node_t *node)
{

	return (node->en_arena);
}

JEMALLOC_INLINE void *
extent_node_addr_get(const extent_node_t *node)
{

	return (node->en_addr);
}

JEMALLOC_INLINE size_t
extent_node_size_get(const extent_node_t *node)
{

	return (node->en_size);
}

JEMALLOC_INLINE bool
extent_node_zeroed_get(const extent_node_t *node)
{

	return (node->en_zeroed);
}

JEMALLOC_INLINE bool
extent_node_achunk_get(const extent_node_t *node)
{

	return (node->en_achunk);
}

JEMALLOC_INLINE prof_tctx_t *
extent_node_prof_tctx_get(const extent_node_t *node)
{

	return (node->en_prof_tctx);
}

JEMALLOC_INLINE void
extent_node_arena_set(extent_node_t *node, arena_t *arena)
{

	node->en_arena = arena;
}

JEMALLOC_INLINE void
extent_node_addr_set(extent_node_t *node, void *addr)
{

	node->en_addr = addr;
}

JEMALLOC_INLINE void
extent_node_size_set(extent_node_t *node, size_t size)
{

	node->en_size = size;
}

JEMALLOC_INLINE void
extent_node_zeroed_set(extent_node_t *node, bool zeroed)
{

	node->en_zeroed = zeroed;
}

JEMALLOC_INLINE void
extent_node_achunk_set(extent_node_t *node, bool achunk)
{

	node->en_achunk = achunk;
}

JEMALLOC_INLINE void
extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx)
{

	node->en_prof_tctx = tctx;
}
#endif
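
/*
 * Usage note (illustrative, not taken from a specific call site): recycling
 * code consults the zeroed flag through its accessor to decide whether the
 * returned memory still needs zeroing, e.g.:
 *
 *	if (!extent_node_zeroed_get(node)) {
 *		memset(extent_node_addr_get(node), 0,
 *		    extent_node_size_get(node));
 *	}
 */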

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/