Use JEMALLOC_INLINE_C everywhere it's appropriate.
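
JEMALLOC_INLINE_C is the project's wrapper around "static inline" for
functions defined in .c files (the _C suffix distinguishes it from the
JEMALLOC_INLINE family used in headers). Routing these definitions through
the macro keeps the inlining policy in one place: optimized builds get plain
"static inline", while builds that disable inlining (e.g. for debugging or
code coverage) keep these helpers as ordinary static functions. A minimal
sketch of the idea, assuming JEMALLOC_DEBUG is the gating flag; the actual
definition lives in jemalloc's internal macro header and may differ in
detail:

    /*
     * Illustrative sketch only, not the verbatim jemalloc definition.
     * JEMALLOC_DEBUG is assumed here to be the flag that disables inlining.
     */
    #ifdef JEMALLOC_DEBUG
    #  define JEMALLOC_INLINE_C static          /* keep functions out-of-line */
    #else
    #  define JEMALLOC_INLINE_C static inline   /* normal optimized builds */
    #endif

Converting the remaining "static inline" definitions in .c files to the macro
makes them follow that centralized policy.
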
diff --git a/src/arena.c b/src/arena.c
index d7377ae..795f530 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -39,7 +39,7 @@
return arena_mapbits_get(chunk, pageind);
}

-static inline int
+JEMALLOC_INLINE_C int
arena_run_comp(arena_chunk_map_misc_t *a, arena_chunk_map_misc_t *b)
{
uintptr_t a_miscelm = (uintptr_t)a;
@@ -55,7 +55,7 @@
rb_gen(static UNUSED, arena_run_tree_, arena_run_tree_t, arena_chunk_map_misc_t,
rb_link, arena_run_comp)

-static inline int
+JEMALLOC_INLINE_C int
arena_avail_comp(arena_chunk_map_misc_t *a, arena_chunk_map_misc_t *b)
{
int ret;
@@ -139,7 +139,7 @@
arena->ndirty -= npages;
}

-static inline void *
+JEMALLOC_INLINE_C void *
arena_run_reg_alloc(arena_run_t *run, arena_bin_info_t *bin_info)
{
void *ret;
@@ -159,7 +159,7 @@
return (ret);
}

-static inline void
+JEMALLOC_INLINE_C void
arena_run_reg_dalloc(arena_run_t *run, void *ptr)
{
arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
@@ -185,7 +185,7 @@
run->nfree++;
}

-static inline void
+JEMALLOC_INLINE_C void
arena_run_zero(arena_chunk_t *chunk, size_t run_ind, size_t npages)
{

@@ -195,7 +195,7 @@
(npages << LG_PAGE));
}

-static inline void
+JEMALLOC_INLINE_C void
arena_run_page_mark_zeroed(arena_chunk_t *chunk, size_t run_ind)
{

@@ -203,7 +203,7 @@
<< LG_PAGE)), PAGE);
}

-static inline void
+JEMALLOC_INLINE_C void
arena_run_page_validate_zeroed(arena_chunk_t *chunk, size_t run_ind)
{
size_t i;
@@ -834,7 +834,7 @@
return (arena_run_alloc_small_helper(arena, size, binind));
}

-static inline void
+JEMALLOC_INLINE_C void
arena_maybe_purge(arena_t *arena)
{
size_t threshold;
diff --git a/src/ctl.c b/src/ctl.c
index 72598b3..b367c9f 100644
--- a/src/ctl.c
+++ b/src/ctl.c
@@ -16,14 +16,14 @@
/******************************************************************************/
/* Helpers for named and indexed nodes. */

-static inline const ctl_named_node_t *
+JEMALLOC_INLINE_C const ctl_named_node_t *
ctl_named_node(const ctl_node_t *node)
{

return ((node->named) ? (const ctl_named_node_t *)node : NULL);
}

-static inline const ctl_named_node_t *
+JEMALLOC_INLINE_C const ctl_named_node_t *
ctl_named_children(const ctl_named_node_t *node, int index)
{
const ctl_named_node_t *children = ctl_named_node(node->children);
@@ -31,7 +31,7 @@
return (children ? &children[index] : NULL);
}

-static inline const ctl_indexed_node_t *
+JEMALLOC_INLINE_C const ctl_indexed_node_t *
ctl_indexed_node(const ctl_node_t *node)
{

diff --git a/src/extent.c b/src/extent.c
index 8c09b48..ca85201 100644
--- a/src/extent.c
+++ b/src/extent.c
@@ -3,7 +3,7 @@

/******************************************************************************/

-static inline int
+JEMALLOC_INLINE_C int
extent_szad_comp(extent_node_t *a, extent_node_t *b)
{
int ret;
@@ -25,7 +25,7 @@
rb_gen(, extent_tree_szad_, extent_tree_t, extent_node_t, link_szad,
extent_szad_comp)

-static inline int
+JEMALLOC_INLINE_C int
extent_ad_comp(extent_node_t *a, extent_node_t *b)
{
uintptr_t a_addr = (uintptr_t)a->addr;
diff --git a/src/prof.c b/src/prof.c
index 4016327..36ee758 100644
--- a/src/prof.c
+++ b/src/prof.c
@@ -244,7 +244,7 @@
bt->len = 0;
}

-static inline void
+JEMALLOC_INLINE_C void
prof_enter(prof_tdata_t *tdata)
{

@@ -256,7 +256,7 @@
malloc_mutex_lock(&bt2gctx_mtx);
}

-static inline void
+JEMALLOC_INLINE_C void
prof_leave(prof_tdata_t *tdata)
{
bool idump, gdump;