Integrate whole chunks into unused dirty page purging machinery.

Extend per arena unused dirty page purging to manage unused dirty chunks
in addition to unused dirty runs.  Rather than immediately unmapping
deallocated chunks (or purging them in the --disable-munmap case), store
them in a separate set of trees, chunks_[sz]ad_dirty.  Preferentially
allocate dirty chunks.  When excessive unused dirty pages accumulate,
purge runs and chunks in integrated LRU order (and unmap chunks in the
--enable-munmap case).

Refactor extent_node_t to provide accessor functions.
diff --git a/include/jemalloc/internal/extent.h b/include/jemalloc/internal/extent.h
index 885f475..1060761 100644
--- a/include/jemalloc/internal/extent.h
+++ b/include/jemalloc/internal/extent.h
@@ -7,36 +7,48 @@
 /******************************************************************************/
 #ifdef JEMALLOC_H_STRUCTS
 
-/* Tree of extents. */
+/* Tree of extents.  Use accessor functions for en_* fields. */
 struct extent_node_s {
 	/* Arena from which this extent came, if any. */
-	arena_t			*arena;
+	arena_t			*en_arena;
 
 	/* Pointer to the extent that this tree node is responsible for. */
-	void			*addr;
+	void			*en_addr;
+
+	/* Total region size. */
+	size_t			en_size;
 
 	/*
-	 * Total region size, or 0 if this node corresponds to an arena chunk.
+	 * The zeroed flag is used by chunk recycling code to track whether
+	 * memory is zero-filled.
 	 */
-	size_t			size;
+	bool			en_zeroed;
 
 	/*
-	 * 'prof_tctx' and 'zeroed' are never needed at the same time, so
-	 * overlay them in order to fit extent_node_t in one cache line.
+	 * The achunk flag is used to validate that huge allocation lookups
+	 * don't return arena chunks.
 	 */
+	bool			en_achunk;
+
 	union {
 		/* Profile counters, used for huge objects. */
-		prof_tctx_t	*prof_tctx;
+		prof_tctx_t	*en_prof_tctx;
 
-		/* True if zero-filled; used by chunk recycling code. */
-		bool		zeroed;
+		struct {
+			/*
+			 * Linkage for arena's runs_dirty and chunks_dirty
+			 * rings.
+			 */
+			qr(extent_node_t)	cd_link;
+			arena_chunk_map_misc_t	runs_dirty;
+		};
 	};
 
 	union {
 		/* Linkage for the size/address-ordered tree. */
 		rb_node(extent_node_t)	szad_link;
 
-		/* Linkage for huge allocations and cached chunks nodes. */
+		/* Linkage for arena's huge and node_cache lists. */
 		ql_elm(extent_node_t)	ql_link;
 	};
 
@@ -57,6 +69,107 @@
 /******************************************************************************/
 #ifdef JEMALLOC_H_INLINES
 
+#ifndef JEMALLOC_ENABLE_INLINE
+arena_t	*extent_node_arena_get(const extent_node_t *node);
+void	*extent_node_addr_get(const extent_node_t *node);
+size_t	extent_node_size_get(const extent_node_t *node);
+bool	extent_node_zeroed_get(const extent_node_t *node);
+bool	extent_node_achunk_get(const extent_node_t *node);
+prof_tctx_t	*extent_node_prof_tctx_get(const extent_node_t *node);
+void	extent_node_arena_set(extent_node_t *node, arena_t *arena);
+void	extent_node_addr_set(extent_node_t *node, void *addr);
+void	extent_node_size_set(extent_node_t *node, size_t size);
+void	extent_node_zeroed_set(extent_node_t *node, bool zeroed);
+void	extent_node_achunk_set(extent_node_t *node, bool achunk);
+void	extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx);
+#endif
+
+#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_EXTENT_C_))
+JEMALLOC_INLINE arena_t *
+extent_node_arena_get(const extent_node_t *node)
+{
+
+	return (node->en_arena);
+}
+
+JEMALLOC_INLINE void *
+extent_node_addr_get(const extent_node_t *node)
+{
+
+	return (node->en_addr);
+}
+
+JEMALLOC_INLINE size_t
+extent_node_size_get(const extent_node_t *node)
+{
+
+	return (node->en_size);
+}
+
+JEMALLOC_INLINE bool
+extent_node_zeroed_get(const extent_node_t *node)
+{
+
+	return (node->en_zeroed);
+}
+
+JEMALLOC_INLINE bool
+extent_node_achunk_get(const extent_node_t *node)
+{
+
+	return (node->en_achunk);
+}
+
+JEMALLOC_INLINE prof_tctx_t *
+extent_node_prof_tctx_get(const extent_node_t *node)
+{
+
+	return (node->en_prof_tctx);
+}
+
+JEMALLOC_INLINE void
+extent_node_arena_set(extent_node_t *node, arena_t *arena)
+{
+
+	node->en_arena = arena;
+}
+
+JEMALLOC_INLINE void
+extent_node_addr_set(extent_node_t *node, void *addr)
+{
+
+	node->en_addr = addr;
+}
+
+JEMALLOC_INLINE void
+extent_node_size_set(extent_node_t *node, size_t size)
+{
+
+	node->en_size = size;
+}
+
+JEMALLOC_INLINE void
+extent_node_zeroed_set(extent_node_t *node, bool zeroed)
+{
+
+	node->en_zeroed = zeroed;
+}
+
+JEMALLOC_INLINE void
+extent_node_achunk_set(extent_node_t *node, bool achunk)
+{
+
+	node->en_achunk = achunk;
+}
+
+JEMALLOC_INLINE void
+extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx)
+{
+
+	node->en_prof_tctx = tctx;
+}
+#endif
+
 #endif /* JEMALLOC_H_INLINES */
 /******************************************************************************/