Use arena dss prec instead of default for huge allocs.

Pass a dss_prec_t parameter to huge_{m,p,r}alloc instead of
unconditionally using chunk_dss_prec_get(), so that huge allocations
honor the owning arena's dss precedence.
diff --git a/include/jemalloc/internal/huge.h b/include/jemalloc/internal/huge.h
index ddf1313..a2b9c77 100644
--- a/include/jemalloc/internal/huge.h
+++ b/include/jemalloc/internal/huge.h
@@ -17,18 +17,20 @@
 /* Protects chunk-related data structures. */
 extern malloc_mutex_t	huge_mtx;
 
-void	*huge_malloc(size_t size, bool zero);
-void	*huge_palloc(size_t size, size_t alignment, bool zero);
+void	*huge_malloc(size_t size, bool zero, dss_prec_t dss_prec);
+void	*huge_palloc(size_t size, size_t alignment, bool zero,
+    dss_prec_t dss_prec);
 bool	huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra);
 void	*huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
-    size_t alignment, bool zero, bool try_tcache_dalloc);
+    size_t alignment, bool zero, bool try_tcache_dalloc, dss_prec_t dss_prec);
 #ifdef JEMALLOC_JET
 typedef void (huge_dalloc_junk_t)(void *, size_t);
 extern huge_dalloc_junk_t *huge_dalloc_junk;
 #endif
 void	huge_dalloc(void *ptr, bool unmap);
 size_t	huge_salloc(const void *ptr);
+dss_prec_t	huge_dss_prec_get(arena_t *arena);
 prof_ctx_t	*huge_prof_ctx_get(const void *ptr);
 void	huge_prof_ctx_set(const void *ptr, prof_ctx_t *ctx);
 bool	huge_boot(void);
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index d24a1fe..574bbb1 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -770,7 +770,7 @@
 	if (size <= arena_maxclass)
 		return (arena_malloc(arena, size, false, try_tcache));
 	else
-		return (huge_malloc(size, false));
+		return (huge_malloc(size, false, huge_dss_prec_get(arena)));
 }
 
 JEMALLOC_ALWAYS_INLINE void *
@@ -787,7 +787,7 @@
 	if (size <= arena_maxclass)
 		return (arena_malloc(arena, size, true, try_tcache));
 	else
-		return (huge_malloc(size, true));
+		return (huge_malloc(size, true, huge_dss_prec_get(arena)));
 }
 
 JEMALLOC_ALWAYS_INLINE void *
@@ -813,9 +813,9 @@
 			ret = arena_palloc(choose_arena(arena), usize,
 			    alignment, zero);
 		} else if (alignment <= chunksize)
-			ret = huge_malloc(usize, zero);
+			ret = huge_malloc(usize, zero, huge_dss_prec_get(arena));
 		else
-			ret = huge_palloc(usize, alignment, zero);
+			ret = huge_palloc(usize, alignment, zero, huge_dss_prec_get(arena));
 	}
 
 	assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
@@ -984,7 +984,7 @@
 		    try_tcache_dalloc));
 	} else {
 		return (huge_ralloc(ptr, oldsize, size, extra,
-		    alignment, zero, try_tcache_dalloc));
+		    alignment, zero, try_tcache_dalloc, huge_dss_prec_get(arena)));
 	}
 }
 
diff --git a/src/huge.c b/src/huge.c
index 6d86aed..d72f213 100644
--- a/src/huge.c
+++ b/src/huge.c
@@ -16,14 +16,14 @@
 static extent_tree_t	huge;
 
 void *
-huge_malloc(size_t size, bool zero)
+huge_malloc(size_t size, bool zero, dss_prec_t dss_prec)
 {
 
-	return (huge_palloc(size, chunksize, zero));
+	return (huge_palloc(size, chunksize, zero, dss_prec));
 }
 
 void *
-huge_palloc(size_t size, size_t alignment, bool zero)
+huge_palloc(size_t size, size_t alignment, bool zero, dss_prec_t dss_prec)
 {
 	void *ret;
 	size_t csize;
@@ -48,8 +48,7 @@
 	 * it is possible to make correct junk/zero fill decisions below.
 	 */
 	is_zeroed = zero;
-	ret = chunk_alloc(csize, alignment, false, &is_zeroed,
-	    chunk_dss_prec_get());
+	ret = chunk_alloc(csize, alignment, false, &is_zeroed, dss_prec);
 	if (ret == NULL) {
 		base_node_dealloc(node);
 		return (NULL);
@@ -98,7 +97,7 @@
 
 void *
 huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
-    size_t alignment, bool zero, bool try_tcache_dalloc)
+    size_t alignment, bool zero, bool try_tcache_dalloc, dss_prec_t dss_prec)
 {
 	void *ret;
 	size_t copysize;
@@ -113,18 +112,18 @@
 	 * space and copying.
 	 */
 	if (alignment > chunksize)
-		ret = huge_palloc(size + extra, alignment, zero);
+		ret = huge_palloc(size + extra, alignment, zero, dss_prec);
 	else
-		ret = huge_malloc(size + extra, zero);
+		ret = huge_malloc(size + extra, zero, dss_prec);
 
 	if (ret == NULL) {
 		if (extra == 0)
 			return (NULL);
 		/* Try again, this time without extra. */
 		if (alignment > chunksize)
-			ret = huge_palloc(size, alignment, zero);
+			ret = huge_palloc(size, alignment, zero, dss_prec);
 		else
-			ret = huge_malloc(size, zero);
+			ret = huge_malloc(size, zero, dss_prec);
 
 		if (ret == NULL)
 			return (NULL);
@@ -264,6 +263,13 @@
 	return (ret);
 }
 
+dss_prec_t
+huge_dss_prec_get(arena_t *arena)
+{
+
+	return (arena_dss_prec_get(choose_arena(arena)));
+}
+
 prof_ctx_t *
 huge_prof_ctx_get(const void *ptr)
 {
diff --git a/src/jemalloc.c b/src/jemalloc.c
index 563d99f..204778b 100644
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -2076,7 +2076,7 @@
 	if (size <= arena_maxclass)
 		return (arena_malloc(arenas[0], size, zero, false));
 	else
-		return (huge_malloc(size, zero));
+		return (huge_malloc(size, zero, huge_dss_prec_get(arenas[0])));
 }
 
 void *