Fix a chunk_recycle() regression.

This regression was introduced by
97c04a93838c4001688fe31bf018972b4696efe2 (Use first-fit rather than
first-best-fit run/chunk allocation.).  That change made chunk_recycle()
handle size == chunksize requests via an exact search of the
address-ordered tree, keyed on new_addr, which is NULL unless the caller
requests a specific address; such requests therefore never found a cached
chunk.  Handle the size == chunksize case inside chunk_first_fit()
instead, by returning the lowest chunk in the address-ordered tree.
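
The following stand-alone C sketch (not part of the patch; every toy_*
identifier and the 2 MiB chunk size are assumptions made purely for
illustration) models the regressed and the fixed selection paths for a
single-chunk request when no specific address was requested:

/*
 * Hypothetical stand-alone sketch, not jemalloc source: the toy_* names,
 * the array-backed cache, and the 2 MiB chunk size are assumptions for
 * illustration only.  It models why an exact-address lookup keyed on a
 * NULL new_addr never finds a cached chunk, while taking the lowest node
 * in the address-ordered structure does.
 */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define	TOY_CHUNKSIZE	((size_t)0x200000)	/* Assume 2 MiB chunks. */

typedef struct {
	void	*addr;
	size_t	size;
} toy_node_t;

/* Cached chunks, kept sorted by address (lowest address first). */
static toy_node_t toy_cache[] = {
	{(void *)(uintptr_t)0x200000, TOY_CHUNKSIZE},
	{(void *)(uintptr_t)0x800000, 2 * TOY_CHUNKSIZE}
};
#define	TOY_NCACHED	(sizeof(toy_cache) / sizeof(toy_cache[0]))

/* Exact-address lookup, analogous to extent_tree_ad_search(). */
static toy_node_t *
toy_ad_search(void *addr)
{
	size_t i;

	for (i = 0; i < TOY_NCACHED; i++) {
		if (toy_cache[i].addr == addr)
			return (&toy_cache[i]);
	}
	return (NULL);
}

/* Lowest-address cached chunk, analogous to extent_tree_ad_first(). */
static toy_node_t *
toy_ad_first(void)
{
	return (&toy_cache[0]);
}

int
main(void)
{
	void *new_addr = NULL;	/* No specific address was requested. */

	/* Regressed path: address search keyed on new_addr (NULL) fails. */
	toy_node_t *broken = toy_ad_search(new_addr);

	/* Fixed path: a single-chunk request takes the lowest cached chunk. */
	toy_node_t *fixed = toy_ad_first();

	printf("regressed lookup: %p\n", broken != NULL ? broken->addr : NULL);
	printf("fixed lookup:     %p\n", fixed->addr);
	assert(broken == NULL && fixed == &toy_cache[0]);
	return (0);
}

With the fix, the size == chunksize shortcut lives in chunk_first_fit(),
so chunk_recycle() only performs an exact address search when new_addr is
actually specified.
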
diff --git a/src/chunk.c b/src/chunk.c
index 875fa4c..fb8cd41 100644
--- a/src/chunk.c
+++ b/src/chunk.c
@@ -64,13 +64,22 @@
 
 /* Do first-fit chunk selection. */
 static extent_node_t *
-chunk_first_fit(arena_t *arena, extent_tree_t *chunks_szad, size_t size)
+chunk_first_fit(arena_t *arena, extent_tree_t *chunks_szad,
+    extent_tree_t *chunks_ad, size_t size)
 {
 	extent_node_t *node;
 	index_t index;
 
 	assert(size == CHUNK_CEILING(size));
 
+	if (size == chunksize) {
+		/*
+		 * Any chunk will suffice, so simply select the one lowest in
+		 * memory.
+		 */
+		return (extent_tree_ad_first(chunks_ad));
+	}
+
 	/*
 	 * Iterate over all size classes that are at least large enough to
 	 * satisfy the request, search for the lowest chunk of each size class,
@@ -113,12 +122,14 @@
 	if (alloc_size < size)
 		return (NULL);
 	malloc_mutex_lock(&arena->chunks_mtx);
-	if (new_addr != NULL || size == chunksize) {
+	if (new_addr != NULL) {
 		extent_node_t key;
 		extent_node_init(&key, arena, new_addr, alloc_size, false);
 		node = extent_tree_ad_search(chunks_ad, &key);
-	} else
-		node = chunk_first_fit(arena, chunks_szad, alloc_size);
+	} else {
+		node = chunk_first_fit(arena, chunks_szad, chunks_ad,
+		    alloc_size);
+	}
 	if (node == NULL || (new_addr != NULL && extent_node_size_get(node) <
 	    size)) {
 		malloc_mutex_unlock(&arena->chunks_mtx);