Added some statistics code to the dict and list object code. I wanted to test how a larger freelist affects the reuse of freed objects. Contrary to my gut feeling, 80 objects is more than enough for small apps. I haven't profiled a large app yet.
diff --git a/Objects/listobject.c b/Objects/listobject.c
index 9e89366..df7a405 100644
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -63,6 +63,22 @@
return 0;
}
+/* Debug statistic to compare allocations with reuse through the free list */
+#undef SHOW_ALLOC_COUNT
+#ifdef SHOW_ALLOC_COUNT
+static size_t count_alloc = 0;
+static size_t count_reuse = 0;
+
+static void
+show_alloc(void)
+{
+	fprintf(stderr, "List allocations: %zu\n", count_alloc);
+	fprintf(stderr, "List reuse through freelist: %zu\n", count_reuse);
+	fprintf(stderr, "%.2f%% reuse rate\n\n",
+		(100.0*count_reuse/(count_alloc+count_reuse)));
+}
+#endif
+
/* Empty list reuse scheme to save calls to malloc and free */
#ifndef PyList_MAXFREELIST
#define PyList_MAXFREELIST 80
@@ -88,6 +104,13 @@
{
PyListObject *op;
size_t nbytes;
+#ifdef SHOW_ALLOC_COUNT
+ static int initialized = 0;
+ if (!initialized) {
+ Py_AtExit(show_alloc);
+ initialized = 1;
+ }
+#endif
if (size < 0) {
PyErr_BadInternalCall();
@@ -101,10 +124,16 @@
numfree--;
op = free_list[numfree];
_Py_NewReference((PyObject *)op);
+#ifdef SHOW_ALLOC_COUNT
+ count_reuse++;
+#endif
} else {
op = PyObject_GC_New(PyListObject, &PyList_Type);
if (op == NULL)
return NULL;
+#ifdef SHOW_ALLOC_COUNT
+ count_alloc++;
+#endif
}
if (size <= 0)
op->ob_item = NULL;