blob: 9bfae9180d1a6817e1200e7cc3107ebb008e08e6 [file] [log] [blame]
/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "Dalvik.h"
Barry Hayeseac47ed2009-06-22 11:45:20 -070018#include "alloc/clz.h"
The Android Open Source Project2ad60cf2008-10-21 07:00:00 -070019#include "alloc/HeapBitmap.h"
20#include "alloc/HeapInternal.h"
21#include "alloc/HeapSource.h"
22#include "alloc/MarkSweep.h"
23#include <limits.h> // for ULONG_MAX
24#include <sys/mman.h> // for madvise(), mmap()
25#include <cutils/ashmem.h>
26
27#define GC_DEBUG_PARANOID 2
28#define GC_DEBUG_BASIC 1
29#define GC_DEBUG_OFF 0
30#define GC_DEBUG(l) (GC_DEBUG_LEVEL >= (l))
31
32#if 1
33#define GC_DEBUG_LEVEL GC_DEBUG_PARANOID
34#else
35#define GC_DEBUG_LEVEL GC_DEBUG_OFF
36#endif
37
38#define VERBOSE_GC 0
39
40#define GC_LOG_TAG LOG_TAG "-gc"
41
42#if LOG_NDEBUG
43#define LOGV_GC(...) ((void)0)
44#define LOGD_GC(...) ((void)0)
45#else
46#define LOGV_GC(...) LOG(LOG_VERBOSE, GC_LOG_TAG, __VA_ARGS__)
47#define LOGD_GC(...) LOG(LOG_DEBUG, GC_LOG_TAG, __VA_ARGS__)
48#endif
49
50#if VERBOSE_GC
51#define LOGVV_GC(...) LOGV_GC(__VA_ARGS__)
52#else
53#define LOGVV_GC(...) ((void)0)
54#endif
55
56#define LOGI_GC(...) LOG(LOG_INFO, GC_LOG_TAG, __VA_ARGS__)
57#define LOGW_GC(...) LOG(LOG_WARN, GC_LOG_TAG, __VA_ARGS__)
58#define LOGE_GC(...) LOG(LOG_ERROR, GC_LOG_TAG, __VA_ARGS__)
59
60#define LOG_SCAN(...) LOGV_GC("SCAN: " __VA_ARGS__)
61#define LOG_MARK(...) LOGV_GC("MARK: " __VA_ARGS__)
62#define LOG_SWEEP(...) LOGV_GC("SWEEP: " __VA_ARGS__)
63#define LOG_REF(...) LOGV_GC("REF: " __VA_ARGS__)
64
65#define LOGV_SCAN(...) LOGVV_GC("SCAN: " __VA_ARGS__)
66#define LOGV_MARK(...) LOGVV_GC("MARK: " __VA_ARGS__)
67#define LOGV_SWEEP(...) LOGVV_GC("SWEEP: " __VA_ARGS__)
68#define LOGV_REF(...) LOGVV_GC("REF: " __VA_ARGS__)
69
70#if WITH_OBJECT_HEADERS
71u2 gGeneration = 0;
72static const Object *gMarkParent = NULL;
73#endif
74
75#ifndef PAGE_SIZE
76#define PAGE_SIZE 4096
77#endif
78#define ALIGN_UP_TO_PAGE_SIZE(p) \
79 (((size_t)(p) + (PAGE_SIZE - 1)) & ~(PAGE_SIZE - 1))
80
81/* Do not cast the result of this to a boolean; the only set bit
82 * may be > 1<<8.
83 */
84static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
85 __attribute__((always_inline));
86static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
87{
88 return dvmHeapBitmapIsObjectBitSetInList(ctx->bitmaps, ctx->numBitmaps, hc);
89}
90
91static bool
92createMarkStack(GcMarkStack *stack)
93{
94 const Object **limit;
95 size_t size;
96 int fd;
97
98 /* Create a stack big enough for the worst possible case,
99 * where the heap is perfectly full of the smallest object.
100 * TODO: be better about memory usage; use a smaller stack with
101 * overflow detection and recovery.
102 */
The Android Open Source Projectcc05ad22009-01-09 17:50:54 -0800103 size = dvmHeapSourceGetIdealFootprint() * sizeof(Object*) /
The Android Open Source Project2ad60cf2008-10-21 07:00:00 -0700104 (sizeof(Object) + HEAP_SOURCE_CHUNK_OVERHEAD);
105 size = ALIGN_UP_TO_PAGE_SIZE(size);
106 fd = ashmem_create_region("dalvik-heap-markstack", size);
107 if (fd < 0) {
108 LOGE_GC("Could not create %d-byte ashmem mark stack\n", size);
109 return false;
110 }
111 limit = (const Object **)mmap(NULL, size, PROT_READ | PROT_WRITE,
112 MAP_PRIVATE, fd, 0);
113 close(fd);
114 if (limit == MAP_FAILED) {
115 LOGE_GC("Could not mmap %d-byte ashmem mark stack\n", size);
116 return false;
117 }
118
119 memset(stack, 0, sizeof(*stack));
120 stack->limit = limit;
121 stack->base = (const Object **)((uintptr_t)limit + size);
122 stack->top = stack->base;
123
124 return true;
125}
126
127static void
128destroyMarkStack(GcMarkStack *stack)
129{
130 munmap((char *)stack->limit,
131 (uintptr_t)stack->base - (uintptr_t)stack->limit);
132 memset(stack, 0, sizeof(*stack));
133}
134
135#define MARK_STACK_PUSH(stack, obj) \
136 do { \
137 *--(stack).top = (obj); \
138 } while (false)
139
140bool
141dvmHeapBeginMarkStep()
142{
143 GcMarkContext *mc = &gDvm.gcHeap->markContext;
144 HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
145 size_t numBitmaps;
146
147 if (!createMarkStack(&mc->stack)) {
148 return false;
149 }
150
151 numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
152 HEAP_SOURCE_MAX_HEAP_COUNT);
153 if (numBitmaps <= 0) {
154 return false;
155 }
156
157 /* Create mark bitmaps that cover the same ranges as the
158 * current object bitmaps.
159 */
160 if (!dvmHeapBitmapInitListFromTemplates(mc->bitmaps, objectBitmaps,
161 numBitmaps, "mark"))
162 {
163 return false;
164 }
165
166 mc->numBitmaps = numBitmaps;
167 mc->finger = NULL;
168
169#if WITH_OBJECT_HEADERS
170 gGeneration++;
171#endif
172
173 return true;
174}
175
/* Set the mark bit for the chunk in whichever mark bitmap covers its
 * address, returning the previous state of the bit (nonzero if it was
 * already marked, zero if this is the first time).  Like isMarked(),
 * the nonzero result is a raw bit, not necessarily 1.
 */
static long setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
        __attribute__((always_inline));
static long
setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
{
    return dvmHeapBitmapSetAndReturnObjectBitInList(ctx->bitmaps,
            ctx->numBitmaps, hc);
}
184
/* Core marking routine shared by the root-set and recursive entry
 * points.
 *
 * obj         non-NULL object to mark.
 * ctx         current mark context (bitmaps, stack, finger).
 * checkFinger if true, push obj on the mark stack when its chunk lies
 *             below the current finger (the bitmap walk has already
 *             passed it, so it would otherwise never be scanned).
 * forceStack  if true, always push obj on the mark stack.
 *
 * Only acts the first time an object is marked; a second call is a
 * cheap no-op thanks to setAndReturnMarkBit().
 */
static void _markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
    __attribute__((always_inline));
static void
_markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
{
    DvmHeapChunk *hc;

    assert(obj != NULL);

#if GC_DEBUG(GC_DEBUG_PARANOID)
//TODO: make sure we're locked
    assert(obj != (Object *)gDvm.unlinkedJavaLangClass);
    assert(dvmIsValidObject(obj));
#endif

    hc = ptr2chunk(obj);
    if (!setAndReturnMarkBit(ctx, hc)) {
        /* This object was not previously marked.
         */
        if (forceStack || (checkFinger && (void *)hc < ctx->finger)) {
            /* This object will need to go on the mark stack.
             */
            MARK_STACK_PUSH(ctx->stack, obj);
        }

#if WITH_OBJECT_HEADERS
        /* Debug bookkeeping: record which GC generation marked this
         * object, and its parent in the reachability graph.
         */
        if (hc->scanGeneration != hc->markGeneration) {
            LOGE("markObject(0x%08x): wasn't scanned last time\n", (uint)obj);
            dvmAbort();
        }
        if (hc->markGeneration == gGeneration) {
            LOGE("markObject(0x%08x): already marked this generation\n",
                    (uint)obj);
            dvmAbort();
        }
        hc->oldMarkGeneration = hc->markGeneration;
        hc->markGeneration = gGeneration;
        hc->markFingerOld = hc->markFinger;
        hc->markFinger = ctx->finger;
        if (gMarkParent != NULL) {
            hc->parentOld = hc->parent;
            hc->parent = gMarkParent;
        } else {
            /* No parent (root mark); tag the old parent pointer. */
            hc->parent = (const Object *)((uintptr_t)hc->parent | 1);
        }
        hc->markCount++;
#endif
#if WITH_HPROF
        if (gDvm.gcHeap->hprofContext != NULL) {
            hprofMarkRootObject(gDvm.gcHeap->hprofContext, obj, 0);
        }
#endif
#if DVM_TRACK_HEAP_MARKING
        gDvm.gcHeap->markCount++;
        gDvm.gcHeap->markSize += dvmHeapSourceChunkSize((void *)hc) +
                HEAP_SOURCE_CHUNK_OVERHEAD;
#endif

        /* obj->clazz can be NULL if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  This is ok.
         */
        LOGV_MARK("0x%08x %s\n", (uint)obj,
                obj->clazz == NULL ? "<null class>" : obj->clazz->name);
    }
}
252
253/* Used to mark objects when recursing. Recursion is done by moving
254 * the finger across the bitmaps in address order and marking child
255 * objects. Any newly-marked objects whose addresses are lower than
256 * the finger won't be visited by the bitmap scan, so those objects
257 * need to be added to the mark stack.
258 */
259static void
260markObjectNonNull(const Object *obj, GcMarkContext *ctx)
261{
262 _markObjectNonNullCommon(obj, ctx, true, false);
263}
264
265#define markObject(obj, ctx) \
266 do { \
267 Object *MO_obj_ = (Object *)(obj); \
268 if (MO_obj_ != NULL) { \
269 markObjectNonNull(MO_obj_, (ctx)); \
270 } \
271 } while (false)
272
273/* If the object hasn't already been marked, mark it and
274 * schedule it to be scanned for references.
275 *
276 * obj may not be NULL. The macro dvmMarkObject() should
277 * be used in situations where a reference may be NULL.
278 *
279 * This function may only be called when marking the root
280 * set. When recursing, use the internal markObject[NonNull]().
281 */
282void
283dvmMarkObjectNonNull(const Object *obj)
284{
285 _markObjectNonNullCommon(obj, &gDvm.gcHeap->markContext, false, false);
286}
287
/* Mark the set of root objects.
 *
 * Things we need to scan:
 * - System classes defined by root classloader
 * - For each thread:
 *   - Interpreted stack, from top to "curFrame"
 *     - Dalvik registers (args + local vars)
 *   - JNI local references
 *   - Automatic VM local references (TrackedAlloc)
 *   - Associated Thread/VMThread object
 *   - ThreadGroups (could track & start with these instead of working
 *     upward from Threads)
 *   - Exception currently being thrown, if present
 * - JNI global references
 * - Interned string table
 * - Primitive classes
 * - Special objects
 *   - gDvm.outOfMemoryObj
 * - Objects allocated with ALLOC_NO_GC
 * - Objects pending finalization (but not yet finalized)
 * - Objects in debugger object registry
 *
 * Don't need:
 * - Native stack (for in-progress stuff in the VM)
 *   - The TrackedAlloc stuff watches all native VM references.
 *
 * The HPROF_SET_GC_SCAN_STATE calls bracketing each category tag
 * marked objects with their root type when an hprof dump is active;
 * they compile away otherwise.
 */
void dvmHeapMarkRootSet()
{
    HeapRefTable *refs;
    GcHeap *gcHeap;
    Object **op;

    gcHeap = gDvm.gcHeap;

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_STICKY_CLASS, 0);

    LOG_SCAN("root class loader\n");
    dvmGcScanRootClassLoader();
    LOG_SCAN("primitive classes\n");
    dvmGcScanPrimitiveClasses();

    /* dvmGcScanRootThreadGroups() sets a bunch of
     * different scan states internally.
     */
    HPROF_CLEAR_GC_SCAN_STATE();

    LOG_SCAN("root thread groups\n");
    dvmGcScanRootThreadGroups();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_INTERNED_STRING, 0);

    LOG_SCAN("interned strings\n");
    dvmGcScanInternedStrings();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_JNI_GLOBAL, 0);

    LOG_SCAN("JNI global refs\n");
    dvmGcMarkJniGlobalRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);

    LOG_SCAN("pending reference operations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->referenceOperations, true);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);

    LOG_SCAN("pending finalizations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->pendingFinalizationRefs, false);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_DEBUGGER, 0);

    LOG_SCAN("debugger refs\n");
    dvmGcMarkDebuggerRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_VM_INTERNAL, 0);

    /* Mark all ALLOC_NO_GC objects.
     */
    LOG_SCAN("ALLOC_NO_GC objects\n");
    refs = &gcHeap->nonCollectableRefs;
    op = refs->table;
    while ((uintptr_t)op < (uintptr_t)refs->nextEntry) {
        dvmMarkObjectNonNull(*(op++));
    }

    /* Mark any special objects we have sitting around.
     */
    LOG_SCAN("special objects\n");
    dvmMarkObjectNonNull(gDvm.outOfMemoryObj);
    dvmMarkObjectNonNull(gDvm.internalErrorObj);
//TODO: scan object references sitting in gDvm; use pointer begin & end

    HPROF_CLEAR_GC_SCAN_STATE();
}
382
383/*
384 * Nothing past this point is allowed to use dvmMarkObject*().
385 * Scanning/recursion must use markObject*(), which takes the
386 * finger into account.
387 */
388#define dvmMarkObjectNonNull __dont_use_dvmMarkObjectNonNull__
389
390
391/* Mark all of a ClassObject's interfaces.
392 */
393static void markInterfaces(const ClassObject *clazz, GcMarkContext *ctx)
394{
395 ClassObject **interfaces;
396 int interfaceCount;
397 int i;
398
399 /* Mark all interfaces.
400 */
401 interfaces = clazz->interfaces;
402 interfaceCount = clazz->interfaceCount;
403 for (i = 0; i < interfaceCount; i++) {
404 markObjectNonNull((Object *)*interfaces, ctx);
405 interfaces++;
406 }
407}
408
409/* Mark all objects referred to by a ClassObject's static fields.
410 */
411static void scanStaticFields(const ClassObject *clazz, GcMarkContext *ctx)
412{
413 StaticField *f;
414 int i;
415
416 //TODO: Optimize this with a bit vector or something
417 f = clazz->sfields;
418 for (i = 0; i < clazz->sfieldCount; i++) {
419 char c = f->field.signature[0];
420 if (c == '[' || c == 'L') {
421 /* It's an array or class reference.
422 */
423 markObject((Object *)f->value.l, ctx);
424 }
425 f++;
426 }
427}
428
/* Mark all objects referred to by a DataObject's instance fields.
 *
 * Two strategies: if clazz->refOffsets is not the CLASS_WALK_SUPER
 * sentinel, it is a bit vector encoding the offsets of all reference
 * fields (including inherited ones), and we walk the set bits with
 * CLZ.  Otherwise the offsets didn't fit in the bit vector and we
 * walk the ifields list of each class up the superclass chain.
 */
static void scanInstanceFields(const DataObject *obj, ClassObject *clazz,
        GcMarkContext *ctx)
{
    if (clazz->refOffsets != CLASS_WALK_SUPER) {
        unsigned int refOffsets = clazz->refOffsets;
        while (refOffsets != 0) {
            /* Peel off the highest set bit and mark the reference
             * stored at the field offset it encodes.
             */
            const int rshift = CLZ(refOffsets);
            refOffsets &= ~(CLASS_HIGH_BIT >> rshift);
            markObject(dvmGetFieldObject((Object*)obj,
                    CLASS_OFFSET_FROM_CLZ(rshift)), ctx);
        }
    } else {
        while (clazz != NULL) {
            InstField *f;
            int i;

            /* All of the fields that contain object references
             * are guaranteed to be at the beginning of the ifields list.
             */
            f = clazz->ifields;
            for (i = 0; i < clazz->ifieldRefCount; i++) {
                /* Mark the array or object reference.
                 * May be NULL.
                 *
                 * Note that, per the comment on struct InstField,
                 * f->byteOffset is the offset from the beginning of
                 * obj, not the offset into obj->instanceData.
                 */
                markObject(dvmGetFieldObject((Object*)obj, f->byteOffset), ctx);
                f++;
            }

            /* This will be NULL when we hit java.lang.Object
             */
            clazz = clazz->super;
        }
    }
}
469
470/* Mark all objects referred to by the array's contents.
471 */
472static void scanObjectArray(const ArrayObject *array, GcMarkContext *ctx)
473{
474 Object **contents;
475 u4 length;
476 u4 i;
477
478 contents = (Object **)array->contents;
479 length = array->length;
480
481 for (i = 0; i < length; i++) {
482 markObject(*contents, ctx); // may be NULL
483 contents++;
484 }
485}
486
487/* Mark all objects referred to by the ClassObject.
488 */
489static void scanClassObject(const ClassObject *clazz, GcMarkContext *ctx)
490{
491 LOGV_SCAN("---------> %s\n", clazz->name);
492
493 if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
494 /* We're an array; mark the class object of the contents
495 * of the array.
496 *
497 * Note that we won't necessarily reach the array's element
498 * class by scanning the array contents; the array may be
499 * zero-length, or may only contain null objects.
500 */
501 markObjectNonNull((Object *)clazz->elementClass, ctx);
502 }
503
504 /* We scan these explicitly in case the only remaining
505 * reference to a particular class object is via a data
506 * object; we may not be guaranteed to reach all
507 * live class objects via a classloader.
508 */
509 markObject((Object *)clazz->super, ctx); // may be NULL (java.lang.Object)
510 markObject(clazz->classLoader, ctx); // may be NULL
511
512 scanStaticFields(clazz, ctx);
513 markInterfaces(clazz, ctx);
514}
515
/* Mark all objects that obj refers to.
 *
 * Called on every object in markList.
 *
 * Marks obj's class, then its outgoing references (array contents or
 * instance fields).  Subclasses of java/lang/ref/Reference get special
 * treatment: the referent field is not marked by scanInstanceFields(),
 * and live references are chained onto the per-strength lists in
 * gDvm.gcHeap (via the vmData field) for later processing by
 * dvmHeapHandleReferences().
 */
static void scanObject(const Object *obj, GcMarkContext *ctx)
{
    ClassObject *clazz;

    assert(dvmIsValidObject(obj));
    LOGV_SCAN("0x%08x %s\n", (uint)obj, obj->clazz->name);

#if WITH_HPROF
    if (gDvm.gcHeap->hprofContext != NULL) {
        hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
    }
#endif

#if WITH_OBJECT_HEADERS
    /* Debug bookkeeping: each object must be scanned at most once
     * per GC generation.
     */
    if (ptr2chunk(obj)->scanGeneration == gGeneration) {
        LOGE("object 0x%08x was already scanned this generation\n",
                (uintptr_t)obj);
        dvmAbort();
    }
    ptr2chunk(obj)->oldScanGeneration = ptr2chunk(obj)->scanGeneration;
    ptr2chunk(obj)->scanGeneration = gGeneration;
    ptr2chunk(obj)->scanCount++;
#endif

    /* Get and mark the class object for this particular instance.
     */
    clazz = obj->clazz;
    if (clazz == NULL) {
        /* This can happen if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  The object
         * won't contain any references yet, so we can
         * just skip it.
         */
        return;
    } else if (clazz == gDvm.unlinkedJavaLangClass) {
        /* This class hasn't been linked yet.  We're guaranteed
         * that the object doesn't contain any references that
         * aren't already tracked, so we can skip scanning it.
         *
         * NOTE: unlinkedJavaLangClass is not on the heap, so
         * it's very important that we don't try marking it.
         */
        return;
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = obj;
#endif

    assert(dvmIsValidObject((Object *)clazz));
    markObjectNonNull((Object *)clazz, ctx);

    /* Mark any references in this object.
     */
    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* It's an array object.
         */
        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISOBJECTARRAY)) {
            /* It's an array of object references.
             */
            scanObjectArray((ArrayObject *)obj, ctx);
        }
        // else there's nothing else to scan
    } else {
        /* It's a DataObject-compatible object.
         */
        scanInstanceFields((DataObject *)obj, clazz, ctx);

        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISREFERENCE)) {
            GcHeap *gcHeap = gDvm.gcHeap;
            Object *referent;

            /* It's a subclass of java/lang/ref/Reference.
             * The fields in this class have been arranged
             * such that scanInstanceFields() did not actually
             * mark the "referent" field; we need to handle
             * it specially.
             *
             * If the referent already has a strong mark (isMarked(referent)),
             * we don't care about its reference status.
             */
            referent = dvmGetFieldObject(obj,
                    gDvm.offJavaLangRefReference_referent);
            if (referent != NULL &&
                    !isMarked(ptr2chunk(referent), &gcHeap->markContext))
            {
                u4 refFlags;

                if (gcHeap->markAllReferents) {
                    LOG_REF("Hard-marking a reference\n");

                    /* Don't bother with normal reference-following
                     * behavior, just mark the referent.  This should
                     * only be used when following objects that just
                     * became scheduled for finalization.
                     */
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* See if this reference was handled by a previous GC.
                 */
                if (dvmGetFieldObject(obj,
                        gDvm.offJavaLangRefReference_vmData) ==
                        SCHEDULED_REFERENCE_MAGIC)
                {
                    LOG_REF("Skipping scheduled reference\n");

                    /* Don't reschedule it, but make sure that its
                     * referent doesn't get collected (in case it's
                     * a PhantomReference and wasn't cleared automatically).
                     */
                    //TODO: Mark these after handling all new refs of
                    //      this strength, in case the new refs refer
                    //      to the same referent.  Not a very common
                    //      case, though.
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* Find out what kind of reference is pointing
                 * to referent.
                 */
                refFlags = GET_CLASS_FLAG_GROUP(clazz,
                        CLASS_ISREFERENCE |
                        CLASS_ISWEAKREFERENCE |
                        CLASS_ISPHANTOMREFERENCE);

            /* We use the vmData field of Reference objects
             * as a next pointer in a singly-linked list.
             * That way, we don't need to allocate any memory
             * while we're doing a GC.
             */
#define ADD_REF_TO_LIST(list, ref) \
            do { \
                Object *ARTL_ref_ = (/*de-const*/Object *)(ref); \
                dvmSetFieldObject(ARTL_ref_, \
                        gDvm.offJavaLangRefReference_vmData, list); \
                list = ARTL_ref_; \
            } while (false)

                /* At this stage, we just keep track of all of
                 * the live references that we've seen.  Later,
                 * we'll walk through each of these lists and
                 * deal with the referents.
                 */
                if (refFlags == CLASS_ISREFERENCE) {
                    /* It's a soft reference.  Depending on the state,
                     * we'll attempt to collect all of them, some of
                     * them, or none of them.
                     */
                    if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_NONE)
                    {
                sr_collect_none:
                        markObjectNonNull(referent, ctx);
                    } else if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_ALL)
                    {
                sr_collect_all:
                        ADD_REF_TO_LIST(gcHeap->softReferences, obj);
                    } else {
                        /* We'll only try to collect half of the
                         * referents.
                         */
                        if (gcHeap->softReferenceColor++ & 1) {
                            goto sr_collect_none;
                        }
                        goto sr_collect_all;
                    }
                } else {
                    /* It's a weak or phantom reference.
                     * Clearing CLASS_ISREFERENCE will reveal which.
                     */
                    refFlags &= ~CLASS_ISREFERENCE;
                    if (refFlags == CLASS_ISWEAKREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->weakReferences, obj);
                    } else if (refFlags == CLASS_ISPHANTOMREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->phantomReferences, obj);
                    } else {
                        assert(!"Unknown reference type");
                    }
                }
#undef ADD_REF_TO_LIST
            }
        }

    skip_reference:
        /* If this is a class object, mark various other things that
         * its internals point to.
         *
         * All class objects are instances of java.lang.Class,
         * including the java.lang.Class class object.
         */
        if (clazz == gDvm.classJavaLangClass) {
            scanClassObject((ClassObject *)obj, ctx);
        }
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = NULL;
#endif
}
723
724static void
725processMarkStack(GcMarkContext *ctx)
726{
727 const Object **const base = ctx->stack.base;
728
729 /* Scan anything that's on the mark stack.
730 * We can't use the bitmaps anymore, so use
731 * a finger that points past the end of them.
732 */
733 ctx->finger = (void *)ULONG_MAX;
734 while (ctx->stack.top != base) {
735 scanObject(*ctx->stack.top++, ctx);
736 }
737}
738
739#ifndef NDEBUG
740static uintptr_t gLastFinger = 0;
741#endif
742
743static bool
744scanBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
745{
746 GcMarkContext *ctx = (GcMarkContext *)arg;
747 size_t i;
748
749#ifndef NDEBUG
750 assert((uintptr_t)finger >= gLastFinger);
751 gLastFinger = (uintptr_t)finger;
752#endif
753
754 ctx->finger = finger;
755 for (i = 0; i < numPtrs; i++) {
756 /* The pointers we're getting back are DvmHeapChunks,
757 * not Objects.
758 */
759 scanObject(chunk2ptr(*ptrs++), ctx);
760 }
761
762 return true;
763}
764
765/* Given bitmaps with the root set marked, find and mark all
766 * reachable objects. When this returns, the entire set of
767 * live objects will be marked and the mark stack will be empty.
768 */
769void dvmHeapScanMarkedObjects()
770{
771 GcMarkContext *ctx = &gDvm.gcHeap->markContext;
772
773 assert(ctx->finger == NULL);
774
775 /* The bitmaps currently have bits set for the root set.
776 * Walk across the bitmaps and scan each object.
777 */
778#ifndef NDEBUG
779 gLastFinger = 0;
780#endif
781 dvmHeapBitmapWalkList(ctx->bitmaps, ctx->numBitmaps,
782 scanBitmapCallback, ctx);
783
784 /* We've walked the mark bitmaps. Scan anything that's
785 * left on the mark stack.
786 */
787 processMarkStack(ctx);
788
789 LOG_SCAN("done with marked objects\n");
790}
791
792/** @return true if we need to schedule a call to clear().
793 */
794static bool clearReference(Object *reference)
795{
796 /* This is what the default implementation of Reference.clear()
797 * does. We're required to clear all references to a given
798 * referent atomically, so we can't pop in and out of interp
799 * code each time.
800 *
801 * Also, someone may have subclassed one of the basic Reference
802 * types, overriding clear(). We can't trust the clear()
803 * implementation to call super.clear(); we cannot let clear()
804 * resurrect the referent. If we clear it here, we can safely
805 * call any overriding implementations.
806 */
807 dvmSetFieldObject(reference,
808 gDvm.offJavaLangRefReference_referent, NULL);
809
810#if FANCY_REFERENCE_SUBCLASS
811 /* See if clear() has actually been overridden. If so,
812 * we need to schedule a call to it before calling enqueue().
813 */
814 if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_clear]->clazz !=
815 gDvm.classJavaLangRefReference)
816 {
817 /* clear() has been overridden; return true to indicate
818 * that we need to schedule a call to the real clear()
819 * implementation.
820 */
821 return true;
822 }
823#endif
824
825 return false;
826}
827
/** @return true if we need to schedule a call to enqueue().
 *
 * NOTE: when FANCY_REFERENCE_SUBCLASS is enabled, the following
 * compound statement is the body of the #if'd condition (the
 * shortcut only applies to non-overridden enqueue()); when it is
 * disabled, the block runs unconditionally.
 */
static bool enqueueReference(Object *reference)
{
#if FANCY_REFERENCE_SUBCLASS
    /* See if this reference class has overridden enqueue();
     * if not, we can take a shortcut.
     */
    if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_enqueue]->clazz
            == gDvm.classJavaLangRefReference)
#endif
    {
        Object *queue = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queue);
        Object *queueNext = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queueNext);
        if (queue == NULL || queueNext != NULL) {
            /* There is no queue, or the reference has already
             * been enqueued.  The Reference.enqueue() method
             * will do nothing even if we call it.
             */
            return false;
        }
    }

    /* We need to call enqueue(), but if we called it from
     * here we'd probably deadlock.  Schedule a call.
     */
    return true;
}
858
/* Process one strength class of soft/weak/phantom references after
 * marking.
 *
 * refListHead  head of the singly-linked list (chained through the
 *              Reference vmData field) built during scanObject().
 * refType      strength of every reference on the list.
 *
 * All objects for stronger reference levels have been
 * marked before this is called.
 *
 * Unmarked referents get their references cleared and/or enqueued
 * via the heap worker; phantom referents are re-marked in a second
 * pass so they stay valid until explicitly cleared.
 */
void dvmHeapHandleReferences(Object *refListHead, enum RefType refType)
{
    Object *reference;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;
    const int offVmData = gDvm.offJavaLangRefReference_vmData;
    const int offReferent = gDvm.offJavaLangRefReference_referent;
    bool workRequired = false;

/* Debug counters; deliberately unindented in the original source. */
size_t numCleared = 0;
size_t numEnqueued = 0;
    reference = refListHead;
    while (reference != NULL) {
        Object *next;
        Object *referent;

        /* Pull the interesting fields out of the Reference object.
         */
        next = dvmGetFieldObject(reference, offVmData);
        referent = dvmGetFieldObject(reference, offReferent);

        //TODO: when handling REF_PHANTOM, unlink any references
        //      that fail this initial if().  We need to re-walk
        //      the list, and it would be nice to avoid the extra
        //      work.
        if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
            bool schedClear, schedEnqueue;

            /* This is the strongest reference that refers to referent.
             * Do the right thing.
             */
            switch (refType) {
            case REF_SOFT:
            case REF_WEAK:
                schedClear = clearReference(reference);
                schedEnqueue = enqueueReference(reference);
                break;
            case REF_PHANTOM:
                /* PhantomReferences are not cleared automatically.
                 * Until someone clears it (or the reference itself
                 * is collected), the referent must remain alive.
                 *
                 * It's necessary to fully mark the referent because
                 * it will still be present during the next GC, and
                 * all objects that it points to must be valid.
                 * (The referent will be marked outside of this loop,
                 * after handing all references of this strength, in
                 * case multiple references point to the same object.)
                 */
                schedClear = false;

                /* A PhantomReference is only useful with a
                 * queue, but since it's possible to create one
                 * without a queue, we need to check.
                 */
                schedEnqueue = enqueueReference(reference);
                break;
            default:
                assert(!"Bad reference type");
                schedClear = false;
                schedEnqueue = false;
                break;
            }
numCleared += schedClear ? 1 : 0;
numEnqueued += schedEnqueue ? 1 : 0;

            if (schedClear || schedEnqueue) {
                uintptr_t workBits;

                /* Stuff the clear/enqueue bits in the bottom of
                 * the pointer.  Assumes that objects are 8-byte
                 * aligned.
                 *
                 * Note that we are adding the *Reference* (which
                 * is by definition already marked at this point) to
                 * this list; we're not adding the referent (which
                 * has already been cleared).
                 */
                assert(((intptr_t)reference & 3) == 0);
                assert(((WORKER_CLEAR | WORKER_ENQUEUE) & ~3) == 0);
                workBits = (schedClear ? WORKER_CLEAR : 0) |
                           (schedEnqueue ? WORKER_ENQUEUE : 0);
                if (!dvmHeapAddRefToLargeTable(
                        &gDvm.gcHeap->referenceOperations,
                        (Object *)((uintptr_t)reference | workBits)))
                {
                    LOGE_HEAP("dvmMalloc(): no room for any more "
                            "reference operations\n");
                    dvmAbort();
                }
                workRequired = true;
            }

            if (refType != REF_PHANTOM) {
                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            } // else this is handled later for REF_PHANTOM

        } // else there was a stronger reference to the referent.

        reference = next;
    }
#define refType2str(r) \
    ((r) == REF_SOFT ? "soft" : ( \
     (r) == REF_WEAK ? "weak" : ( \
     (r) == REF_PHANTOM ? "phantom" : "UNKNOWN" )))
LOGD_HEAP("dvmHeapHandleReferences(): cleared %zd, enqueued %zd %s references\n", numCleared, numEnqueued, refType2str(refType));

    /* Walk though the reference list again, and mark any non-clear/marked
     * referents.  Only PhantomReferences can have non-clear referents
     * at this point.
     */
    if (refType == REF_PHANTOM) {
        bool scanRequired = false;

        HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
        reference = refListHead;
        while (reference != NULL) {
            Object *next;
            Object *referent;

            /* Pull the interesting fields out of the Reference object.
             */
            next = dvmGetFieldObject(reference, offVmData);
            referent = dvmGetFieldObject(reference, offReferent);

            if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
                markObjectNonNull(referent, markContext);
                scanRequired = true;

                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            }

            reference = next;
        }
        HPROF_CLEAR_GC_SCAN_STATE();

        if (scanRequired) {
            processMarkStack(markContext);
        }
    }

    if (workRequired) {
        dvmSignalHeapWorker(false);
    }
}
1012
1013
1014/* Find unreachable objects that need to be finalized,
1015 * and schedule them for finalization.
1016 */
void dvmHeapScheduleFinalizations()
{
    HeapRefTable newPendingRefs;
    LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
    Object **ref;
    Object **lastRef;
    size_t totalPendCount;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;

    /*
     * All reachable objects have been marked.
     * Any unmarked finalizable objects need to be finalized.
     */

    /* Create a table that the new pending refs will
     * be added to.  128 is an initial capacity; the table grows
     * via dvmHeapAddToHeapRefTable() as needed.
     */
    if (!dvmHeapInitHeapRefTable(&newPendingRefs, 128)) {
        //TODO: mark all finalizable refs and hope that
        //      we can schedule them next time.  Watch out,
        //      because we may be expecting to free up space
        //      by calling finalizers.
        LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
                "pending finalizations\n");
        dvmAbort();
    }

    /* Walk through finalizableRefs and move any unmarked references
     * to the list of new pending refs.  Marked (still-reachable) refs
     * are compacted toward the front of each table in place, so no
     * extra memory is needed for the partition.
     */
    totalPendCount = 0;
    while (finRefs != NULL) {
        Object **gapRef;
        size_t newPendCount = 0;

        /* gapRef trails ref; it points at the next slot to keep. */
        gapRef = ref = finRefs->refs.table;
        lastRef = finRefs->refs.nextEntry;
        while (ref < lastRef) {
            DvmHeapChunk *hc;

            hc = ptr2chunk(*ref);
            if (!isMarked(hc, markContext)) {
                /* Unreachable: schedule it for finalization. */
                if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
                    //TODO: add the current table and allocate
                    //      a new, smaller one.
                    LOGE_GC("dvmHeapScheduleFinalizations(): "
                            "no room for any more pending finalizations: %zd\n",
                            dvmHeapNumHeapRefTableEntries(&newPendingRefs));
                    dvmAbort();
                }
                newPendCount++;
            } else {
                /* This ref is marked, so will remain on finalizableRefs.
                 */
                if (newPendCount > 0) {
                    /* Copy it up to fill the holes left by moved refs.
                     */
                    *gapRef++ = *ref;
                } else {
                    /* No holes yet; don't bother copying.
                     */
                    gapRef++;
                }
            }
            ref++;
        }
        /* Truncate the table to just the surviving entries. */
        finRefs->refs.nextEntry = gapRef;
        //TODO: if the table is empty when we're done, free it.
        totalPendCount += newPendCount;
        finRefs = finRefs->next;
    }
    LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
            totalPendCount);
    if (totalPendCount == 0) {
        /* No objects required finalization.
         * Free the empty temporary table.
         */
        dvmClearReferenceTable(&newPendingRefs);
        return;
    }

    /* Add the new pending refs to the main list so the HeapWorker
     * thread can find them.
     */
    if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
                &newPendingRefs))
    {
        LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
                "pending finalizations\n");
        dvmAbort();
    }

    //TODO: try compacting the main list with a memcpy loop

    /* Mark the refs we just moved; we don't want them or their
     * children to get swept yet.
     */
    ref = newPendingRefs.table;
    lastRef = newPendingRefs.nextEntry;
    assert(ref < lastRef);
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
    while (ref < lastRef) {
        markObjectNonNull(*ref, markContext);
        ref++;
    }
    HPROF_CLEAR_GC_SCAN_STATE();

    /* Set markAllReferents so that we don't collect referents whose
     * only references are in final-reachable objects.
     * TODO: eventually provide normal reference behavior by properly
     * marking these references.
     */
    gDvm.gcHeap->markAllReferents = true;
    processMarkStack(markContext);
    gDvm.gcHeap->markAllReferents = false;

    /* Wake the HeapWorker thread to run the newly-pending finalizers. */
    dvmSignalHeapWorker(false);
}
1134
1135void dvmHeapFinishMarkStep()
1136{
1137 HeapBitmap *markBitmap;
1138 HeapBitmap objectBitmap;
1139 GcMarkContext *markContext;
1140
1141 markContext = &gDvm.gcHeap->markContext;
1142
1143 /* The sweep step freed every object that appeared in the
1144 * HeapSource bitmaps that didn't appear in the mark bitmaps.
1145 * The new state of the HeapSource is exactly the final
1146 * mark bitmaps, so swap them in.
1147 *
1148 * The old bitmaps will be swapped into the context so that
1149 * we can clean them up.
1150 */
1151 dvmHeapSourceReplaceObjectBitmaps(markContext->bitmaps,
1152 markContext->numBitmaps);
1153
1154 /* Clean up the old HeapSource bitmaps and anything else associated
1155 * with the marking process.
1156 */
1157 dvmHeapBitmapDeleteList(markContext->bitmaps, markContext->numBitmaps);
1158 destroyMarkStack(&markContext->stack);
1159
1160 memset(markContext, 0, sizeof(*markContext));
1161}
1162
1163#if WITH_HPROF && WITH_HPROF_UNREACHABLE
1164static bool
1165hprofUnreachableBitmapCallback(size_t numPtrs, void **ptrs,
1166 const void *finger, void *arg)
1167{
1168 hprof_context_t *hctx = (hprof_context_t *)arg;
1169 size_t i;
1170
1171 for (i = 0; i < numPtrs; i++) {
1172 Object *obj;
1173
1174 /* The pointers we're getting back are DvmHeapChunks, not
1175 * Objects.
1176 */
1177 obj = (Object *)chunk2ptr(*ptrs++);
1178
1179 hprofMarkRootObject(hctx, obj, 0);
1180 hprofDumpHeapObject(hctx, obj);
1181 }
1182
1183 return true;
1184}
1185
1186static void
1187hprofDumpUnmarkedObjects(const HeapBitmap markBitmaps[],
1188 const HeapBitmap objectBitmaps[], size_t numBitmaps)
1189{
1190 hprof_context_t *hctx = gDvm.gcHeap->hprofContext;
1191 if (hctx == NULL) {
1192 return;
1193 }
1194
1195 LOGI("hprof: dumping unreachable objects\n");
1196
1197 HPROF_SET_GC_SCAN_STATE(HPROF_UNREACHABLE, 0);
1198
1199 dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
1200 hprofUnreachableBitmapCallback, hctx);
1201
1202 HPROF_CLEAR_GC_SCAN_STATE();
1203}
1204#endif
1205
/* Bitmap-walk callback that frees each unmarked chunk handed to it:
 * releases the object's monitor, tears down class internals when the
 * object is itself a class, and returns the chunk to the HeapSource.
 * Always returns true so the walk continues over every pointer.
 */
static bool
sweepBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
{
    /* Cache the class-of-classes; see the comment below about why
     * relying on gDvm here is safe even during the sweep.
     */
    const ClassObject *const classJavaLangClass = gDvm.classJavaLangClass;
    size_t i;

    for (i = 0; i < numPtrs; i++) {
        DvmHeapChunk *hc;
        Object *obj;

        /* The pointers we're getting back are DvmHeapChunks, not
         * Objects.
         */
        hc = (DvmHeapChunk *)*ptrs++;
        obj = (Object *)chunk2ptr(hc);

#if WITH_OBJECT_HEADERS
        /* Sanity check: a chunk marked in the current generation
         * must never reach the sweep list.
         */
        if (hc->markGeneration == gGeneration) {
            LOGE("sweeping marked object: 0x%08x\n", (uint)obj);
            dvmAbort();
        }
#endif

        /* Free the monitor associated with the object.
         */
        dvmFreeObjectMonitor(obj);

        /* NOTE: Dereferencing clazz is dangerous.  If obj was the last
         * one to reference its class object, the class object could be
         * on the sweep list, and could already have been swept, leaving
         * us with a stale pointer.
         */
        LOGV_SWEEP("FREE: 0x%08x %s\n", (uint)obj, obj->clazz->name);

        /* This assumes that java.lang.Class will never go away.
         * If it can, and we were the last reference to it, it
         * could have already been swept.  However, even in that case,
         * gDvm.classJavaLangClass should still have a useful
         * value.
         */
        if (obj->clazz == classJavaLangClass) {
            LOGV_SWEEP("---------------> %s\n", ((ClassObject *)obj)->name);
            /* dvmFreeClassInnards() may have already been called,
             * but it's safe to call on the same ClassObject twice.
             */
            dvmFreeClassInnards((ClassObject *)obj);
        }

#if 0
        /* Debug-only (disabled): overwrite the to-be-freed object to
         * make stale references more obvious.
         */
        {
            int chunklen;
            ClassObject *clazz = obj->clazz;
#if WITH_OBJECT_HEADERS
            DvmHeapChunk chunk = *hc;
            chunk.header = ~OBJECT_HEADER | 1;
#endif
            chunklen = dvmHeapSourceChunkSize(hc);
            memset(hc, 0xa5, chunklen);
            obj->clazz = (ClassObject *)((uintptr_t)clazz ^ 0xffffffff);
#if WITH_OBJECT_HEADERS
            *hc = chunk;
#endif
        }
#endif

//TODO: provide a heapsource function that takes a list of pointers to free
//      and call it outside of this loop.
        dvmHeapSourceFree(hc);
    }

    return true;
}
1281
1282/* A function suitable for passing to dvmHashForeachRemove()
1283 * to clear out any unmarked objects. Clears the low bits
1284 * of the pointer because the intern table may set them.
1285 */
1286static int isUnmarkedObject(void *object)
1287{
1288 return !isMarked(ptr2chunk((uintptr_t)object & ~(HB_OBJECT_ALIGNMENT-1)),
1289 &gDvm.gcHeap->markContext);
1290}
1291
1292/* Walk through the list of objects that haven't been
1293 * marked and free them.
1294 */
1295void
1296dvmHeapSweepUnmarkedObjects(int *numFreed, size_t *sizeFreed)
1297{
1298 const HeapBitmap *markBitmaps;
1299 const GcMarkContext *markContext;
1300 HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
1301 size_t origObjectsAllocated;
1302 size_t origBytesAllocated;
1303 size_t numBitmaps;
1304
1305 /* All reachable objects have been marked.
1306 * Detach any unreachable interned strings before
1307 * we sweep.
1308 */
1309 dvmGcDetachDeadInternedStrings(isUnmarkedObject);
1310
1311 /* Free any known objects that are not marked.
1312 */
1313 origObjectsAllocated = dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
1314 origBytesAllocated = dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);
1315
1316 markContext = &gDvm.gcHeap->markContext;
1317 markBitmaps = markContext->bitmaps;
1318 numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
1319 HEAP_SOURCE_MAX_HEAP_COUNT);
1320#ifndef NDEBUG
1321 if (numBitmaps != markContext->numBitmaps) {
1322 LOGE("heap bitmap count mismatch: %zd != %zd\n",
1323 numBitmaps, markContext->numBitmaps);
1324 dvmAbort();
1325 }
1326#endif
1327
1328#if WITH_HPROF && WITH_HPROF_UNREACHABLE
1329 hprofDumpUnmarkedObjects(markBitmaps, objectBitmaps, numBitmaps);
1330#endif
1331
1332 dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
1333 sweepBitmapCallback, NULL);
1334
1335 *numFreed = origObjectsAllocated -
1336 dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
1337 *sizeFreed = origBytesAllocated -
1338 dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);
1339
1340#ifdef WITH_PROFILER
1341 if (gDvm.allocProf.enabled) {
1342 gDvm.allocProf.freeCount += *numFreed;
1343 gDvm.allocProf.freeSize += *sizeFreed;
1344 }
1345#endif
1346}