/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "Dalvik.h"
18#include "alloc/HeapBitmap.h"
19#include "alloc/HeapInternal.h"
20#include "alloc/HeapSource.h"
21#include "alloc/MarkSweep.h"
22#include <limits.h> // for ULONG_MAX
23#include <sys/mman.h> // for madvise(), mmap()
24#include <cutils/ashmem.h>
The Android Open Source Project99409882009-03-18 22:20:24 -070025#include <errno.h>
The Android Open Source Projectf6c38712009-03-03 19:28:47 -080026
27#define GC_DEBUG_PARANOID 2
28#define GC_DEBUG_BASIC 1
29#define GC_DEBUG_OFF 0
30#define GC_DEBUG(l) (GC_DEBUG_LEVEL >= (l))
31
32#if 1
33#define GC_DEBUG_LEVEL GC_DEBUG_PARANOID
34#else
35#define GC_DEBUG_LEVEL GC_DEBUG_OFF
36#endif
37
38#define VERBOSE_GC 0
39
40#define GC_LOG_TAG LOG_TAG "-gc"
41
42#if LOG_NDEBUG
43#define LOGV_GC(...) ((void)0)
44#define LOGD_GC(...) ((void)0)
45#else
46#define LOGV_GC(...) LOG(LOG_VERBOSE, GC_LOG_TAG, __VA_ARGS__)
47#define LOGD_GC(...) LOG(LOG_DEBUG, GC_LOG_TAG, __VA_ARGS__)
48#endif
49
50#if VERBOSE_GC
51#define LOGVV_GC(...) LOGV_GC(__VA_ARGS__)
52#else
53#define LOGVV_GC(...) ((void)0)
54#endif
55
56#define LOGI_GC(...) LOG(LOG_INFO, GC_LOG_TAG, __VA_ARGS__)
57#define LOGW_GC(...) LOG(LOG_WARN, GC_LOG_TAG, __VA_ARGS__)
58#define LOGE_GC(...) LOG(LOG_ERROR, GC_LOG_TAG, __VA_ARGS__)
59
60#define LOG_SCAN(...) LOGV_GC("SCAN: " __VA_ARGS__)
61#define LOG_MARK(...) LOGV_GC("MARK: " __VA_ARGS__)
62#define LOG_SWEEP(...) LOGV_GC("SWEEP: " __VA_ARGS__)
63#define LOG_REF(...) LOGV_GC("REF: " __VA_ARGS__)
64
65#define LOGV_SCAN(...) LOGVV_GC("SCAN: " __VA_ARGS__)
66#define LOGV_MARK(...) LOGVV_GC("MARK: " __VA_ARGS__)
67#define LOGV_SWEEP(...) LOGVV_GC("SWEEP: " __VA_ARGS__)
68#define LOGV_REF(...) LOGVV_GC("REF: " __VA_ARGS__)
69
70#if WITH_OBJECT_HEADERS
71u2 gGeneration = 0;
72static const Object *gMarkParent = NULL;
73#endif
74
75#ifndef PAGE_SIZE
76#define PAGE_SIZE 4096
77#endif
78#define ALIGN_UP_TO_PAGE_SIZE(p) \
79 (((size_t)(p) + (PAGE_SIZE - 1)) & ~(PAGE_SIZE - 1))
80
81/* Do not cast the result of this to a boolean; the only set bit
82 * may be > 1<<8.
83 */
84static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
85 __attribute__((always_inline));
86static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
87{
88 return dvmHeapBitmapIsObjectBitSetInList(ctx->bitmaps, ctx->numBitmaps, hc);
89}
90
91static bool
92createMarkStack(GcMarkStack *stack)
93{
94 const Object **limit;
95 size_t size;
The Android Open Source Project99409882009-03-18 22:20:24 -070096 int fd, err;
The Android Open Source Projectf6c38712009-03-03 19:28:47 -080097
98 /* Create a stack big enough for the worst possible case,
99 * where the heap is perfectly full of the smallest object.
100 * TODO: be better about memory usage; use a smaller stack with
101 * overflow detection and recovery.
102 */
103 size = dvmHeapSourceGetIdealFootprint() * sizeof(Object*) /
104 (sizeof(Object) + HEAP_SOURCE_CHUNK_OVERHEAD);
105 size = ALIGN_UP_TO_PAGE_SIZE(size);
106 fd = ashmem_create_region("dalvik-heap-markstack", size);
107 if (fd < 0) {
The Android Open Source Project99409882009-03-18 22:20:24 -0700108 LOGE_GC("Could not create %d-byte ashmem mark stack: %s\n",
109 size, strerror(errno));
The Android Open Source Projectf6c38712009-03-03 19:28:47 -0800110 return false;
111 }
112 limit = (const Object **)mmap(NULL, size, PROT_READ | PROT_WRITE,
113 MAP_PRIVATE, fd, 0);
The Android Open Source Project99409882009-03-18 22:20:24 -0700114 err = errno;
The Android Open Source Projectf6c38712009-03-03 19:28:47 -0800115 close(fd);
116 if (limit == MAP_FAILED) {
The Android Open Source Project99409882009-03-18 22:20:24 -0700117 LOGE_GC("Could not mmap %d-byte ashmem mark stack: %s\n",
118 size, strerror(err));
The Android Open Source Projectf6c38712009-03-03 19:28:47 -0800119 return false;
120 }
121
122 memset(stack, 0, sizeof(*stack));
123 stack->limit = limit;
124 stack->base = (const Object **)((uintptr_t)limit + size);
125 stack->top = stack->base;
126
127 return true;
128}
129
130static void
131destroyMarkStack(GcMarkStack *stack)
132{
133 munmap((char *)stack->limit,
134 (uintptr_t)stack->base - (uintptr_t)stack->limit);
135 memset(stack, 0, sizeof(*stack));
136}
137
/* Push obj onto the mark stack.  The stack grows downward from base,
 * so a push pre-decrements top.  No overflow check is done here; the
 * stack is sized for the worst case by createMarkStack().
 */
#define MARK_STACK_PUSH(stack, obj) \
    do { \
        *--(stack).top = (obj); \
    } while (false)
142
143bool
144dvmHeapBeginMarkStep()
145{
146 GcMarkContext *mc = &gDvm.gcHeap->markContext;
147 HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
148 size_t numBitmaps;
149
150 if (!createMarkStack(&mc->stack)) {
151 return false;
152 }
153
154 numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
155 HEAP_SOURCE_MAX_HEAP_COUNT);
156 if (numBitmaps <= 0) {
157 return false;
158 }
159
160 /* Create mark bitmaps that cover the same ranges as the
161 * current object bitmaps.
162 */
163 if (!dvmHeapBitmapInitListFromTemplates(mc->bitmaps, objectBitmaps,
164 numBitmaps, "mark"))
165 {
166 return false;
167 }
168
169 mc->numBitmaps = numBitmaps;
170 mc->finger = NULL;
171
172#if WITH_OBJECT_HEADERS
173 gGeneration++;
174#endif
175
176 return true;
177}
178
179static long setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
180 __attribute__((always_inline));
181static long
182setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
183{
184 return dvmHeapBitmapSetAndReturnObjectBitInList(ctx->bitmaps,
185 ctx->numBitmaps, hc);
186}
187
/* Core marking routine shared by the root-set and recursive markers.
 *
 * obj         - object to mark; must not be NULL.
 * ctx         - current mark context (bitmaps, stack, finger).
 * checkFinger - if true, push objects whose chunk address is below the
 *               current finger onto the mark stack (the bitmap walk has
 *               already passed them, so they would otherwise be missed).
 * forceStack  - if true, always push newly-marked objects on the stack.
 *
 * If the object was already marked, this does nothing.
 */
static void _markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
    __attribute__((always_inline));
static void
_markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
{
    DvmHeapChunk *hc;

    assert(obj != NULL);

#if GC_DEBUG(GC_DEBUG_PARANOID)
//TODO: make sure we're locked
    assert(obj != (Object *)gDvm.unlinkedJavaLangClass);
    assert(dvmIsValidObject(obj));
#endif

    hc = ptr2chunk(obj);
    if (!setAndReturnMarkBit(ctx, hc)) {
        /* This object was not previously marked.
         */
        if (forceStack || (checkFinger && (void *)hc < ctx->finger)) {
            /* This object will need to go on the mark stack.
             */
            MARK_STACK_PUSH(ctx->stack, obj);
        }

#if WITH_OBJECT_HEADERS
        /* Debug bookkeeping: record per-chunk mark/scan generations and
         * the parent that caused this mark, aborting on inconsistencies.
         */
        if (hc->scanGeneration != hc->markGeneration) {
            LOGE("markObject(0x%08x): wasn't scanned last time\n", (uint)obj);
            dvmAbort();
        }
        if (hc->markGeneration == gGeneration) {
            LOGE("markObject(0x%08x): already marked this generation\n",
                    (uint)obj);
            dvmAbort();
        }
        hc->oldMarkGeneration = hc->markGeneration;
        hc->markGeneration = gGeneration;
        hc->markFingerOld = hc->markFinger;
        hc->markFinger = ctx->finger;
        if (gMarkParent != NULL) {
            hc->parentOld = hc->parent;
            hc->parent = gMarkParent;
        } else {
            /* No parent (root mark); tag the stale parent pointer. */
            hc->parent = (const Object *)((uintptr_t)hc->parent | 1);
        }
        hc->markCount++;
#endif
#if WITH_HPROF
        if (gDvm.gcHeap->hprofContext != NULL) {
            hprofMarkRootObject(gDvm.gcHeap->hprofContext, obj, 0);
        }
#endif
#if DVM_TRACK_HEAP_MARKING
        gDvm.gcHeap->markCount++;
        gDvm.gcHeap->markSize += dvmHeapSourceChunkSize((void *)hc) +
                HEAP_SOURCE_CHUNK_OVERHEAD;
#endif

        /* obj->clazz can be NULL if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  This is ok.
         */
        LOGV_MARK("0x%08x %s\n", (uint)obj,
                obj->clazz == NULL ? "<null class>" : obj->clazz->name);
    }
}
255
256/* Used to mark objects when recursing. Recursion is done by moving
257 * the finger across the bitmaps in address order and marking child
258 * objects. Any newly-marked objects whose addresses are lower than
259 * the finger won't be visited by the bitmap scan, so those objects
260 * need to be added to the mark stack.
261 */
262static void
263markObjectNonNull(const Object *obj, GcMarkContext *ctx)
264{
265 _markObjectNonNullCommon(obj, ctx, true, false);
266}
267
/* NULL-tolerant wrapper around markObjectNonNull().  Evaluates obj
 * exactly once, so it is safe to pass an expression with side effects.
 */
#define markObject(obj, ctx) \
    do { \
        Object *MO_obj_ = (Object *)(obj); \
        if (MO_obj_ != NULL) { \
            markObjectNonNull(MO_obj_, (ctx)); \
        } \
    } while (false)
275
276/* If the object hasn't already been marked, mark it and
277 * schedule it to be scanned for references.
278 *
279 * obj may not be NULL. The macro dvmMarkObject() should
280 * be used in situations where a reference may be NULL.
281 *
282 * This function may only be called when marking the root
283 * set. When recursing, use the internal markObject[NonNull]().
284 */
285void
286dvmMarkObjectNonNull(const Object *obj)
287{
288 _markObjectNonNullCommon(obj, &gDvm.gcHeap->markContext, false, false);
289}
290
/* Mark the set of root objects.
 *
 * Things we need to scan:
 * - System classes defined by root classloader
 * - For each thread:
 *   - Interpreted stack, from top to "curFrame"
 *     - Dalvik registers (args + local vars)
 *   - JNI local references
 *   - Automatic VM local references (TrackedAlloc)
 *   - Associated Thread/VMThread object
 *   - ThreadGroups (could track & start with these instead of working
 *     upward from Threads)
 *   - Exception currently being thrown, if present
 * - JNI global references
 * - Interned string table
 * - Primitive classes
 * - Special objects
 *   - gDvm.outOfMemoryObj
 * - Objects allocated with ALLOC_NO_GC
 * - Objects pending finalization (but not yet finalized)
 * - Objects in debugger object registry
 *
 * Don't need:
 * - Native stack (for in-progress stuff in the VM)
 *   - The TrackedAlloc stuff watches all native VM references.
 *
 * The HPROF_SET_GC_SCAN_STATE/HPROF_CLEAR_GC_SCAN_STATE macros tag
 * each group of roots so a concurrent hprof dump can attribute them.
 */
void dvmHeapMarkRootSet()
{
    HeapRefTable *refs;
    GcHeap *gcHeap;
    Object **op;

    gcHeap = gDvm.gcHeap;

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_STICKY_CLASS, 0);

    LOG_SCAN("root class loader\n");
    dvmGcScanRootClassLoader();
    LOG_SCAN("primitive classes\n");
    dvmGcScanPrimitiveClasses();

    /* dvmGcScanRootThreadGroups() sets a bunch of
     * different scan states internally.
     */
    HPROF_CLEAR_GC_SCAN_STATE();

    LOG_SCAN("root thread groups\n");
    dvmGcScanRootThreadGroups();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_INTERNED_STRING, 0);

    LOG_SCAN("interned strings\n");
    dvmGcScanInternedStrings();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_JNI_GLOBAL, 0);

    LOG_SCAN("JNI global refs\n");
    dvmGcMarkJniGlobalRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);

    LOG_SCAN("pending reference operations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->referenceOperations, true);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);

    LOG_SCAN("pending finalizations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->pendingFinalizationRefs, false);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_DEBUGGER, 0);

    LOG_SCAN("debugger refs\n");
    dvmGcMarkDebuggerRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_VM_INTERNAL, 0);

    /* Mark all ALLOC_NO_GC objects.
     */
    LOG_SCAN("ALLOC_NO_GC objects\n");
    refs = &gcHeap->nonCollectableRefs;
    op = refs->table;
    while ((uintptr_t)op < (uintptr_t)refs->nextEntry) {
        dvmMarkObjectNonNull(*(op++));
    }

    /* Mark any special objects we have sitting around.
     */
    LOG_SCAN("special objects\n");
    dvmMarkObjectNonNull(gDvm.outOfMemoryObj);
    dvmMarkObjectNonNull(gDvm.internalErrorObj);
    dvmMarkObjectNonNull(gDvm.noClassDefFoundErrorObj);
//TODO: scan object references sitting in gDvm; use pointer begin & end

    HPROF_CLEAR_GC_SCAN_STATE();
}
386
387/*
388 * Nothing past this point is allowed to use dvmMarkObject*().
389 * Scanning/recursion must use markObject*(), which takes the
390 * finger into account.
391 */
392#define dvmMarkObjectNonNull __dont_use_dvmMarkObjectNonNull__
393
394
395/* Mark all of a ClassObject's interfaces.
396 */
397static void markInterfaces(const ClassObject *clazz, GcMarkContext *ctx)
398{
399 ClassObject **interfaces;
400 int interfaceCount;
401 int i;
402
403 /* Mark all interfaces.
404 */
405 interfaces = clazz->interfaces;
406 interfaceCount = clazz->interfaceCount;
407 for (i = 0; i < interfaceCount; i++) {
408 markObjectNonNull((Object *)*interfaces, ctx);
409 interfaces++;
410 }
411}
412
413/* Mark all objects referred to by a ClassObject's static fields.
414 */
415static void scanStaticFields(const ClassObject *clazz, GcMarkContext *ctx)
416{
417 StaticField *f;
418 int i;
419
420 //TODO: Optimize this with a bit vector or something
421 f = clazz->sfields;
422 for (i = 0; i < clazz->sfieldCount; i++) {
423 char c = f->field.signature[0];
424 if (c == '[' || c == 'L') {
425 /* It's an array or class reference.
426 */
427 markObject((Object *)f->value.l, ctx);
428 }
429 f++;
430 }
431}
432
433/* Mark all objects referred to by a DataObject's instance fields.
434 */
435static void scanInstanceFields(const DataObject *obj, ClassObject *clazz,
436 GcMarkContext *ctx)
437{
438//TODO: Optimize this by avoiding walking the superclass chain
439 while (clazz != NULL) {
440 InstField *f;
441 int i;
442
443 /* All of the fields that contain object references
444 * are guaranteed to be at the beginning of the ifields list.
445 */
446 f = clazz->ifields;
447 for (i = 0; i < clazz->ifieldRefCount; i++) {
448 /* Mark the array or object reference.
449 * May be NULL.
450 *
451 * Note that, per the comment on struct InstField,
452 * f->byteOffset is the offset from the beginning of
453 * obj, not the offset into obj->instanceData.
454 */
455 markObject(dvmGetFieldObject((Object*)obj, f->byteOffset), ctx);
456 f++;
457 }
458
459 /* This will be NULL when we hit java.lang.Object
460 */
461 clazz = clazz->super;
462 }
463}
464
465/* Mark all objects referred to by the array's contents.
466 */
467static void scanObjectArray(const ArrayObject *array, GcMarkContext *ctx)
468{
469 Object **contents;
470 u4 length;
471 u4 i;
472
473 contents = (Object **)array->contents;
474 length = array->length;
475
476 for (i = 0; i < length; i++) {
477 markObject(*contents, ctx); // may be NULL
478 contents++;
479 }
480}
481
482/* Mark all objects referred to by the ClassObject.
483 */
484static void scanClassObject(const ClassObject *clazz, GcMarkContext *ctx)
485{
486 LOGV_SCAN("---------> %s\n", clazz->name);
487
488 if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
489 /* We're an array; mark the class object of the contents
490 * of the array.
491 *
492 * Note that we won't necessarily reach the array's element
493 * class by scanning the array contents; the array may be
494 * zero-length, or may only contain null objects.
495 */
496 markObjectNonNull((Object *)clazz->elementClass, ctx);
497 }
498
499 /* We scan these explicitly in case the only remaining
500 * reference to a particular class object is via a data
501 * object; we may not be guaranteed to reach all
502 * live class objects via a classloader.
503 */
504 markObject((Object *)clazz->super, ctx); // may be NULL (java.lang.Object)
505 markObject(clazz->classLoader, ctx); // may be NULL
506
507 scanStaticFields(clazz, ctx);
508 markInterfaces(clazz, ctx);
509}
510
/* Mark all objects that obj refers to.
 *
 * Called on every object in markList.  Marks the object's class, its
 * instance fields or array contents, and — for java.lang.ref.Reference
 * subclasses — applies the soft/weak/phantom reference policy to the
 * referent instead of marking it unconditionally.
 */
static void scanObject(const Object *obj, GcMarkContext *ctx)
{
    ClassObject *clazz;

    assert(dvmIsValidObject(obj));
    LOGV_SCAN("0x%08x %s\n", (uint)obj, obj->clazz->name);

#if WITH_HPROF
    if (gDvm.gcHeap->hprofContext != NULL) {
        hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
    }
#endif

#if WITH_OBJECT_HEADERS
    /* Debug bookkeeping: each object may be scanned at most once per
     * generation.
     */
    if (ptr2chunk(obj)->scanGeneration == gGeneration) {
        LOGE("object 0x%08x was already scanned this generation\n",
                (uintptr_t)obj);
        dvmAbort();
    }
    ptr2chunk(obj)->oldScanGeneration = ptr2chunk(obj)->scanGeneration;
    ptr2chunk(obj)->scanGeneration = gGeneration;
    ptr2chunk(obj)->scanCount++;
#endif

    /* Get and mark the class object for this particular instance.
     */
    clazz = obj->clazz;
    if (clazz == NULL) {
        /* This can happen if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  The object
         * won't contain any references yet, so we can
         * just skip it.
         */
        return;
    } else if (clazz == gDvm.unlinkedJavaLangClass) {
        /* This class hasn't been linked yet.  We're guaranteed
         * that the object doesn't contain any references that
         * aren't already tracked, so we can skip scanning it.
         *
         * NOTE: unlinkedJavaLangClass is not on the heap, so
         * it's very important that we don't try marking it.
         */
        return;
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = obj;
#endif

    assert(dvmIsValidObject((Object *)clazz));
    markObjectNonNull((Object *)clazz, ctx);

    /* Mark any references in this object.
     */
    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* It's an array object.
         */
        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISOBJECTARRAY)) {
            /* It's an array of object references.
             */
            scanObjectArray((ArrayObject *)obj, ctx);
        }
        // else there's nothing else to scan
    } else {
        /* It's a DataObject-compatible object.
         */
        scanInstanceFields((DataObject *)obj, clazz, ctx);

        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISREFERENCE)) {
            GcHeap *gcHeap = gDvm.gcHeap;
            Object *referent;

            /* It's a subclass of java/lang/ref/Reference.
             * The fields in this class have been arranged
             * such that scanInstanceFields() did not actually
             * mark the "referent" field; we need to handle
             * it specially.
             *
             * If the referent already has a strong mark (isMarked(referent)),
             * we don't care about its reference status.
             */
            referent = dvmGetFieldObject(obj,
                    gDvm.offJavaLangRefReference_referent);
            if (referent != NULL &&
                    !isMarked(ptr2chunk(referent), &gcHeap->markContext))
            {
                u4 refFlags;

                if (gcHeap->markAllReferents) {
                    LOG_REF("Hard-marking a reference\n");

                    /* Don't bother with normal reference-following
                     * behavior, just mark the referent.  This should
                     * only be used when following objects that just
                     * became scheduled for finalization.
                     */
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* See if this reference was handled by a previous GC.
                 */
                if (dvmGetFieldObject(obj,
                        gDvm.offJavaLangRefReference_vmData) ==
                        SCHEDULED_REFERENCE_MAGIC)
                {
                    LOG_REF("Skipping scheduled reference\n");

                    /* Don't reschedule it, but make sure that its
                     * referent doesn't get collected (in case it's
                     * a PhantomReference and wasn't cleared automatically).
                     */
                    //TODO: Mark these after handling all new refs of
                    //      this strength, in case the new refs refer
                    //      to the same referent.  Not a very common
                    //      case, though.
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* Find out what kind of reference is pointing
                 * to referent.
                 */
                refFlags = GET_CLASS_FLAG_GROUP(clazz,
                        CLASS_ISREFERENCE |
                        CLASS_ISWEAKREFERENCE |
                        CLASS_ISPHANTOMREFERENCE);

            /* We use the vmData field of Reference objects
             * as a next pointer in a singly-linked list.
             * That way, we don't need to allocate any memory
             * while we're doing a GC.
             */
#define ADD_REF_TO_LIST(list, ref) \
            do { \
                Object *ARTL_ref_ = (/*de-const*/Object *)(ref); \
                dvmSetFieldObject(ARTL_ref_, \
                        gDvm.offJavaLangRefReference_vmData, list); \
                list = ARTL_ref_; \
            } while (false)

                /* At this stage, we just keep track of all of
                 * the live references that we've seen.  Later,
                 * we'll walk through each of these lists and
                 * deal with the referents.
                 */
                if (refFlags == CLASS_ISREFERENCE) {
                    /* It's a soft reference.  Depending on the state,
                     * we'll attempt to collect all of them, some of
                     * them, or none of them.
                     */
                    if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_NONE)
                    {
                sr_collect_none:
                        markObjectNonNull(referent, ctx);
                    } else if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_ALL)
                    {
                sr_collect_all:
                        ADD_REF_TO_LIST(gcHeap->softReferences, obj);
                    } else {
                        /* We'll only try to collect half of the
                         * referents.
                         */
                        if (gcHeap->softReferenceColor++ & 1) {
                            goto sr_collect_none;
                        }
                        goto sr_collect_all;
                    }
                } else {
                    /* It's a weak or phantom reference.
                     * Clearing CLASS_ISREFERENCE will reveal which.
                     */
                    refFlags &= ~CLASS_ISREFERENCE;
                    if (refFlags == CLASS_ISWEAKREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->weakReferences, obj);
                    } else if (refFlags == CLASS_ISPHANTOMREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->phantomReferences, obj);
                    } else {
                        assert(!"Unknown reference type");
                    }
                }
#undef ADD_REF_TO_LIST
            }
        }

    skip_reference:
        /* If this is a class object, mark various other things that
         * its internals point to.
         *
         * All class objects are instances of java.lang.Class,
         * including the java.lang.Class class object.
         */
        if (clazz == gDvm.classJavaLangClass) {
            scanClassObject((ClassObject *)obj, ctx);
        }
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = NULL;
#endif
}
718
719static void
720processMarkStack(GcMarkContext *ctx)
721{
722 const Object **const base = ctx->stack.base;
723
724 /* Scan anything that's on the mark stack.
725 * We can't use the bitmaps anymore, so use
726 * a finger that points past the end of them.
727 */
728 ctx->finger = (void *)ULONG_MAX;
729 while (ctx->stack.top != base) {
730 scanObject(*ctx->stack.top++, ctx);
731 }
732}
733
734#ifndef NDEBUG
735static uintptr_t gLastFinger = 0;
736#endif
737
738static bool
739scanBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
740{
741 GcMarkContext *ctx = (GcMarkContext *)arg;
742 size_t i;
743
744#ifndef NDEBUG
745 assert((uintptr_t)finger >= gLastFinger);
746 gLastFinger = (uintptr_t)finger;
747#endif
748
749 ctx->finger = finger;
750 for (i = 0; i < numPtrs; i++) {
751 /* The pointers we're getting back are DvmHeapChunks,
752 * not Objects.
753 */
754 scanObject(chunk2ptr(*ptrs++), ctx);
755 }
756
757 return true;
758}
759
760/* Given bitmaps with the root set marked, find and mark all
761 * reachable objects. When this returns, the entire set of
762 * live objects will be marked and the mark stack will be empty.
763 */
764void dvmHeapScanMarkedObjects()
765{
766 GcMarkContext *ctx = &gDvm.gcHeap->markContext;
767
768 assert(ctx->finger == NULL);
769
770 /* The bitmaps currently have bits set for the root set.
771 * Walk across the bitmaps and scan each object.
772 */
773#ifndef NDEBUG
774 gLastFinger = 0;
775#endif
776 dvmHeapBitmapWalkList(ctx->bitmaps, ctx->numBitmaps,
777 scanBitmapCallback, ctx);
778
779 /* We've walked the mark bitmaps. Scan anything that's
780 * left on the mark stack.
781 */
782 processMarkStack(ctx);
783
784 LOG_SCAN("done with marked objects\n");
785}
786
787/** @return true if we need to schedule a call to clear().
788 */
789static bool clearReference(Object *reference)
790{
791 /* This is what the default implementation of Reference.clear()
792 * does. We're required to clear all references to a given
793 * referent atomically, so we can't pop in and out of interp
794 * code each time.
795 *
796 * Also, someone may have subclassed one of the basic Reference
797 * types, overriding clear(). We can't trust the clear()
798 * implementation to call super.clear(); we cannot let clear()
799 * resurrect the referent. If we clear it here, we can safely
800 * call any overriding implementations.
801 */
802 dvmSetFieldObject(reference,
803 gDvm.offJavaLangRefReference_referent, NULL);
804
805#if FANCY_REFERENCE_SUBCLASS
806 /* See if clear() has actually been overridden. If so,
807 * we need to schedule a call to it before calling enqueue().
808 */
809 if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_clear]->clazz !=
810 gDvm.classJavaLangRefReference)
811 {
812 /* clear() has been overridden; return true to indicate
813 * that we need to schedule a call to the real clear()
814 * implementation.
815 */
816 return true;
817 }
818#endif
819
820 return false;
821}
822
/** Decide whether a Reference object needs its enqueue() called.
 *
 *  NOTE: when FANCY_REFERENCE_SUBCLASS is enabled, the brace block
 *  below becomes conditional on the vtable check — only references
 *  using the stock enqueue() take the shortcut.
 *
 *  @return true if we need to schedule a call to enqueue().
 */
static bool enqueueReference(Object *reference)
{
#if FANCY_REFERENCE_SUBCLASS
    /* See if this reference class has overridden enqueue();
     * if not, we can take a shortcut.
     */
    if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_enqueue]->clazz
            == gDvm.classJavaLangRefReference)
#endif
    {
        Object *queue = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queue);
        Object *queueNext = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queueNext);
        if (queue == NULL || queueNext != NULL) {
            /* There is no queue, or the reference has already
             * been enqueued.  The Reference.enqueue() method
             * will do nothing even if we call it.
             */
            return false;
        }
    }

    /* We need to call enqueue(), but if we called it from
     * here we'd probably deadlock.  Schedule a call.
     */
    return true;
}
853
/* Process one strength class of reference objects (soft, weak, or
 * phantom), clearing/enqueuing referents that did not survive marking
 * and scheduling the corresponding Java-level clear()/enqueue() work.
 *
 * refListHead is the singly-linked list (threaded through the vmData
 * field) built up by scanObject() for this reference strength.
 *
 * All objects for stronger reference levels have been
 * marked before this is called.
 */
void dvmHeapHandleReferences(Object *refListHead, enum RefType refType)
{
    Object *reference;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;
    const int offVmData = gDvm.offJavaLangRefReference_vmData;
    const int offReferent = gDvm.offJavaLangRefReference_referent;
    bool workRequired = false;

/* Flush-left debug counters (reported in the LOGD_HEAP below). */
size_t numCleared = 0;
size_t numEnqueued = 0;
    reference = refListHead;
    while (reference != NULL) {
        Object *next;
        Object *referent;

        /* Pull the interesting fields out of the Reference object.
         */
        next = dvmGetFieldObject(reference, offVmData);
        referent = dvmGetFieldObject(reference, offReferent);

        //TODO: when handling REF_PHANTOM, unlink any references
        //      that fail this initial if().  We need to re-walk
        //      the list, and it would be nice to avoid the extra
        //      work.
        if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
            bool schedClear, schedEnqueue;

            /* This is the strongest reference that refers to referent.
             * Do the right thing.
             */
            switch (refType) {
            case REF_SOFT:
            case REF_WEAK:
                schedClear = clearReference(reference);
                schedEnqueue = enqueueReference(reference);
                break;
            case REF_PHANTOM:
                /* PhantomReferences are not cleared automatically.
                 * Until someone clears it (or the reference itself
                 * is collected), the referent must remain alive.
                 *
                 * It's necessary to fully mark the referent because
                 * it will still be present during the next GC, and
                 * all objects that it points to must be valid.
                 * (The referent will be marked outside of this loop,
                 * after handing all references of this strength, in
                 * case multiple references point to the same object.)
                 */
                schedClear = false;

                /* A PhantomReference is only useful with a
                 * queue, but since it's possible to create one
                 * without a queue, we need to check.
                 */
                schedEnqueue = enqueueReference(reference);
                break;
            default:
                assert(!"Bad reference type");
                schedClear = false;
                schedEnqueue = false;
                break;
            }
numCleared += schedClear ? 1 : 0;
numEnqueued += schedEnqueue ? 1 : 0;

            if (schedClear || schedEnqueue) {
                uintptr_t workBits;

                /* Stuff the clear/enqueue bits in the bottom of
                 * the pointer.  Assumes that objects are 8-byte
                 * aligned.
                 *
                 * Note that we are adding the *Reference* (which
                 * is by definition already marked at this point) to
                 * this list; we're not adding the referent (which
                 * has already been cleared).
                 */
                assert(((intptr_t)reference & 3) == 0);
                assert(((WORKER_CLEAR | WORKER_ENQUEUE) & ~3) == 0);
                workBits = (schedClear ? WORKER_CLEAR : 0) |
                           (schedEnqueue ? WORKER_ENQUEUE : 0);
                if (!dvmHeapAddRefToLargeTable(
                        &gDvm.gcHeap->referenceOperations,
                        (Object *)((uintptr_t)reference | workBits)))
                {
                    LOGE_HEAP("dvmMalloc(): no room for any more "
                            "reference operations\n");
                    dvmAbort();
                }
                workRequired = true;
            }

            if (refType != REF_PHANTOM) {
                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            } // else this is handled later for REF_PHANTOM

        } // else there was a stronger reference to the referent.

        reference = next;
    }
#define refType2str(r) \
    ((r) == REF_SOFT ? "soft" : ( \
     (r) == REF_WEAK ? "weak" : ( \
     (r) == REF_PHANTOM ? "phantom" : "UNKNOWN" )))
LOGD_HEAP("dvmHeapHandleReferences(): cleared %zd, enqueued %zd %s references\n", numCleared, numEnqueued, refType2str(refType));

    /* Walk though the reference list again, and mark any non-clear/marked
     * referents.  Only PhantomReferences can have non-clear referents
     * at this point.
     */
    if (refType == REF_PHANTOM) {
        bool scanRequired = false;

        HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
        reference = refListHead;
        while (reference != NULL) {
            Object *next;
            Object *referent;

            /* Pull the interesting fields out of the Reference object.
             */
            next = dvmGetFieldObject(reference, offVmData);
            referent = dvmGetFieldObject(reference, offReferent);

            if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
                markObjectNonNull(referent, markContext);
                scanRequired = true;

                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            }

            reference = next;
        }
        HPROF_CLEAR_GC_SCAN_STATE();

        if (scanRequired) {
            /* Newly-kept phantom referents must be fully scanned. */
            processMarkStack(markContext);
        }
    }

    if (workRequired) {
        dvmSignalHeapWorker(false);
    }
}
1007
1008
1009/* Find unreachable objects that need to be finalized,
1010 * and schedule them for finalization.
1011 */
void dvmHeapScheduleFinalizations()
{
    HeapRefTable newPendingRefs;
    LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
    Object **ref;
    Object **lastRef;
    size_t totalPendCount;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;

    /*
     * All reachable objects have been marked.
     * Any unmarked finalizable objects need to be finalized.
     */

    /* Create a table that the new pending refs will
     * be added to.
     *
     * 128 is just the initial capacity; the table grows as refs are
     * added.  Failure here is fatal because we have no safe fallback.
     */
    if (!dvmHeapInitHeapRefTable(&newPendingRefs, 128)) {
        //TODO: mark all finalizable refs and hope that
        //      we can schedule them next time.  Watch out,
        //      because we may be expecting to free up space
        //      by calling finalizers.
        LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
                "pending finalizations\n");
        dvmAbort();
    }

    /* Walk through finalizableRefs and move any unmarked references
     * to the list of new pending refs.
     *
     * Each chunk of the large table is compacted in place as we go:
     * unmarked refs are moved out to newPendingRefs, and marked refs
     * are slid down (via gapRef) to fill the holes they leave behind.
     */
    totalPendCount = 0;
    while (finRefs != NULL) {
        Object **gapRef;
        size_t newPendCount = 0;

        gapRef = ref = finRefs->refs.table;
        lastRef = finRefs->refs.nextEntry;
        while (ref < lastRef) {
            DvmHeapChunk *hc;

            hc = ptr2chunk(*ref);
            if (!isMarked(hc, markContext)) {
                /* Unreachable: schedule it for finalization and drop
                 * it from finalizableRefs (it is not copied to gapRef,
                 * so compaction removes it from this chunk).
                 */
                if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
                    //TODO: add the current table and allocate
                    //      a new, smaller one.
                    LOGE_GC("dvmHeapScheduleFinalizations(): "
                            "no room for any more pending finalizations: %zd\n",
                            dvmHeapNumHeapRefTableEntries(&newPendingRefs));
                    dvmAbort();
                }
                newPendCount++;
            } else {
                /* This ref is marked, so will remain on finalizableRefs.
                 */
                if (newPendCount > 0) {
                    /* Copy it up to fill the holes.
                     */
                    *gapRef++ = *ref;
                } else {
                    /* No holes yet; don't bother copying.
                     */
                    gapRef++;
                }
            }
            ref++;
        }
        /* gapRef is now one past the last surviving entry in this chunk. */
        finRefs->refs.nextEntry = gapRef;
        //TODO: if the table is empty when we're done, free it.
        totalPendCount += newPendCount;
        finRefs = finRefs->next;
    }
    LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
            totalPendCount);
    if (totalPendCount == 0) {
        /* No objects required finalization.
         * Free the empty temporary table.
         */
        dvmClearReferenceTable(&newPendingRefs);
        return;
    }

    /* Add the new pending refs to the main list.
     */
    if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
                &newPendingRefs))
    {
        LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
                "pending finalizations\n");
        dvmAbort();
    }

    //TODO: try compacting the main list with a memcpy loop

    /* Mark the refs we just moved; we don't want them or their
     * children to get swept yet.
     */
    ref = newPendingRefs.table;
    lastRef = newPendingRefs.nextEntry;
    assert(ref < lastRef);
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
    while (ref < lastRef) {
        markObjectNonNull(*ref, markContext);
        ref++;
    }
    HPROF_CLEAR_GC_SCAN_STATE();

    /* Set markAllReferents so that we don't collect referents whose
     * only references are in final-reachable objects.
     * TODO: eventually provide normal reference behavior by properly
     * marking these references.
     */
    gDvm.gcHeap->markAllReferents = true;
    processMarkStack(markContext);
    gDvm.gcHeap->markAllReferents = false;

    /* Wake the heap worker thread so it runs the newly-scheduled
     * finalizers.
     */
    dvmSignalHeapWorker(false);
}
1129
1130void dvmHeapFinishMarkStep()
1131{
1132 HeapBitmap *markBitmap;
1133 HeapBitmap objectBitmap;
1134 GcMarkContext *markContext;
1135
1136 markContext = &gDvm.gcHeap->markContext;
1137
1138 /* The sweep step freed every object that appeared in the
1139 * HeapSource bitmaps that didn't appear in the mark bitmaps.
1140 * The new state of the HeapSource is exactly the final
1141 * mark bitmaps, so swap them in.
1142 *
1143 * The old bitmaps will be swapped into the context so that
1144 * we can clean them up.
1145 */
1146 dvmHeapSourceReplaceObjectBitmaps(markContext->bitmaps,
1147 markContext->numBitmaps);
1148
1149 /* Clean up the old HeapSource bitmaps and anything else associated
1150 * with the marking process.
1151 */
1152 dvmHeapBitmapDeleteList(markContext->bitmaps, markContext->numBitmaps);
1153 destroyMarkStack(&markContext->stack);
1154
1155 memset(markContext, 0, sizeof(*markContext));
1156}
1157
1158#if WITH_HPROF && WITH_HPROF_UNREACHABLE
1159static bool
1160hprofUnreachableBitmapCallback(size_t numPtrs, void **ptrs,
1161 const void *finger, void *arg)
1162{
1163 hprof_context_t *hctx = (hprof_context_t *)arg;
1164 size_t i;
1165
1166 for (i = 0; i < numPtrs; i++) {
1167 Object *obj;
1168
1169 /* The pointers we're getting back are DvmHeapChunks, not
1170 * Objects.
1171 */
1172 obj = (Object *)chunk2ptr(*ptrs++);
1173
1174 hprofMarkRootObject(hctx, obj, 0);
1175 hprofDumpHeapObject(hctx, obj);
1176 }
1177
1178 return true;
1179}
1180
1181static void
1182hprofDumpUnmarkedObjects(const HeapBitmap markBitmaps[],
1183 const HeapBitmap objectBitmaps[], size_t numBitmaps)
1184{
1185 hprof_context_t *hctx = gDvm.gcHeap->hprofContext;
1186 if (hctx == NULL) {
1187 return;
1188 }
1189
1190 LOGI("hprof: dumping unreachable objects\n");
1191
1192 HPROF_SET_GC_SCAN_STATE(HPROF_UNREACHABLE, 0);
1193
1194 dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
1195 hprofUnreachableBitmapCallback, hctx);
1196
1197 HPROF_CLEAR_GC_SCAN_STATE();
1198}
1199#endif
1200
/* Bitmap-walk callback that frees each chunk handed to it.  Invoked by
 * dvmHeapBitmapXorWalkLists() on the objects present in the object
 * bitmaps but absent from the mark bitmaps -- the unreachable set.
 * For each object: release its monitor, free class innards if it is a
 * ClassObject, then return the chunk to the HeapSource.
 * Always returns true so the walk continues.
 */
static bool
sweepBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
{
    /* Cache the global; see the comment below about why we compare
     * against this rather than dereferencing a possibly-swept clazz.
     */
    const ClassObject *const classJavaLangClass = gDvm.classJavaLangClass;
    size_t i;

    for (i = 0; i < numPtrs; i++) {
        DvmHeapChunk *hc;
        Object *obj;

        /* The pointers we're getting back are DvmHeapChunks, not
         * Objects.
         */
        hc = (DvmHeapChunk *)*ptrs++;
        obj = (Object *)chunk2ptr(hc);

#if WITH_OBJECT_HEADERS
        /* Paranoid check: a marked object must never appear on the
         * sweep list.
         */
        if (hc->markGeneration == gGeneration) {
            LOGE("sweeping marked object: 0x%08x\n", (uint)obj);
            dvmAbort();
        }
#endif

        /* Free the monitor associated with the object.
         */
        dvmFreeObjectMonitor(obj);

        /* NOTE: Dereferencing clazz is dangerous.  If obj was the last
         * one to reference its class object, the class object could be
         * on the sweep list, and could already have been swept, leaving
         * us with a stale pointer.
         */
        LOGV_SWEEP("FREE: 0x%08x %s\n", (uint)obj, obj->clazz->name);

        /* This assumes that java.lang.Class will never go away.
         * If it can, and we were the last reference to it, it
         * could have already been swept.  However, even in that case,
         * gDvm.classJavaLangClass should still have a useful
         * value.
         */
        if (obj->clazz == classJavaLangClass) {
            LOGV_SWEEP("---------------> %s\n", ((ClassObject *)obj)->name);
            /* dvmFreeClassInnards() may have already been called,
             * but it's safe to call on the same ClassObject twice.
             */
            dvmFreeClassInnards((ClassObject *)obj);
        }

#if 0
        /* Overwrite the to-be-freed object to make stale references
         * more obvious.
         */
        {
            int chunklen;
            ClassObject *clazz = obj->clazz;
#if WITH_OBJECT_HEADERS
            DvmHeapChunk chunk = *hc;
            chunk.header = ~OBJECT_HEADER | 1;
#endif
            chunklen = dvmHeapSourceChunkSize(hc);
            memset(hc, 0xa5, chunklen);
            obj->clazz = (ClassObject *)((uintptr_t)clazz ^ 0xffffffff);
#if WITH_OBJECT_HEADERS
            *hc = chunk;
#endif
        }
#endif

//TODO: provide a heapsource function that takes a list of pointers to free
//      and call it outside of this loop.
        dvmHeapSourceFree(hc);
    }

    return true;
}
1276
1277/* A function suitable for passing to dvmHashForeachRemove()
1278 * to clear out any unmarked objects. Clears the low bits
1279 * of the pointer because the intern table may set them.
1280 */
1281static int isUnmarkedObject(void *object)
1282{
1283 return !isMarked(ptr2chunk((uintptr_t)object & ~(HB_OBJECT_ALIGNMENT-1)),
1284 &gDvm.gcHeap->markContext);
1285}
1286
/* Walk through the list of objects that haven't been
 * marked and free them.
 *
 * On return, *numFreed holds the number of objects freed and
 * *sizeFreed the number of bytes reclaimed, computed as the change
 * in the HeapSource allocation counters across the sweep.
 */
void
dvmHeapSweepUnmarkedObjects(int *numFreed, size_t *sizeFreed)
{
    const HeapBitmap *markBitmaps;
    const GcMarkContext *markContext;
    HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
    size_t origObjectsAllocated;
    size_t origBytesAllocated;
    size_t numBitmaps;

    /* All reachable objects have been marked.
     * Detach any unreachable interned strings before
     * we sweep.
     */
    dvmGcDetachDeadInternedStrings(isUnmarkedObject);

    /* Free any known objects that are not marked.
     */
    /* Snapshot the allocation counters so the deltas after the sweep
     * tell us how much was freed.
     */
    origObjectsAllocated = dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    origBytesAllocated = dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

    markContext = &gDvm.gcHeap->markContext;
    markBitmaps = markContext->bitmaps;
    numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
                    HEAP_SOURCE_MAX_HEAP_COUNT);
#ifndef NDEBUG
    /* Debug-build sanity check: the mark context must cover the same
     * number of heaps as the HeapSource reports.
     */
    if (numBitmaps != markContext->numBitmaps) {
        LOGE("heap bitmap count mismatch: %zd != %zd\n",
                numBitmaps, markContext->numBitmaps);
        dvmAbort();
    }
#endif

#if WITH_HPROF && WITH_HPROF_UNREACHABLE
    /* Dump the soon-to-be-freed objects into any in-progress hprof
     * dump before they disappear.
     */
    hprofDumpUnmarkedObjects(markBitmaps, objectBitmaps, numBitmaps);
#endif

    /* Visit every object set in the object bitmaps but clear in the
     * mark bitmaps, freeing each one via sweepBitmapCallback.
     */
    dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
            sweepBitmapCallback, NULL);

    *numFreed = origObjectsAllocated -
            dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    *sizeFreed = origBytesAllocated -
            dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

#ifdef WITH_PROFILER
    /* Fold the reclaimed totals into the allocation profiler stats. */
    if (gDvm.allocProf.enabled) {
        gDvm.allocProf.freeCount += *numFreed;
        gDvm.allocProf.freeSize += *sizeFreed;
    }
#endif
}