blob: 84349a96380f83158473ca054bf479c73ca9ac11 [file] [log] [blame]
The Android Open Source Projectf6c38712009-03-03 19:28:47 -08001/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "Dalvik.h"
18#include "alloc/HeapBitmap.h"
19#include "alloc/HeapInternal.h"
20#include "alloc/HeapSource.h"
21#include "alloc/MarkSweep.h"
22#include <limits.h> // for ULONG_MAX
23#include <sys/mman.h> // for madvise(), mmap()
24#include <cutils/ashmem.h>
The Android Open Source Project99409882009-03-18 22:20:24 -070025#include <errno.h>
The Android Open Source Projectf6c38712009-03-03 19:28:47 -080026
/* GC debug levels.  PARANOID enables expensive per-object validity
 * checks during marking; BASIC enables cheap sanity checks; OFF none.
 */
#define GC_DEBUG_PARANOID 2
#define GC_DEBUG_BASIC 1
#define GC_DEBUG_OFF 0
/* True when the compiled-in debug level is at least (l). */
#define GC_DEBUG(l) (GC_DEBUG_LEVEL >= (l))

#if 1
#define GC_DEBUG_LEVEL GC_DEBUG_PARANOID
#else
#define GC_DEBUG_LEVEL GC_DEBUG_OFF
#endif

/* Set to 1 to route the extra-verbose LOGVV_GC channel to the log. */
#define VERBOSE_GC 0

/* All GC log output is tagged with the VM's LOG_TAG plus "-gc". */
#define GC_LOG_TAG LOG_TAG "-gc"
41
/* Verbose/debug GC logging compiles away entirely in release builds. */
#if LOG_NDEBUG
#define LOGV_GC(...) ((void)0)
#define LOGD_GC(...) ((void)0)
#else
#define LOGV_GC(...) LOG(LOG_VERBOSE, GC_LOG_TAG, __VA_ARGS__)
#define LOGD_GC(...) LOG(LOG_DEBUG, GC_LOG_TAG, __VA_ARGS__)
#endif

/* LOGVV_GC is an even-noisier channel, gated separately by VERBOSE_GC. */
#if VERBOSE_GC
#define LOGVV_GC(...) LOGV_GC(__VA_ARGS__)
#else
#define LOGVV_GC(...) ((void)0)
#endif

#define LOGI_GC(...) LOG(LOG_INFO, GC_LOG_TAG, __VA_ARGS__)
#define LOGW_GC(...) LOG(LOG_WARN, GC_LOG_TAG, __VA_ARGS__)
#define LOGE_GC(...) LOG(LOG_ERROR, GC_LOG_TAG, __VA_ARGS__)

/* Phase-prefixed loggers: normal verbosity... */
#define LOG_SCAN(...) LOGV_GC("SCAN: " __VA_ARGS__)
#define LOG_MARK(...) LOGV_GC("MARK: " __VA_ARGS__)
#define LOG_SWEEP(...) LOGV_GC("SWEEP: " __VA_ARGS__)
#define LOG_REF(...) LOGV_GC("REF: " __VA_ARGS__)

/* ...and extra verbosity (per-object noise). */
#define LOGV_SCAN(...) LOGVV_GC("SCAN: " __VA_ARGS__)
#define LOGV_MARK(...) LOGVV_GC("MARK: " __VA_ARGS__)
#define LOGV_SWEEP(...) LOGVV_GC("SWEEP: " __VA_ARGS__)
#define LOGV_REF(...) LOGVV_GC("REF: " __VA_ARGS__)
69
/* Debug instrumentation: when objects carry extra headers, track the
 * GC generation counter and the object currently being scanned (its
 * "mark parent") for post-mortem heap analysis.
 */
#if WITH_OBJECT_HEADERS
u2 gGeneration = 0;
static const Object *gMarkParent = NULL;
#endif

#ifndef PAGE_SIZE
#define PAGE_SIZE 4096
#endif
/* Round p up to the next multiple of PAGE_SIZE (assumes power-of-two). */
#define ALIGN_UP_TO_PAGE_SIZE(p) \
    (((size_t)(p) + (PAGE_SIZE - 1)) & ~(PAGE_SIZE - 1))
80
/* Return non-zero if the chunk's mark bit is set in any of the mark
 * context's bitmaps.
 *
 * Do not cast the result of this to a boolean; the only set bit
 * may be > 1<<8.
 */
static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
        __attribute__((always_inline));
static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
{
    return dvmHeapBitmapIsObjectBitSetInList(ctx->bitmaps, ctx->numBitmaps, hc);
}
90
91static bool
92createMarkStack(GcMarkStack *stack)
93{
94 const Object **limit;
95 size_t size;
The Android Open Source Project99409882009-03-18 22:20:24 -070096 int fd, err;
The Android Open Source Projectf6c38712009-03-03 19:28:47 -080097
98 /* Create a stack big enough for the worst possible case,
99 * where the heap is perfectly full of the smallest object.
100 * TODO: be better about memory usage; use a smaller stack with
101 * overflow detection and recovery.
102 */
103 size = dvmHeapSourceGetIdealFootprint() * sizeof(Object*) /
104 (sizeof(Object) + HEAP_SOURCE_CHUNK_OVERHEAD);
105 size = ALIGN_UP_TO_PAGE_SIZE(size);
106 fd = ashmem_create_region("dalvik-heap-markstack", size);
107 if (fd < 0) {
The Android Open Source Project99409882009-03-18 22:20:24 -0700108 LOGE_GC("Could not create %d-byte ashmem mark stack: %s\n",
109 size, strerror(errno));
The Android Open Source Projectf6c38712009-03-03 19:28:47 -0800110 return false;
111 }
112 limit = (const Object **)mmap(NULL, size, PROT_READ | PROT_WRITE,
113 MAP_PRIVATE, fd, 0);
The Android Open Source Project99409882009-03-18 22:20:24 -0700114 err = errno;
The Android Open Source Projectf6c38712009-03-03 19:28:47 -0800115 close(fd);
116 if (limit == MAP_FAILED) {
The Android Open Source Project99409882009-03-18 22:20:24 -0700117 LOGE_GC("Could not mmap %d-byte ashmem mark stack: %s\n",
118 size, strerror(err));
The Android Open Source Projectf6c38712009-03-03 19:28:47 -0800119 return false;
120 }
121
122 memset(stack, 0, sizeof(*stack));
123 stack->limit = limit;
124 stack->base = (const Object **)((uintptr_t)limit + size);
125 stack->top = stack->base;
126
127 return true;
128}
129
130static void
131destroyMarkStack(GcMarkStack *stack)
132{
133 munmap((char *)stack->limit,
134 (uintptr_t)stack->base - (uintptr_t)stack->limit);
135 memset(stack, 0, sizeof(*stack));
136}
137
/* Push obj onto the downward-growing mark stack.  No overflow check:
 * the stack is sized for the worst case in createMarkStack().
 */
#define MARK_STACK_PUSH(stack, obj) \
    do { \
        *--(stack).top = (obj); \
    } while (false)
142
143bool
144dvmHeapBeginMarkStep()
145{
146 GcMarkContext *mc = &gDvm.gcHeap->markContext;
147 HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
148 size_t numBitmaps;
149
150 if (!createMarkStack(&mc->stack)) {
151 return false;
152 }
153
154 numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
155 HEAP_SOURCE_MAX_HEAP_COUNT);
156 if (numBitmaps <= 0) {
157 return false;
158 }
159
160 /* Create mark bitmaps that cover the same ranges as the
161 * current object bitmaps.
162 */
163 if (!dvmHeapBitmapInitListFromTemplates(mc->bitmaps, objectBitmaps,
164 numBitmaps, "mark"))
165 {
166 return false;
167 }
168
169 mc->numBitmaps = numBitmaps;
170 mc->finger = NULL;
171
172#if WITH_OBJECT_HEADERS
173 gGeneration++;
174#endif
175
176 return true;
177}
178
/* Set the mark bit for hc and return its previous value (non-zero if
 * the chunk was already marked).  As with isMarked(), do not cast the
 * result to a boolean.
 */
static long setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
        __attribute__((always_inline));
static long
setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
{
    return dvmHeapBitmapSetAndReturnObjectBitInList(ctx->bitmaps,
            ctx->numBitmaps, hc);
}
187
/* Core marking primitive: set obj's mark bit and, if it was not
 * already marked, decide whether it must also go on the mark stack.
 *
 * checkFinger: push only when obj's chunk lies below ctx->finger,
 *     i.e. the bitmap walk has already passed its address.
 * forceStack: push unconditionally when newly marked (no caller in
 *     this file passes true -- presumably for callers elsewhere).
 *
 * obj must be non-NULL; use the markObject()/dvmMarkObject() wrappers
 * for possibly-NULL references.
 */
static void _markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
                                     bool checkFinger, bool forceStack)
        __attribute__((always_inline));
static void
_markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
                         bool checkFinger, bool forceStack)
{
    DvmHeapChunk *hc;

    assert(obj != NULL);

#if GC_DEBUG(GC_DEBUG_PARANOID)
//TODO: make sure we're locked
    assert(obj != (Object *)gDvm.unlinkedJavaLangClass);
    assert(dvmIsValidObject(obj));
#endif

    hc = ptr2chunk(obj);
    if (!setAndReturnMarkBit(ctx, hc)) {
        /* This object was not previously marked.
         */
        if (forceStack || (checkFinger && (void *)hc < ctx->finger)) {
            /* This object will need to go on the mark stack.
             */
            MARK_STACK_PUSH(ctx->stack, obj);
        }

#if WITH_OBJECT_HEADERS
        /* Debug bookkeeping: every object must be scanned exactly once
         * per generation, after being marked exactly once.
         */
        if (hc->scanGeneration != hc->markGeneration) {
            LOGE("markObject(0x%08x): wasn't scanned last time\n", (uint)obj);
            dvmAbort();
        }
        if (hc->markGeneration == gGeneration) {
            LOGE("markObject(0x%08x): already marked this generation\n",
                    (uint)obj);
            dvmAbort();
        }
        hc->oldMarkGeneration = hc->markGeneration;
        hc->markGeneration = gGeneration;
        hc->markFingerOld = hc->markFinger;
        hc->markFinger = ctx->finger;
        if (gMarkParent != NULL) {
            hc->parentOld = hc->parent;
            hc->parent = gMarkParent;
        } else {
            /* No scan parent; tag the stale parent pointer's low bit. */
            hc->parent = (const Object *)((uintptr_t)hc->parent | 1);
        }
        hc->markCount++;
#endif
#if WITH_HPROF
        if (gDvm.gcHeap->hprofContext != NULL) {
            hprofMarkRootObject(gDvm.gcHeap->hprofContext, obj, 0);
        }
#endif
#if DVM_TRACK_HEAP_MARKING
        gDvm.gcHeap->markCount++;
        gDvm.gcHeap->markSize += dvmHeapSourceChunkSize((void *)hc) +
                HEAP_SOURCE_CHUNK_OVERHEAD;
#endif

        /* obj->clazz can be NULL if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  This is ok.
         */
        LOGV_MARK("0x%08x %s\n", (uint)obj,
                obj->clazz == NULL ? "<null class>" : obj->clazz->name);
    }
}
255
/* Used to mark objects when recursing.  Recursion is done by moving
 * the finger across the bitmaps in address order and marking child
 * objects.  Any newly-marked objects whose addresses are lower than
 * the finger won't be visited by the bitmap scan, so those objects
 * need to be added to the mark stack.
 *
 * obj must be non-NULL; use the markObject() macro otherwise.
 */
static void
markObjectNonNull(const Object *obj, GcMarkContext *ctx)
{
    _markObjectNonNullCommon(obj, ctx, true, false);
}
267
/* NULL-tolerant wrapper around markObjectNonNull() for use while
 * recursing; evaluates obj exactly once.
 */
#define markObject(obj, ctx) \
    do { \
        Object *MO_obj_ = (Object *)(obj); \
        if (MO_obj_ != NULL) { \
            markObjectNonNull(MO_obj_, (ctx)); \
        } \
    } while (false)
275
/* If the object hasn't already been marked, mark it and
 * schedule it to be scanned for references.
 *
 * obj may not be NULL.  The macro dvmMarkObject() should
 * be used in situations where a reference may be NULL.
 *
 * This function may only be called when marking the root
 * set.  When recursing, use the internal markObject[NonNull](),
 * which takes the scan finger into account.
 */
void
dvmMarkObjectNonNull(const Object *obj)
{
    _markObjectNonNullCommon(obj, &gDvm.gcHeap->markContext, false, false);
}
290
/* Mark the set of root objects.
 *
 * Things we need to scan:
 * - System classes defined by root classloader
 * - For each thread:
 *   - Interpreted stack, from top to "curFrame"
 *     - Dalvik registers (args + local vars)
 *   - JNI local references
 *   - Automatic VM local references (TrackedAlloc)
 *   - Associated Thread/VMThread object
 *   - ThreadGroups (could track & start with these instead of working
 *     upward from Threads)
 *   - Exception currently being thrown, if present
 * - JNI global references
 * - Interned string table
 * - Primitive classes
 * - Special objects
 *   - gDvm.outOfMemoryObj
 * - Objects allocated with ALLOC_NO_GC
 * - Objects pending finalization (but not yet finalized)
 * - Objects in debugger object registry
 *
 * Don't need:
 * - Native stack (for in-progress stuff in the VM)
 *   - The TrackedAlloc stuff watches all native VM references.
 *
 * The HPROF_SET_GC_SCAN_STATE calls label each batch of roots for
 * heap-dump output; they compile away when hprof is disabled.
 */
void dvmHeapMarkRootSet()
{
    HeapRefTable *refs;
    GcHeap *gcHeap;
    Object **op;

    gcHeap = gDvm.gcHeap;

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_STICKY_CLASS, 0);

    LOG_SCAN("root class loader\n");
    dvmGcScanRootClassLoader();
    LOG_SCAN("primitive classes\n");
    dvmGcScanPrimitiveClasses();

    /* dvmGcScanRootThreadGroups() sets a bunch of
     * different scan states internally.
     */
    HPROF_CLEAR_GC_SCAN_STATE();

    LOG_SCAN("root thread groups\n");
    dvmGcScanRootThreadGroups();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_INTERNED_STRING, 0);

    LOG_SCAN("interned strings\n");
    dvmGcScanInternedStrings();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_JNI_GLOBAL, 0);

    LOG_SCAN("JNI global refs\n");
    dvmGcMarkJniGlobalRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);

    LOG_SCAN("pending reference operations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->referenceOperations, true);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);

    LOG_SCAN("pending finalizations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->pendingFinalizationRefs, false);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_DEBUGGER, 0);

    LOG_SCAN("debugger refs\n");
    dvmGcMarkDebuggerRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_VM_INTERNAL, 0);

    /* Mark all ALLOC_NO_GC objects.
     */
    LOG_SCAN("ALLOC_NO_GC objects\n");
    refs = &gcHeap->nonCollectableRefs;
    op = refs->table;
    while ((uintptr_t)op < (uintptr_t)refs->nextEntry) {
        dvmMarkObjectNonNull(*(op++));
    }

    /* Mark any special objects we have sitting around.
     */
    LOG_SCAN("special objects\n");
    dvmMarkObjectNonNull(gDvm.outOfMemoryObj);
    dvmMarkObjectNonNull(gDvm.internalErrorObj);
//TODO: scan object references sitting in gDvm;  use pointer begin & end

    HPROF_CLEAR_GC_SCAN_STATE();
}
385
/*
 * Nothing past this point is allowed to use dvmMarkObject*().
 * Scanning/recursion must use markObject*(), which takes the
 * finger into account.
 *
 * (The #define poisons the name so a stray call fails to link.)
 */
#define dvmMarkObjectNonNull __dont_use_dvmMarkObjectNonNull__
393
394/* Mark all of a ClassObject's interfaces.
395 */
396static void markInterfaces(const ClassObject *clazz, GcMarkContext *ctx)
397{
398 ClassObject **interfaces;
399 int interfaceCount;
400 int i;
401
402 /* Mark all interfaces.
403 */
404 interfaces = clazz->interfaces;
405 interfaceCount = clazz->interfaceCount;
406 for (i = 0; i < interfaceCount; i++) {
407 markObjectNonNull((Object *)*interfaces, ctx);
408 interfaces++;
409 }
410}
411
412/* Mark all objects referred to by a ClassObject's static fields.
413 */
414static void scanStaticFields(const ClassObject *clazz, GcMarkContext *ctx)
415{
416 StaticField *f;
417 int i;
418
419 //TODO: Optimize this with a bit vector or something
420 f = clazz->sfields;
421 for (i = 0; i < clazz->sfieldCount; i++) {
422 char c = f->field.signature[0];
423 if (c == '[' || c == 'L') {
424 /* It's an array or class reference.
425 */
426 markObject((Object *)f->value.l, ctx);
427 }
428 f++;
429 }
430}
431
432/* Mark all objects referred to by a DataObject's instance fields.
433 */
434static void scanInstanceFields(const DataObject *obj, ClassObject *clazz,
435 GcMarkContext *ctx)
436{
437//TODO: Optimize this by avoiding walking the superclass chain
438 while (clazz != NULL) {
439 InstField *f;
440 int i;
441
442 /* All of the fields that contain object references
443 * are guaranteed to be at the beginning of the ifields list.
444 */
445 f = clazz->ifields;
446 for (i = 0; i < clazz->ifieldRefCount; i++) {
447 /* Mark the array or object reference.
448 * May be NULL.
449 *
450 * Note that, per the comment on struct InstField,
451 * f->byteOffset is the offset from the beginning of
452 * obj, not the offset into obj->instanceData.
453 */
454 markObject(dvmGetFieldObject((Object*)obj, f->byteOffset), ctx);
455 f++;
456 }
457
458 /* This will be NULL when we hit java.lang.Object
459 */
460 clazz = clazz->super;
461 }
462}
463
464/* Mark all objects referred to by the array's contents.
465 */
466static void scanObjectArray(const ArrayObject *array, GcMarkContext *ctx)
467{
468 Object **contents;
469 u4 length;
470 u4 i;
471
472 contents = (Object **)array->contents;
473 length = array->length;
474
475 for (i = 0; i < length; i++) {
476 markObject(*contents, ctx); // may be NULL
477 contents++;
478 }
479}
480
/* Mark all objects referred to by the ClassObject itself: element
 * class (for arrays), superclass, classloader, static fields, and
 * implemented interfaces.
 */
static void scanClassObject(const ClassObject *clazz, GcMarkContext *ctx)
{
    LOGV_SCAN("---------> %s\n", clazz->name);

    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* We're an array; mark the class object of the contents
         * of the array.
         *
         * Note that we won't necessarily reach the array's element
         * class by scanning the array contents;  the array may be
         * zero-length, or may only contain null objects.
         */
        markObjectNonNull((Object *)clazz->elementClass, ctx);
    }

    /* We scan these explicitly in case the only remaining
     * reference to a particular class object is via a data
     * object;  we may not be guaranteed to reach all
     * live class objects via a classloader.
     */
    markObject((Object *)clazz->super, ctx);  // may be NULL (java.lang.Object)
    markObject(clazz->classLoader, ctx);      // may be NULL

    scanStaticFields(clazz, ctx);
    markInterfaces(clazz, ctx);
}
509
/* Mark all objects that obj refers to.
 *
 * Called on every object in markList.
 *
 * Besides ordinary instance/array/class references, this contains the
 * special handling for java.lang.ref.Reference subclasses: live
 * references with unmarked referents are threaded onto the gcHeap
 * soft/weak/phantom lists (via their vmData field) for later
 * processing by dvmHeapHandleReferences().
 */
static void scanObject(const Object *obj, GcMarkContext *ctx)
{
    ClassObject *clazz;

    assert(dvmIsValidObject(obj));
    LOGV_SCAN("0x%08x %s\n", (uint)obj, obj->clazz->name);

#if WITH_HPROF
    if (gDvm.gcHeap->hprofContext != NULL) {
        hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
    }
#endif

#if WITH_OBJECT_HEADERS
    /* Debug check: an object must be scanned at most once per generation. */
    if (ptr2chunk(obj)->scanGeneration == gGeneration) {
        LOGE("object 0x%08x was already scanned this generation\n",
                (uintptr_t)obj);
        dvmAbort();
    }
    ptr2chunk(obj)->oldScanGeneration = ptr2chunk(obj)->scanGeneration;
    ptr2chunk(obj)->scanGeneration = gGeneration;
    ptr2chunk(obj)->scanCount++;
#endif

    /* Get and mark the class object for this particular instance.
     */
    clazz = obj->clazz;
    if (clazz == NULL) {
        /* This can happen if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  The object
         * won't contain any references yet, so we can
         * just skip it.
         */
        return;
    } else if (clazz == gDvm.unlinkedJavaLangClass) {
        /* This class hasn't been linked yet.  We're guaranteed
         * that the object doesn't contain any references that
         * aren't already tracked, so we can skip scanning it.
         *
         * NOTE: unlinkedJavaLangClass is not on the heap, so
         * it's very important that we don't try marking it.
         */
        return;
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = obj;
#endif

    assert(dvmIsValidObject((Object *)clazz));
    markObjectNonNull((Object *)clazz, ctx);

    /* Mark any references in this object.
     */
    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* It's an array object.
         */
        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISOBJECTARRAY)) {
            /* It's an array of object references.
             */
            scanObjectArray((ArrayObject *)obj, ctx);
        }
        // else there's nothing else to scan
    } else {
        /* It's a DataObject-compatible object.
         */
        scanInstanceFields((DataObject *)obj, clazz, ctx);

        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISREFERENCE)) {
            GcHeap *gcHeap = gDvm.gcHeap;
            Object *referent;

            /* It's a subclass of java/lang/ref/Reference.
             * The fields in this class have been arranged
             * such that scanInstanceFields() did not actually
             * mark the "referent" field;  we need to handle
             * it specially.
             *
             * If the referent already has a strong mark (isMarked(referent)),
             * we don't care about its reference status.
             */
            referent = dvmGetFieldObject(obj,
                    gDvm.offJavaLangRefReference_referent);
            if (referent != NULL &&
                    !isMarked(ptr2chunk(referent), &gcHeap->markContext))
            {
                u4 refFlags;

                if (gcHeap->markAllReferents) {
                    LOG_REF("Hard-marking a reference\n");

                    /* Don't bother with normal reference-following
                     * behavior, just mark the referent.  This should
                     * only be used when following objects that just
                     * became scheduled for finalization.
                     */
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* See if this reference was handled by a previous GC.
                 */
                if (dvmGetFieldObject(obj,
                        gDvm.offJavaLangRefReference_vmData) ==
                        SCHEDULED_REFERENCE_MAGIC)
                {
                    LOG_REF("Skipping scheduled reference\n");

                    /* Don't reschedule it, but make sure that its
                     * referent doesn't get collected (in case it's
                     * a PhantomReference and wasn't cleared automatically).
                     */
                    //TODO: Mark these after handling all new refs of
                    //      this strength, in case the new refs refer
                    //      to the same referent.  Not a very common
                    //      case, though.
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* Find out what kind of reference is pointing
                 * to referent.
                 */
                refFlags = GET_CLASS_FLAG_GROUP(clazz,
                        CLASS_ISREFERENCE |
                        CLASS_ISWEAKREFERENCE |
                        CLASS_ISPHANTOMREFERENCE);

                /* We use the vmData field of Reference objects
                 * as a next pointer in a singly-linked list.
                 * That way, we don't need to allocate any memory
                 * while we're doing a GC.
                 */
#define ADD_REF_TO_LIST(list, ref) \
            do { \
                Object *ARTL_ref_ = (/*de-const*/Object *)(ref); \
                dvmSetFieldObject(ARTL_ref_, \
                        gDvm.offJavaLangRefReference_vmData, list); \
                list = ARTL_ref_; \
            } while (false)

                /* At this stage, we just keep track of all of
                 * the live references that we've seen.  Later,
                 * we'll walk through each of these lists and
                 * deal with the referents.
                 */
                if (refFlags == CLASS_ISREFERENCE) {
                    /* It's a soft reference.  Depending on the state,
                     * we'll attempt to collect all of them, some of
                     * them, or none of them.
                     */
                    if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_NONE)
                    {
                sr_collect_none:
                        markObjectNonNull(referent, ctx);
                    } else if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_ALL)
                    {
                sr_collect_all:
                        ADD_REF_TO_LIST(gcHeap->softReferences, obj);
                    } else {
                        /* We'll only try to collect half of the
                         * referents.
                         */
                        if (gcHeap->softReferenceColor++ & 1) {
                            goto sr_collect_none;
                        }
                        goto sr_collect_all;
                    }
                } else {
                    /* It's a weak or phantom reference.
                     * Clearing CLASS_ISREFERENCE will reveal which.
                     */
                    refFlags &= ~CLASS_ISREFERENCE;
                    if (refFlags == CLASS_ISWEAKREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->weakReferences, obj);
                    } else if (refFlags == CLASS_ISPHANTOMREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->phantomReferences, obj);
                    } else {
                        assert(!"Unknown reference type");
                    }
                }
#undef ADD_REF_TO_LIST
            }
        }

    skip_reference:
        /* If this is a class object, mark various other things that
         * its internals point to.
         *
         * All class objects are instances of java.lang.Class,
         * including the java.lang.Class class object.
         */
        if (clazz == gDvm.classJavaLangClass) {
            scanClassObject((ClassObject *)obj, ctx);
        }
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = NULL;
#endif
}
717
718static void
719processMarkStack(GcMarkContext *ctx)
720{
721 const Object **const base = ctx->stack.base;
722
723 /* Scan anything that's on the mark stack.
724 * We can't use the bitmaps anymore, so use
725 * a finger that points past the end of them.
726 */
727 ctx->finger = (void *)ULONG_MAX;
728 while (ctx->stack.top != base) {
729 scanObject(*ctx->stack.top++, ctx);
730 }
731}
732
733#ifndef NDEBUG
734static uintptr_t gLastFinger = 0;
735#endif
736
737static bool
738scanBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
739{
740 GcMarkContext *ctx = (GcMarkContext *)arg;
741 size_t i;
742
743#ifndef NDEBUG
744 assert((uintptr_t)finger >= gLastFinger);
745 gLastFinger = (uintptr_t)finger;
746#endif
747
748 ctx->finger = finger;
749 for (i = 0; i < numPtrs; i++) {
750 /* The pointers we're getting back are DvmHeapChunks,
751 * not Objects.
752 */
753 scanObject(chunk2ptr(*ptrs++), ctx);
754 }
755
756 return true;
757}
758
/* Given bitmaps with the root set marked, find and mark all
 * reachable objects.  When this returns, the entire set of
 * live objects will be marked and the mark stack will be empty.
 */
void dvmHeapScanMarkedObjects()
{
    GcMarkContext *ctx = &gDvm.gcHeap->markContext;

    /* dvmHeapBeginMarkStep() left the finger NULL; it must not have
     * been touched since.
     */
    assert(ctx->finger == NULL);

    /* The bitmaps currently have bits set for the root set.
     * Walk across the bitmaps and scan each object.
     */
#ifndef NDEBUG
    gLastFinger = 0;
#endif
    dvmHeapBitmapWalkList(ctx->bitmaps, ctx->numBitmaps,
            scanBitmapCallback, ctx);

    /* We've walked the mark bitmaps.  Scan anything that's
     * left on the mark stack.
     */
    processMarkStack(ctx);

    LOG_SCAN("done with marked objects\n");
}
785
/** Clear a Reference object's referent field directly.
 *
 *  @return true if we need to schedule a call to clear()
 *          (only possible when FANCY_REFERENCE_SUBCLASS is enabled
 *          and the subclass overrides clear()).
 */
static bool clearReference(Object *reference)
{
    /* This is what the default implementation of Reference.clear()
     * does.  We're required to clear all references to a given
     * referent atomically, so we can't pop in and out of interp
     * code each time.
     *
     * Also, someone may have subclassed one of the basic Reference
     * types, overriding clear().  We can't trust the clear()
     * implementation to call super.clear();  we cannot let clear()
     * resurrect the referent.  If we clear it here, we can safely
     * call any overriding implementations.
     */
    dvmSetFieldObject(reference,
            gDvm.offJavaLangRefReference_referent, NULL);

#if FANCY_REFERENCE_SUBCLASS
    /* See if clear() has actually been overridden.  If so,
     * we need to schedule a call to it before calling enqueue().
     */
    if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_clear]->clazz !=
            gDvm.classJavaLangRefReference)
    {
        /* clear() has been overridden;  return true to indicate
         * that we need to schedule a call to the real clear()
         * implementation.
         */
        return true;
    }
#endif

    return false;
}
821
/** Decide whether a Reference must be enqueued on its ReferenceQueue.
 *
 *  @return true if we need to schedule a call to enqueue().
 *
 *  NOTE: the #if below deliberately brackets only the condition; when
 *  FANCY_REFERENCE_SUBCLASS is disabled, the braced block always runs.
 */
static bool enqueueReference(Object *reference)
{
#if FANCY_REFERENCE_SUBCLASS
    /* See if this reference class has overridden enqueue();
     * if not, we can take a shortcut.
     */
    if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_enqueue]->clazz
            == gDvm.classJavaLangRefReference)
#endif
    {
        Object *queue = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queue);
        Object *queueNext = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queueNext);
        if (queue == NULL || queueNext != NULL) {
            /* There is no queue, or the reference has already
             * been enqueued.  The Reference.enqueue() method
             * will do nothing even if we call it.
             */
            return false;
        }
    }

    /* We need to call enqueue(), but if we called it from
     * here we'd probably deadlock.  Schedule a call.
     */
    return true;
}
852
/* Process one strength class of the reference lists built by
 * scanObject(): decide clear/enqueue for each reference whose referent
 * is still unmarked, batch the resulting work for the heap worker, and
 * (for phantoms) re-mark surviving referents.
 *
 * All objects for stronger reference levels have been
 * marked before this is called.
 */
void dvmHeapHandleReferences(Object *refListHead, enum RefType refType)
{
    Object *reference;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;
    const int offVmData = gDvm.offJavaLangRefReference_vmData;
    const int offReferent = gDvm.offJavaLangRefReference_referent;
    bool workRequired = false;

/* Debug counters (deliberately unindented in the original source). */
size_t numCleared = 0;
size_t numEnqueued = 0;
    reference = refListHead;
    while (reference != NULL) {
        Object *next;
        Object *referent;

        /* Pull the interesting fields out of the Reference object.
         */
        next = dvmGetFieldObject(reference, offVmData);
        referent = dvmGetFieldObject(reference, offReferent);

        //TODO: when handling REF_PHANTOM, unlink any references
        //      that fail this initial if().  We need to re-walk
        //      the list, and it would be nice to avoid the extra
        //      work.
        if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
            bool schedClear, schedEnqueue;

            /* This is the strongest reference that refers to referent.
             * Do the right thing.
             */
            switch (refType) {
            case REF_SOFT:
            case REF_WEAK:
                schedClear = clearReference(reference);
                schedEnqueue = enqueueReference(reference);
                break;
            case REF_PHANTOM:
                /* PhantomReferences are not cleared automatically.
                 * Until someone clears it (or the reference itself
                 * is collected), the referent must remain alive.
                 *
                 * It's necessary to fully mark the referent because
                 * it will still be present during the next GC, and
                 * all objects that it points to must be valid.
                 * (The referent will be marked outside of this loop,
                 * after handing all references of this strength, in
                 * case multiple references point to the same object.)
                 */
                schedClear = false;

                /* A PhantomReference is only useful with a
                 * queue, but since it's possible to create one
                 * without a queue, we need to check.
                 */
                schedEnqueue = enqueueReference(reference);
                break;
            default:
                assert(!"Bad reference type");
                schedClear = false;
                schedEnqueue = false;
                break;
            }
numCleared += schedClear ? 1 : 0;
numEnqueued += schedEnqueue ? 1 : 0;

            if (schedClear || schedEnqueue) {
                uintptr_t workBits;

                /* Stuff the clear/enqueue bits in the bottom of
                 * the pointer.  Assumes that objects are 8-byte
                 * aligned.
                 *
                 * Note that we are adding the *Reference* (which
                 * is by definition already marked at this point) to
                 * this list; we're not adding the referent (which
                 * has already been cleared).
                 */
                assert(((intptr_t)reference & 3) == 0);
                assert(((WORKER_CLEAR | WORKER_ENQUEUE) & ~3) == 0);
                workBits = (schedClear ? WORKER_CLEAR : 0) |
                           (schedEnqueue ? WORKER_ENQUEUE : 0);
                if (!dvmHeapAddRefToLargeTable(
                        &gDvm.gcHeap->referenceOperations,
                        (Object *)((uintptr_t)reference | workBits)))
                {
                    LOGE_HEAP("dvmMalloc(): no room for any more "
                            "reference operations\n");
                    dvmAbort();
                }
                workRequired = true;
            }

            if (refType != REF_PHANTOM) {
                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            } // else this is handled later for REF_PHANTOM

        } // else there was a stronger reference to the referent.

        reference = next;
    }
#define refType2str(r) \
    ((r) == REF_SOFT ? "soft" : ( \
     (r) == REF_WEAK ? "weak" : ( \
     (r) == REF_PHANTOM ? "phantom" : "UNKNOWN" )))
LOGD_HEAP("dvmHeapHandleReferences(): cleared %zd, enqueued %zd %s references\n", numCleared, numEnqueued, refType2str(refType));

    /* Walk though the reference list again, and mark any non-clear/marked
     * referents.  Only PhantomReferences can have non-clear referents
     * at this point.
     */
    if (refType == REF_PHANTOM) {
        bool scanRequired = false;

        HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
        reference = refListHead;
        while (reference != NULL) {
            Object *next;
            Object *referent;

            /* Pull the interesting fields out of the Reference object.
             */
            next = dvmGetFieldObject(reference, offVmData);
            referent = dvmGetFieldObject(reference, offReferent);

            if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
                markObjectNonNull(referent, markContext);
                scanRequired = true;

                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            }

            reference = next;
        }
        HPROF_CLEAR_GC_SCAN_STATE();

        if (scanRequired) {
            processMarkStack(markContext);
        }
    }

    if (workRequired) {
        dvmSignalHeapWorker(false);
    }
}
1006
1007
/* Find unreachable objects that need to be finalized,
 * and schedule them for finalization.
 *
 * Runs after the mark phase: every finalizable object that is still
 * unmarked is moved from gDvm.gcHeap->finalizableRefs onto
 * pendingFinalizationRefs, re-marked (along with everything reachable
 * from it) so the upcoming sweep won't free it, and the HeapWorker
 * thread is signaled to run the finalizers later.
 */
void dvmHeapScheduleFinalizations()
{
    HeapRefTable newPendingRefs;
    LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
    Object **ref;
    Object **lastRef;
    size_t totalPendCount;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;

    /*
     * All reachable objects have been marked.
     * Any unmarked finalizable objects need to be finalized.
     */

    /* Create a table that the new pending refs will
     * be added to.
     */
    if (!dvmHeapInitHeapRefTable(&newPendingRefs, 128)) {
        //TODO: mark all finalizable refs and hope that
        //      we can schedule them next time.  Watch out,
        //      because we may be expecting to free up space
        //      by calling finalizers.
        LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
                "pending finalizations\n");
        dvmAbort();
    }

    /* Walk through finalizableRefs and move any unmarked references
     * to the list of new pending refs.
     */
    totalPendCount = 0;
    while (finRefs != NULL) {
        Object **gapRef;
        size_t newPendCount = 0;

        /* gapRef trails ref, compacting the entries that stay on
         * finalizableRefs over the holes left by moved entries.
         */
        gapRef = ref = finRefs->refs.table;
        lastRef = finRefs->refs.nextEntry;
        while (ref < lastRef) {
            DvmHeapChunk *hc;

            hc = ptr2chunk(*ref);
            if (!isMarked(hc, markContext)) {
                /* Unreachable: queue it for finalization. */
                if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
                    //TODO: add the current table and allocate
                    //      a new, smaller one.
                    LOGE_GC("dvmHeapScheduleFinalizations(): "
                            "no room for any more pending finalizations: %zd\n",
                            dvmHeapNumHeapRefTableEntries(&newPendingRefs));
                    dvmAbort();
                }
                newPendCount++;
            } else {
                /* This ref is marked, so will remain on finalizableRefs.
                 */
                if (newPendCount > 0) {
                    /* Copy it up to fill the holes.
                     */
                    *gapRef++ = *ref;
                } else {
                    /* No holes yet; don't bother copying.
                     */
                    gapRef++;
                }
            }
            ref++;
        }
        finRefs->refs.nextEntry = gapRef;
        //TODO: if the table is empty when we're done, free it.
        totalPendCount += newPendCount;
        finRefs = finRefs->next;
    }
    LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
            totalPendCount);
    if (totalPendCount == 0) {
        /* No objects required finalization.
         * Free the empty temporary table.
         */
        dvmClearReferenceTable(&newPendingRefs);
        return;
    }

    /* Add the new pending refs to the main list.
     */
    if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
                &newPendingRefs))
    {
        LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
                "pending finalizations\n");
        dvmAbort();
    }

    //TODO: try compacting the main list with a memcpy loop

    /* Mark the refs we just moved; we don't want them or their
     * children to get swept yet.
     */
    ref = newPendingRefs.table;
    lastRef = newPendingRefs.nextEntry;
    assert(ref < lastRef);
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
    while (ref < lastRef) {
        markObjectNonNull(*ref, markContext);
        ref++;
    }
    HPROF_CLEAR_GC_SCAN_STATE();

    /* Set markAllReferents so that we don't collect referents whose
     * only references are in final-reachable objects.
     * TODO: eventually provide normal reference behavior by properly
     * marking these references.
     */
    gDvm.gcHeap->markAllReferents = true;
    processMarkStack(markContext);
    gDvm.gcHeap->markAllReferents = false;

    /* Wake the HeapWorker thread so it can run the queued finalizers. */
    dvmSignalHeapWorker(false);
}
1128
1129void dvmHeapFinishMarkStep()
1130{
1131 HeapBitmap *markBitmap;
1132 HeapBitmap objectBitmap;
1133 GcMarkContext *markContext;
1134
1135 markContext = &gDvm.gcHeap->markContext;
1136
1137 /* The sweep step freed every object that appeared in the
1138 * HeapSource bitmaps that didn't appear in the mark bitmaps.
1139 * The new state of the HeapSource is exactly the final
1140 * mark bitmaps, so swap them in.
1141 *
1142 * The old bitmaps will be swapped into the context so that
1143 * we can clean them up.
1144 */
1145 dvmHeapSourceReplaceObjectBitmaps(markContext->bitmaps,
1146 markContext->numBitmaps);
1147
1148 /* Clean up the old HeapSource bitmaps and anything else associated
1149 * with the marking process.
1150 */
1151 dvmHeapBitmapDeleteList(markContext->bitmaps, markContext->numBitmaps);
1152 destroyMarkStack(&markContext->stack);
1153
1154 memset(markContext, 0, sizeof(*markContext));
1155}
1156
1157#if WITH_HPROF && WITH_HPROF_UNREACHABLE
1158static bool
1159hprofUnreachableBitmapCallback(size_t numPtrs, void **ptrs,
1160 const void *finger, void *arg)
1161{
1162 hprof_context_t *hctx = (hprof_context_t *)arg;
1163 size_t i;
1164
1165 for (i = 0; i < numPtrs; i++) {
1166 Object *obj;
1167
1168 /* The pointers we're getting back are DvmHeapChunks, not
1169 * Objects.
1170 */
1171 obj = (Object *)chunk2ptr(*ptrs++);
1172
1173 hprofMarkRootObject(hctx, obj, 0);
1174 hprofDumpHeapObject(hctx, obj);
1175 }
1176
1177 return true;
1178}
1179
1180static void
1181hprofDumpUnmarkedObjects(const HeapBitmap markBitmaps[],
1182 const HeapBitmap objectBitmaps[], size_t numBitmaps)
1183{
1184 hprof_context_t *hctx = gDvm.gcHeap->hprofContext;
1185 if (hctx == NULL) {
1186 return;
1187 }
1188
1189 LOGI("hprof: dumping unreachable objects\n");
1190
1191 HPROF_SET_GC_SCAN_STATE(HPROF_UNREACHABLE, 0);
1192
1193 dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
1194 hprofUnreachableBitmapCallback, hctx);
1195
1196 HPROF_CLEAR_GC_SCAN_STATE();
1197}
1198#endif
1199
/* Bitmap-walk callback invoked on each batch of garbage heap chunks
 * during the sweep.  For every chunk: frees the object's monitor,
 * releases class innards when the object is itself a ClassObject, and
 * returns the chunk's storage to the HeapSource.  Always returns true
 * so the walk continues.
 */
static bool
sweepBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
{
    const ClassObject *const classJavaLangClass = gDvm.classJavaLangClass;
    size_t i;

    for (i = 0; i < numPtrs; i++) {
        DvmHeapChunk *hc;
        Object *obj;

        /* The pointers we're getting back are DvmHeapChunks, not
         * Objects.
         */
        hc = (DvmHeapChunk *)*ptrs++;
        obj = (Object *)chunk2ptr(hc);

#if WITH_OBJECT_HEADERS
        /* Paranoia: a chunk marked in the current generation must
         * never appear on the sweep list.
         */
        if (hc->markGeneration == gGeneration) {
            LOGE("sweeping marked object: 0x%08x\n", (uint)obj);
            dvmAbort();
        }
#endif

        /* Free the monitor associated with the object.
         */
        dvmFreeObjectMonitor(obj);

        /* NOTE: Dereferencing clazz is dangerous. If obj was the last
         * one to reference its class object, the class object could be
         * on the sweep list, and could already have been swept, leaving
         * us with a stale pointer.
         */
        LOGV_SWEEP("FREE: 0x%08x %s\n", (uint)obj, obj->clazz->name);

        /* This assumes that java.lang.Class will never go away.
         * If it can, and we were the last reference to it, it
         * could have already been swept. However, even in that case,
         * gDvm.classJavaLangClass should still have a useful
         * value.
         */
        if (obj->clazz == classJavaLangClass) {
            LOGV_SWEEP("---------------> %s\n", ((ClassObject *)obj)->name);
            /* dvmFreeClassInnards() may have already been called,
             * but it's safe to call on the same ClassObject twice.
             */
            dvmFreeClassInnards((ClassObject *)obj);
        }

#if 0
        /* Overwrite the to-be-freed object to make stale references
         * more obvious.  (Debug aid; compiled out.)
         */
        {
            int chunklen;
            ClassObject *clazz = obj->clazz;
#if WITH_OBJECT_HEADERS
            DvmHeapChunk chunk = *hc;
            chunk.header = ~OBJECT_HEADER | 1;
#endif
            chunklen = dvmHeapSourceChunkSize(hc);
            memset(hc, 0xa5, chunklen);
            obj->clazz = (ClassObject *)((uintptr_t)clazz ^ 0xffffffff);
#if WITH_OBJECT_HEADERS
            *hc = chunk;
#endif
        }
#endif

//TODO: provide a heapsource function that takes a list of pointers to free
//      and call it outside of this loop.
        dvmHeapSourceFree(hc);
    }

    return true;
}
1275
1276/* A function suitable for passing to dvmHashForeachRemove()
1277 * to clear out any unmarked objects. Clears the low bits
1278 * of the pointer because the intern table may set them.
1279 */
1280static int isUnmarkedObject(void *object)
1281{
1282 return !isMarked(ptr2chunk((uintptr_t)object & ~(HB_OBJECT_ALIGNMENT-1)),
1283 &gDvm.gcHeap->markContext);
1284}
1285
/* Walk through the list of objects that haven't been
 * marked and free them.
 *
 * On return, *numFreed and *sizeFreed hold the number of objects and
 * bytes reclaimed, computed by differencing the HeapSource allocation
 * counters taken before and after the sweep.
 */
void
dvmHeapSweepUnmarkedObjects(int *numFreed, size_t *sizeFreed)
{
    const HeapBitmap *markBitmaps;
    const GcMarkContext *markContext;
    HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
    size_t origObjectsAllocated;
    size_t origBytesAllocated;
    size_t numBitmaps;

    /* All reachable objects have been marked.
     * Detach any unreachable interned strings before
     * we sweep.
     */
    dvmGcDetachDeadInternedStrings(isUnmarkedObject);

    /* Free any known objects that are not marked.
     */
    origObjectsAllocated = dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    origBytesAllocated = dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

    markContext = &gDvm.gcHeap->markContext;
    markBitmaps = markContext->bitmaps;
    numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
                    HEAP_SOURCE_MAX_HEAP_COUNT);
#ifndef NDEBUG
    /* Sanity check: the mark context must have one bitmap per heap. */
    if (numBitmaps != markContext->numBitmaps) {
        LOGE("heap bitmap count mismatch: %zd != %zd\n",
                numBitmaps, markContext->numBitmaps);
        dvmAbort();
    }
#endif

#if WITH_HPROF && WITH_HPROF_UNREACHABLE
    /* Record the soon-to-be-freed objects in the hprof dump, if any. */
    hprofDumpUnmarkedObjects(markBitmaps, objectBitmaps, numBitmaps);
#endif

    /* An object is garbage iff its bit is set in an object bitmap but
     * clear in the matching mark bitmap; the XOR walk visits exactly
     * those chunks, and sweepBitmapCallback() frees them.
     */
    dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
            sweepBitmapCallback, NULL);

    *numFreed = origObjectsAllocated -
            dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    *sizeFreed = origBytesAllocated -
            dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

#ifdef WITH_PROFILER
    if (gDvm.allocProf.enabled) {
        gDvm.allocProf.freeCount += *numFreed;
        gDvm.allocProf.freeSize += *sizeFreed;
    }
#endif
}