/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_REFERENCE_QUEUE_H_
#define ART_RUNTIME_GC_REFERENCE_QUEUE_H_

#include <iosfwd>
#include <string>
#include <vector>

#include "atomic.h"
#include "base/timing_logger.h"
#include "globals.h"
#include "jni.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "thread_pool.h"

namespace art {
namespace mirror {
class Reference;
}  // namespace mirror

namespace gc {

class Heap;

// Used to temporarily store java.lang.ref.Reference(s) during GC and prior to queueing on the
// appropriate java.lang.ref.ReferenceQueue. The linked list is maintained in the
// java.lang.ref.Reference objects.
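//
// Rough sketch for illustration (not part of the API): the queue is an intrusive singly linked
// list threaded through the Reference objects themselves, so no per-node allocation is needed
// during GC. Assuming pending-next style accessors on mirror::Reference (names hypothetical):
//
//   list_ -> refA -> refB -> refC   // each link lives in the reference's pending-next field
//
// IsEmpty() is then just a null check on list_, and Clear() drops the whole chain at once.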
class ReferenceQueue {
 public:
  explicit ReferenceQueue(Mutex* lock);

  // Enqueue a reference if it is not already enqueued. Thread safe to call from multiple threads
  // since it uses a lock to avoid a race between checking for the reference's presence and adding
  // it.
  void AtomicEnqueueIfNotEnqueued(Thread* self, mirror::Reference* ref)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(lock_);
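  // A minimal sketch of the intent (illustrative only; the accessor name is an assumption):
  //
  //   MutexLock mu(self, *lock_);
  //   if (!ref->IsEnqueued()) {
  //     EnqueuePendingReference(ref);
  //   }
  //
  // Without the lock, two threads could both observe the reference as absent and enqueue it twice.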

  // Enqueue a reference. Unlike EnqueuePendingReference, EnqueueReference checks that the
  // reference IsEnqueueable. Not thread safe; used when mutators are paused to minimize lock
  // overhead.
  void EnqueueReference(mirror::Reference* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Enqueue a reference without checking that it is enqueueable.
  void EnqueuePendingReference(mirror::Reference* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Dequeue the first reference (returns list_).
  mirror::Reference* DequeuePendingReference() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
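  // Typical drain pattern, for illustration only (uses only the members declared here):
  //
  //   while (!queue->IsEmpty()) {
  //     mirror::Reference* ref = queue->DequeuePendingReference();
  //     // ... process ref ...
  //   }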

  // Enqueues finalizer references with white referents. White referents are blackened, moved to
  // the zombie field, and the referent field is cleared.
  void EnqueueFinalizerReferences(ReferenceQueue* cleared_references,
                                  IsHeapReferenceMarkedCallback* is_marked_callback,
                                  MarkObjectCallback* mark_object_callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
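  // Per-reference intent, roughly (illustrative pseudo-code, not the actual implementation):
  //
  //   if referent is white (not marked):
  //     mark_object_callback(referent, arg)   // blacken it so finalization can still run
  //     move the referent into the zombie field
  //     clear the referent field
  //     move the reference onto cleared_references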

  // Walks the reference list marking any references subject to the reference clearing policy.
  // References with a black referent are removed from the list. References with white referents
  // biased toward saving are blackened and also removed from the list.
  void ForwardSoftReferences(IsHeapReferenceMarkedCallback* preserve_callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
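  // Rough sketch of the effect described above (illustrative pseudo-code):
  //
  //   for each reference in the list:
  //     if referent is already black:  remove the reference from the list
  //     else if policy says preserve:  preserve_callback marks (blackens) the referent,
  //                                    and the reference is likewise removed from the list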

  // Unlink the reference list, clearing the referents of reference objects with white referents.
  // Cleared references registered to a reference queue are scheduled for appending by the heap
  // worker thread.
  void ClearWhiteReferences(ReferenceQueue* cleared_references,
                            IsHeapReferenceMarkedCallback* is_marked_callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
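  // The intended behavior, paraphrased as pseudo-code (illustrative only):
  //
  //   for each reference in the list:
  //     if referent is still white (is_marked_callback says not marked):
  //       clear the referent field
  //       if the reference is registered to a java.lang.ref.ReferenceQueue:
  //         move it to cleared_references   // appended later by the heap worker thread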

  void Dump(std::ostream& os) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  size_t GetLength() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsEmpty() const {
    return list_ == nullptr;
  }
  void Clear() {
    list_ = nullptr;
  }
  mirror::Reference* GetList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return list_;
  }

  // Visits list_, currently only used for the mark compact GC.
  void UpdateRoots(IsMarkedCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  // Lock, used for parallel GC reference enqueuing. It allows for multiple threads simultaneously
  // calling AtomicEnqueueIfNotEnqueued.
  Mutex* const lock_;
  // The actual reference list. Only a root for the mark compact GC since it will be null for other
  // GC types.
  mirror::Reference* list_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceQueue);
};

}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_REFERENCE_QUEUE_H_