path: root/runtime/gc/reference_queue.h
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_REFERENCE_QUEUE_H_
#define ART_RUNTIME_GC_REFERENCE_QUEUE_H_

#include <iosfwd>
#include <string>
#include <vector>

#include "base/atomic.h"
#include "base/locks.h"
#include "base/timing_logger.h"
#include "jni.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "runtime_globals.h"
#include "thread_pool.h"

namespace art {

class Mutex;

namespace mirror {
class Reference;
}  // namespace mirror

class IsMarkedVisitor;
class MarkObjectVisitor;

namespace gc {

namespace collector {
class GarbageCollector;
}  // namespace collector

class Heap;

// Used to temporarily store java.lang.ref.Reference(s) during GC and prior to queueing on the
// appropriate java.lang.ref.ReferenceQueue. The linked list is maintained as an unordered,
// circular, and singly-linked list using the pendingNext fields of the java.lang.ref.Reference
// objects. (A simplified sketch of this list layout follows the header below.)
class ReferenceQueue {
 public:
  explicit ReferenceQueue(Mutex* lock);

  // Enqueue a reference if it is unprocessed. Thread safe to call from multiple
  // threads since it uses a lock to avoid a race between checking for the reference's presence
  // and adding it.
  void AtomicEnqueueIfNotEnqueued(Thread* self, ObjPtr<mirror::Reference> ref)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!*lock_);

  // Enqueue a reference. The reference must be unprocessed.
  // Not thread safe, used when mutators are paused to minimize lock overhead.
  void EnqueueReference(ObjPtr<mirror::Reference> ref) REQUIRES_SHARED(Locks::mutator_lock_);

  // Dequeue a reference from the queue and return it.
  // DisableReadBarrierForReference must be called for the reference returned by this function.
  ObjPtr<mirror::Reference> DequeuePendingReference() REQUIRES_SHARED(Locks::mutator_lock_);

  // If applicable, disable the read barrier for the reference after its referent is handled (see
  // ConcurrentCopying::ProcessMarkStackRef). This must be called for a reference that's dequeued
  // from the pending queue (DequeuePendingReference).
  void DisableReadBarrierForReference(ObjPtr<mirror::Reference> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Enqueues finalizer references with white referents.  White referents are blackened, moved to
  // the zombie field, and the referent field is cleared.
  void EnqueueFinalizerReferences(ReferenceQueue* cleared_references,
                                  collector::GarbageCollector* collector)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Walks the reference list marking any references subject to the reference clearing policy.
  // References with a black referent are removed from the list.  References with white referents
  // biased toward saving are blackened and also removed from the list.
  void ForwardSoftReferences(MarkObjectVisitor* visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Unlink the reference list, clearing reference objects with white referents. Cleared references
  // registered to a reference queue are scheduled for appending by the heap worker thread.
  void ClearWhiteReferences(ReferenceQueue* cleared_references,
                            collector::GarbageCollector* collector)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Dump(std::ostream& os) const REQUIRES_SHARED(Locks::mutator_lock_);
  size_t GetLength() const REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsEmpty() const {
    return list_ == nullptr;
  }
  void Clear() {
    list_ = nullptr;
  }
  mirror::Reference* GetList() REQUIRES_SHARED(Locks::mutator_lock_) {
    return list_;
  }

  // Visits list_, currently only used for the mark compact GC.
  void UpdateRoots(IsMarkedVisitor* visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Lock, used for parallel GC reference enqueuing. It allows multiple threads to call
  // AtomicEnqueueIfNotEnqueued simultaneously.
  Mutex* const lock_;
  // The actual reference list. Only a root for the mark compact GC since it will be null for other
  // GC types. Not an ObjPtr since it is accessed from multiple threads.
  mirror::Reference* list_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ReferenceQueue);
};

}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_REFERENCE_QUEUE_H_
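
The comments above describe the queue as an unordered, circular, singly-linked list threaded through the references' pendingNext fields, with a lock used only on the enqueue-if-not-enqueued path. The stand-alone C++ sketch below illustrates that shape under simplified assumptions: Node, SketchQueue, and their members are hypothetical names invented for this illustration, not ART types, and the code only mirrors the general idea of EnqueueReference, AtomicEnqueueIfNotEnqueued, and DequeuePendingReference rather than the runtime's actual implementation.

#include <cassert>
#include <cstdio>
#include <mutex>

// Hypothetical stand-in for mirror::Reference: only the pendingNext field
// matters for the queue structure. A null pending_next means "not enqueued".
struct Node {
  Node* pending_next = nullptr;
};

// Sketch of a ReferenceQueue-like container: an unordered, circular,
// singly-linked list threaded through the nodes' pending_next fields.
class SketchQueue {
 public:
  // Unsynchronized enqueue, analogous to EnqueueReference, which the header
  // says is used while mutators are paused, so no lock is needed.
  void Enqueue(Node* ref) {
    assert(ref->pending_next == nullptr);  // Must be unprocessed.
    if (list_ == nullptr) {
      ref->pending_next = ref;             // Single element points at itself.
      list_ = ref;
    } else {
      // Splice the new node in right after the list head; order is irrelevant.
      ref->pending_next = list_->pending_next;
      list_->pending_next = ref;
    }
  }

  // Lock-guarded variant in the spirit of AtomicEnqueueIfNotEnqueued: holding
  // the lock makes the "already enqueued?" check and the insertion one step.
  void EnqueueIfNotEnqueued(Node* ref) {
    std::lock_guard<std::mutex> guard(lock_);
    if (ref->pending_next == nullptr) {    // Not yet on any queue.
      Enqueue(ref);
    }
  }

  // Pops one node, resetting its pending_next to null so it can be reused,
  // loosely mirroring DequeuePendingReference.
  Node* Dequeue() {
    assert(list_ != nullptr);
    Node* head = list_->pending_next;
    Node* ref;
    if (head == list_) {
      ref = list_;                         // Last element: queue becomes empty.
      list_ = nullptr;
    } else {
      list_->pending_next = head->pending_next;  // Unlink head from the cycle.
      ref = head;
    }
    ref->pending_next = nullptr;
    return ref;
  }

  bool IsEmpty() const { return list_ == nullptr; }

 private:
  std::mutex lock_;
  Node* list_ = nullptr;
};

int main() {
  Node a, b, c;
  SketchQueue q;
  q.EnqueueIfNotEnqueued(&a);
  q.EnqueueIfNotEnqueued(&a);  // Second call is a no-op: already enqueued.
  q.EnqueueIfNotEnqueued(&b);
  q.Enqueue(&c);
  int n = 0;
  while (!q.IsEmpty()) {
    q.Dequeue();
    ++n;
  }
  std::printf("dequeued %d nodes\n", n);  // Prints 3, not 4.
  return 0;
}

The invariant carrying the sketch is that a null pending_next means "not enqueued"; that is what lets the lock-guarded check-and-insert stay race-free while the unsynchronized enqueue and dequeue remain cheap for the paused-mutator case.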