summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
-rw-r--r--runtime/indirect_reference_table-inl.h3
-rw-r--r--runtime/indirect_reference_table.cc7
-rw-r--r--runtime/indirect_reference_table.h10
-rw-r--r--runtime/jni_internal.cc9
-rw-r--r--runtime/read_barrier.h4
-rw-r--r--runtime/reference_table.cc19
-rw-r--r--runtime/reference_table.h8
-rw-r--r--runtime/scoped_thread_state_change.h6
-rw-r--r--runtime/thread.cc4
9 files changed, 40 insertions, 30 deletions
diff --git a/runtime/indirect_reference_table-inl.h b/runtime/indirect_reference_table-inl.h
index b787233b37..f561643399 100644
--- a/runtime/indirect_reference_table-inl.h
+++ b/runtime/indirect_reference_table-inl.h
@@ -59,8 +59,7 @@ inline bool IndirectReferenceTable::GetChecked(IndirectRef iref) const {
// Make sure that the entry at "idx" is correctly paired with "iref".
inline bool IndirectReferenceTable::CheckEntry(const char* what, IndirectRef iref, int idx) const {
- const mirror::Object* obj = table_[idx];
- IndirectRef checkRef = ToIndirectRef(obj, idx);
+ IndirectRef checkRef = ToIndirectRef(idx);
if (UNLIKELY(checkRef != iref)) {
LOG(ERROR) << "JNI ERROR (app bug): attempt to " << what
<< " stale " << kind_ << " " << iref
diff --git a/runtime/indirect_reference_table.cc b/runtime/indirect_reference_table.cc
index 98e1d21d93..ad798ed60f 100644
--- a/runtime/indirect_reference_table.cc
+++ b/runtime/indirect_reference_table.cc
@@ -137,13 +137,13 @@ IndirectRef IndirectReferenceTable::Add(uint32_t cookie, mirror::Object* obj) {
DCHECK_GE(pScan, table_ + prevState.parts.topIndex);
}
UpdateSlotAdd(obj, pScan - table_);
- result = ToIndirectRef(obj, pScan - table_);
+ result = ToIndirectRef(pScan - table_);
*pScan = obj;
segment_state_.parts.numHoles--;
} else {
// Add to the end.
UpdateSlotAdd(obj, topIndex);
- result = ToIndirectRef(obj, topIndex);
+ result = ToIndirectRef(topIndex);
table_[topIndex++] = obj;
segment_state_.parts.topIndex = topIndex;
}
@@ -277,9 +277,6 @@ void IndirectReferenceTable::Dump(std::ostream& os) const {
// while the read barrier won't.
entries.push_back(obj);
} else {
- // We need a read barrier if weak globals. Since this is for
- // debugging where performance isn't top priority, we
- // unconditionally enable the read barrier, which is conservative.
obj = ReadBarrier::BarrierForRoot<mirror::Object, kWithReadBarrier>(root);
entries.push_back(obj);
}
diff --git a/runtime/indirect_reference_table.h b/runtime/indirect_reference_table.h
index 5b3ed685c7..b3a855dfb3 100644
--- a/runtime/indirect_reference_table.h
+++ b/runtime/indirect_reference_table.h
@@ -27,6 +27,7 @@
#include "mem_map.h"
#include "object_callbacks.h"
#include "offsets.h"
+#include "read_barrier.h"
namespace art {
namespace mirror {
@@ -215,6 +216,7 @@ class IrtIterator {
}
mirror::Object** operator*() {
+ // This does not have a read barrier as this is used to visit roots.
return &table_[i_];
}
@@ -298,6 +300,7 @@ class IndirectReferenceTable {
return segment_state_.parts.topIndex;
}
+ // Note IrtIterator does not have a read barrier as it's used to visit roots.
IrtIterator begin() {
return IrtIterator(table_, 0, Capacity());
}
@@ -333,7 +336,7 @@ class IndirectReferenceTable {
* The object pointer itself is subject to relocation in some GC
* implementations, so we shouldn't really be using it here.
*/
- IndirectRef ToIndirectRef(const mirror::Object* /*o*/, uint32_t tableIndex) const {
+ IndirectRef ToIndirectRef(uint32_t tableIndex) const {
DCHECK_LT(tableIndex, 65536U);
uint32_t serialChunk = slot_data_[tableIndex].serial;
uintptr_t uref = serialChunk << 20 | (tableIndex << 2) | kind_;
@@ -368,9 +371,8 @@ class IndirectReferenceTable {
std::unique_ptr<MemMap> table_mem_map_;
// Mem map where we store the extended debugging info.
std::unique_ptr<MemMap> slot_mem_map_;
- // bottom of the stack. If a JNI weak global table, do not directly
- // access the object references in this as they are weak roots. Use
- // Get() that has a read barrier.
+ // bottom of the stack. Do not directly access the object references
+ // in this as they are roots. Use Get() that has a read barrier.
mirror::Object** table_;
/* bit mask, ORed into all irefs */
IndirectRefKind kind_;
diff --git a/runtime/jni_internal.cc b/runtime/jni_internal.cc
index fc5d5905cb..17a5592e97 100644
--- a/runtime/jni_internal.cc
+++ b/runtime/jni_internal.cc
@@ -359,8 +359,9 @@ class SharedLibrary {
jni_on_load_result_(kPending) {
}
- mirror::Object* GetClassLoader() {
- return class_loader_;
+ mirror::Object* GetClassLoader() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::Object** root = &class_loader_;
+ return ReadBarrier::BarrierForRoot<mirror::Object, kWithReadBarrier>(root);
}
std::string GetPath() {
@@ -3160,9 +3161,7 @@ mirror::Object* JavaVMExt::DecodeWeakGlobal(Thread* self, IndirectRef ref) {
while (UNLIKELY(!allow_new_weak_globals_)) {
weak_globals_add_condition_.WaitHoldingLocks(self);
}
- // The weak globals do need a read barrier as they are weak roots.
- mirror::Object* obj = weak_globals_.Get<kWithReadBarrier>(ref);
- return obj;
+ return weak_globals_.Get(ref);
}
void JavaVMExt::DumpReferenceTables(std::ostream& os) {
diff --git a/runtime/read_barrier.h b/runtime/read_barrier.h
index 451d13c5db..ed5db4e4a5 100644
--- a/runtime/read_barrier.h
+++ b/runtime/read_barrier.h
@@ -33,11 +33,15 @@ namespace mirror {
class ReadBarrier {
public:
+ // It's up to the implementation whether the given field gets
+ // updated whereas the return value must be an updated reference.
template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE static MirrorType* Barrier(
mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // It's up to the implementation whether the given root gets updated
+ // whereas the return value must be an updated reference.
template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/reference_table.cc b/runtime/reference_table.cc
index 11527fa2fe..cd35863543 100644
--- a/runtime/reference_table.cc
+++ b/runtime/reference_table.cc
@@ -24,6 +24,7 @@
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/string-inl.h"
+#include "read_barrier.h"
#include "thread.h"
#include "utils.h"
@@ -51,7 +52,9 @@ void ReferenceTable::Add(mirror::Object* obj) {
void ReferenceTable::Remove(mirror::Object* obj) {
// We iterate backwards on the assumption that references are LIFO.
for (int i = entries_.size() - 1; i >= 0; --i) {
- if (entries_[i] == obj) {
+ mirror::Object* entry =
+ ReadBarrier::BarrierForRoot<mirror::Object, kWithReadBarrier>(&entries_[i]);
+ if (entry == obj) {
entries_.erase(entries_.begin() + i);
return;
}
@@ -140,12 +143,12 @@ size_t ReferenceTable::Size() const {
return entries_.size();
}
-void ReferenceTable::Dump(std::ostream& os) const {
+void ReferenceTable::Dump(std::ostream& os) {
os << name_ << " reference table dump:\n";
Dump(os, entries_);
}
-void ReferenceTable::Dump(std::ostream& os, const Table& entries) {
+void ReferenceTable::Dump(std::ostream& os, Table& entries) {
if (entries.empty()) {
os << " (empty)\n";
return;
@@ -160,7 +163,8 @@ void ReferenceTable::Dump(std::ostream& os, const Table& entries) {
}
os << " Last " << (count - first) << " entries (of " << count << "):\n";
for (int idx = count - 1; idx >= first; --idx) {
- mirror::Object* ref = entries[idx];
+ mirror::Object* ref =
+ ReadBarrier::BarrierForRoot<mirror::Object, kWithReadBarrier>(&entries[idx]);
if (ref == NULL) {
continue;
}
@@ -194,7 +198,12 @@ void ReferenceTable::Dump(std::ostream& os, const Table& entries) {
}
// Make a copy of the table and sort it.
- Table sorted_entries(entries.begin(), entries.end());
+ Table sorted_entries;
+ for (size_t i = 0; i < entries.size(); ++i) {
+ mirror::Object* entry =
+ ReadBarrier::BarrierForRoot<mirror::Object, kWithReadBarrier>(&entries[i]);
+ sorted_entries.push_back(entry);
+ }
std::sort(sorted_entries.begin(), sorted_entries.end(), ObjectComparator());
// Remove any uninteresting stuff from the list. The sort moved them all to the end.
diff --git a/runtime/reference_table.h b/runtime/reference_table.h
index 45309c9d99..1cd0999f26 100644
--- a/runtime/reference_table.h
+++ b/runtime/reference_table.h
@@ -39,19 +39,19 @@ class ReferenceTable {
ReferenceTable(const char* name, size_t initial_size, size_t max_size);
~ReferenceTable();
- void Add(mirror::Object* obj);
+ void Add(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void Remove(mirror::Object* obj);
+ void Remove(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
size_t Size() const;
- void Dump(std::ostream& os) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void Dump(std::ostream& os) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void VisitRoots(RootCallback* visitor, void* arg, uint32_t tid, RootType root_type);
private:
typedef std::vector<mirror::Object*> Table;
- static void Dump(std::ostream& os, const Table& entries)
+ static void Dump(std::ostream& os, Table& entries)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
friend class IndirectReferenceTable; // For Dump.
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index 7ce68c624b..d69162334d 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -146,7 +146,8 @@ class ScopedObjectAccessAlreadyRunnable {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
CHECK(!kMovingFields);
- return reinterpret_cast<mirror::ArtField*>(fid);
+ mirror::ArtField* field = reinterpret_cast<mirror::ArtField*>(fid);
+ return ReadBarrier::BarrierForRoot<mirror::ArtField, kWithReadBarrier>(&field);
}
jfieldID EncodeField(mirror::ArtField* field) const
@@ -162,7 +163,8 @@ class ScopedObjectAccessAlreadyRunnable {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
CHECK(!kMovingMethods);
- return reinterpret_cast<mirror::ArtMethod*>(mid);
+ mirror::ArtMethod* method = reinterpret_cast<mirror::ArtMethod*>(mid);
+ return ReadBarrier::BarrierForRoot<mirror::ArtMethod, kWithReadBarrier>(&method);
}
jmethodID EncodeMethod(mirror::ArtMethod* method) const
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 6980530623..d740230e11 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1298,9 +1298,7 @@ mirror::Object* Thread::DecodeJObject(jobject obj) const {
}
} else if (kind == kGlobal) {
JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
- // Strong global references do not need a read barrier.
- result = vm->globals.SynchronizedGet<kWithoutReadBarrier>(
- const_cast<Thread*>(this), &vm->globals_lock, ref);
+ result = vm->globals.SynchronizedGet(const_cast<Thread*>(this), &vm->globals_lock, ref);
} else {
DCHECK_EQ(kind, kWeakGlobal);
result = Runtime::Current()->GetJavaVM()->DecodeWeakGlobal(const_cast<Thread*>(this), ref);