 compiler/dex/quick/arm/assemble_arm.cc |  2
 compiler/optimizing/find_loops_test.cc | 49
 runtime/arch/arm/jni_entrypoints_arm.S | 15
 runtime/class_linker.cc                | 25
 runtime/class_linker.h                 |  5
 runtime/class_linker_test.cc           |  1
 runtime/indirect_reference_table-inl.h |  9
 runtime/indirect_reference_table.cc    | 15
 runtime/indirect_reference_table.h     |  4
 runtime/java_vm_ext.cc                 |  5
 runtime/mirror/class.cc                | 12
 runtime/mirror/class.h                 | 11
 runtime/mirror/object-inl.h            |  9
 runtime/runtime-inl.h                  |  4
 runtime/runtime.cc                     |  1
 15 files changed, 70 insertions(+), 97 deletions(-)
diff --git a/compiler/dex/quick/arm/assemble_arm.cc b/compiler/dex/quick/arm/assemble_arm.cc
index 06d9dd53b5..dcec861e22 100644
--- a/compiler/dex/quick/arm/assemble_arm.cc
+++ b/compiler/dex/quick/arm/assemble_arm.cc
@@ -427,7 +427,7 @@ const ArmEncodingMap ArmMir2Lir::EncodingMap[kArmLast] = {
REG_DEF_LR | NEEDS_FIXUP, "vldr", "!0s, [!1C, #!2E]", 4, kFixupVLoad),
ENCODING_MAP(kThumb2Vldrd, 0xed900b00,
kFmtDfp, 22, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 7, 0,
- kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD_OFF |
+ kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD_OFF4 |
REG_DEF_LR | NEEDS_FIXUP, "vldr", "!0S, [!1C, #!2E]", 4, kFixupVLoad),
ENCODING_MAP(kThumb2Vmuls, 0xee200a00,
kFmtSfp, 22, 12, kFmtSfp, 7, 16, kFmtSfp, 5, 0,
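
Note: the only change in this file is IS_LOAD_OFF -> IS_LOAD_OFF4 on the kThumb2Vldrd (double-precision VLDR) entry. Thumb-2 VLDR encodes its immediate as an 8-bit word count that the hardware scales by 4, so the encoding's load-offset flag has to declare 4-byte granularity or offset fixup will compute displacements at the wrong scale. A hedged sketch of the scaling involved (illustrative helper, not ART's assembler code):

    #include <cassert>
    #include <cstdint>

    // Thumb-2 VLDR (single or double precision) stores its offset as imm8 words.
    uint32_t EncodeVldrImm8(uint32_t byte_displacement) {
      assert((byte_displacement & 3u) == 0u);  // hardware multiplies imm8 by 4
      uint32_t imm8 = byte_displacement >> 2;  // bytes -> words
      assert(imm8 <= 0xFFu);                   // must fit the 8-bit field
      return imm8;
    }
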
diff --git a/compiler/optimizing/find_loops_test.cc b/compiler/optimizing/find_loops_test.cc
index fab9f7a5ec..c36b1436d3 100644
--- a/compiler/optimizing/find_loops_test.cc
+++ b/compiler/optimizing/find_loops_test.cc
@@ -27,9 +27,8 @@
namespace art {
-static HGraph* TestCode(const uint16_t* data, ArenaPool* pool) {
- ArenaAllocator allocator(pool);
- HGraphBuilder builder(&allocator);
+static HGraph* TestCode(const uint16_t* data, ArenaAllocator* allocator) {
+ HGraphBuilder builder(allocator);
const DexFile::CodeItem* item = reinterpret_cast<const DexFile::CodeItem*>(data);
HGraph* graph = builder.BuildGraph(*item);
graph->BuildDominatorTree();
@@ -44,7 +43,8 @@ TEST(FindLoopsTest, CFG1) {
Instruction::RETURN_VOID);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {
ASSERT_EQ(graph->GetBlocks().Get(i)->GetLoopInformation(), nullptr);
}
@@ -56,7 +56,8 @@ TEST(FindLoopsTest, CFG2) {
Instruction::RETURN);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {
ASSERT_EQ(graph->GetBlocks().Get(i)->GetLoopInformation(), nullptr);
}
@@ -71,7 +72,8 @@ TEST(FindLoopsTest, CFG3) {
Instruction::RETURN);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {
ASSERT_EQ(graph->GetBlocks().Get(i)->GetLoopInformation(), nullptr);
}
@@ -87,7 +89,8 @@ TEST(FindLoopsTest, CFG4) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {
ASSERT_EQ(graph->GetBlocks().Get(i)->GetLoopInformation(), nullptr);
}
@@ -101,7 +104,8 @@ TEST(FindLoopsTest, CFG5) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {
ASSERT_EQ(graph->GetBlocks().Get(i)->GetLoopInformation(), nullptr);
}
@@ -146,7 +150,8 @@ TEST(FindLoopsTest, Loop1) {
Instruction::RETURN_VOID);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // pre header
@@ -173,7 +178,8 @@ TEST(FindLoopsTest, Loop2) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // goto block
@@ -197,7 +203,8 @@ TEST(FindLoopsTest, Loop3) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // goto block
@@ -222,7 +229,8 @@ TEST(FindLoopsTest, Loop4) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // pre header
@@ -248,7 +256,8 @@ TEST(FindLoopsTest, Loop5) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // pre header
@@ -271,9 +280,9 @@ TEST(FindLoopsTest, InnerLoop) {
Instruction::GOTO | 0xFB00,
Instruction::RETURN | 0 << 8);
-
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // pre header of outer loop
@@ -302,9 +311,9 @@ TEST(FindLoopsTest, TwoLoops) {
Instruction::GOTO | 0xFE00, // second loop
Instruction::RETURN | 0 << 8);
-
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // pre header of first loop
@@ -333,7 +342,8 @@ TEST(FindLoopsTest, NonNaturalLoop) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
ASSERT_TRUE(graph->GetBlocks().Get(3)->IsLoopHeader());
HLoopInformation* info = graph->GetBlocks().Get(3)->GetLoopInformation();
ASSERT_FALSE(info->GetHeader()->Dominates(info->GetBackEdges().Get(0)));
@@ -347,7 +357,8 @@ TEST(FindLoopsTest, DoWhileLoop) {
Instruction::RETURN | 0 << 8);
ArenaPool arena;
- HGraph* graph = TestCode(data, &arena);
+ ArenaAllocator allocator(&arena);
+ HGraph* graph = TestCode(data, &allocator);
TestBlock(graph, 0, false, -1); // entry block
TestBlock(graph, 1, false, -1); // pre header of first loop
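
Note: these tests previously let TestCode own the ArenaAllocator, so the arena backing the returned HGraph was released when TestCode returned and every later graph access was a use-after-free. Hoisting the allocator into each test body keeps the arena alive for as long as the graph is inspected. A minimal sketch of the hazard, with stand-in types rather than ART's real classes:

    struct Arena { /* pool-backed storage, released on destruction */ };
    struct Graph { /* allocated out of an Arena */ };

    Graph* Build(Arena* arena);      // result's memory is owned by *arena

    Graph* Broken() {
      Arena local;                   // destroyed when Broken() returns...
      return Build(&local);          // ...leaving the caller a dangling Graph*
    }

    Graph* Fixed(Arena* caller_owned) {
      return Build(caller_owned);    // caller controls the graph's lifetime
    }
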
diff --git a/runtime/arch/arm/jni_entrypoints_arm.S b/runtime/arch/arm/jni_entrypoints_arm.S
index 1be34ba80e..0e00f341d5 100644
--- a/runtime/arch/arm/jni_entrypoints_arm.S
+++ b/runtime/arch/arm/jni_entrypoints_arm.S
@@ -22,11 +22,13 @@
.extern artFindNativeMethod
ENTRY art_jni_dlsym_lookup_stub
push {r0, r1, r2, r3, lr} @ spill regs
- .save {r0, r1, r2, r3, lr}
- .pad #20
.cfi_adjust_cfa_offset 20
+ .cfi_rel_offset r0, 0
+ .cfi_rel_offset r1, 4
+ .cfi_rel_offset r2, 8
+ .cfi_rel_offset r3, 12
+ .cfi_rel_offset lr, 16
sub sp, #12 @ pad stack pointer to align frame
- .pad #12
.cfi_adjust_cfa_offset 12
blx artFindNativeMethod
mov r12, r0 @ save result in r12
@@ -35,9 +37,12 @@ ENTRY art_jni_dlsym_lookup_stub
cbz r0, 1f @ is method code null?
pop {r0, r1, r2, r3, lr} @ restore regs
.cfi_adjust_cfa_offset -20
+ .cfi_restore r0
+ .cfi_restore r1
+ .cfi_restore r2
+ .cfi_restore r3
+ .cfi_restore lr
bx r12 @ if non-null, tail call to method's code
1:
- .cfi_adjust_cfa_offset 20
pop {r0, r1, r2, r3, pc} @ restore regs and return to caller to handle exception
- .cfi_adjust_cfa_offset -20
END art_jni_dlsym_lookup_stub
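
Note: .save and .pad are legacy ARM EHABI annotations that DWARF-based unwinders ignore; the stub now carries full CFI instead. The .cfi_rel_offset directives record where each spilled register sits relative to the stack pointer, so a backtrace from inside artFindNativeMethod can recover r0-r3 and lr, and the matching .cfi_restore lines mark them live in registers again after the pop. Roughly, the bookkeeping pairs up like this (generic illustration, not the stub itself):

    @ Every stack move gets a mirrored CFI note.
    push  {r4, lr}               @ 8 bytes pushed
    .cfi_adjust_cfa_offset 8     @ CFA is now 8 bytes above sp
    .cfi_rel_offset r4, 0        @ r4 saved at [sp, #0]
    .cfi_rel_offset lr, 4        @ lr saved at [sp, #4]
    pop   {r4, lr}
    .cfi_adjust_cfa_offset -8
    .cfi_restore r4              @ values are back in registers
    .cfi_restore lr
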
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 637fd02a91..4342234f29 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -2140,7 +2140,7 @@ mirror::Class* ClassLinker::FindClass(Thread* self, const char* descriptor,
if (pair.second != nullptr) {
mirror::Class* klass = LookupClass(descriptor, nullptr);
if (klass != nullptr) {
- return klass;
+ return EnsureResolved(self, descriptor, klass);
}
klass = DefineClass(descriptor, NullHandle<mirror::ClassLoader>(), *pair.first,
*pair.second);
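
Note: returning the result of LookupClass directly could hand out a class that another thread is still defining. EnsureResolved blocks until the class reaches a resolved status (or fails), which is what the other lookup paths in FindClass already do. A hedged sketch of the pattern, with simplified signatures rather than ART's exact API:

    struct Thread; struct Class;
    Class* Lookup(const char* descriptor);
    Class* EnsureResolved(Thread* self, const char* descriptor, Class* klass);
    Class* Define(Thread* self, const char* descriptor);

    Class* FindOrDefine(Thread* self, const char* descriptor) {
      Class* klass = Lookup(descriptor);     // fast path: already in the table
      if (klass != nullptr) {
        // May still be mid-link on another thread; wait until it is usable
        // instead of returning a half-built class.
        return EnsureResolved(self, descriptor, klass);
      }
      return Define(self, descriptor);       // slow path: define it ourselves
    }
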
@@ -4427,7 +4427,6 @@ bool ClassLinker::LinkClass(Thread* self, const char* descriptor, ConstHandle<mi
return false;
}
CreateReferenceInstanceOffsets(klass);
- CreateReferenceStaticOffsets(klass);
CHECK_EQ(mirror::Class::kStatusLoaded, klass->GetStatus());
if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
@@ -5160,20 +5159,13 @@ void ClassLinker::CreateReferenceInstanceOffsets(ConstHandle<mirror::Class> klas
return;
}
}
- CreateReferenceOffsets(klass, false, reference_offsets);
-}
-
-void ClassLinker::CreateReferenceStaticOffsets(ConstHandle<mirror::Class> klass) {
- CreateReferenceOffsets(klass, true, 0);
+ CreateReferenceOffsets(klass, reference_offsets);
}
-void ClassLinker::CreateReferenceOffsets(ConstHandle<mirror::Class> klass, bool is_static,
+void ClassLinker::CreateReferenceOffsets(ConstHandle<mirror::Class> klass,
uint32_t reference_offsets) {
- size_t num_reference_fields =
- is_static ? klass->NumReferenceStaticFieldsDuringLinking()
- : klass->NumReferenceInstanceFieldsDuringLinking();
- mirror::ObjectArray<mirror::ArtField>* fields =
- is_static ? klass->GetSFields() : klass->GetIFields();
+ size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
+ mirror::ObjectArray<mirror::ArtField>* fields = klass->GetIFields();
// All of the fields that contain object references are guaranteed
// to be at the beginning of the fields list.
for (size_t i = 0; i < num_reference_fields; ++i) {
@@ -5191,12 +5183,7 @@ void ClassLinker::CreateReferenceOffsets(ConstHandle<mirror::Class> klass, bool
break;
}
}
- // Update fields in klass
- if (is_static) {
- klass->SetReferenceStaticOffsets(reference_offsets);
- } else {
- klass->SetReferenceInstanceOffsets(reference_offsets);
- }
+ klass->SetReferenceInstanceOffsets(reference_offsets);
}
mirror::String* ClassLinker::ResolveString(const DexFile& dex_file, uint32_t string_idx,
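
Note: the static-field bitmap is dropped rather than generalized because it buys little: reference fields are laid out first in a class's field array (the invariant stated in the comment above), so static references can be enumerated directly, and statics live on a single object per class. A sketch of the direct walk this relies on, with simplified types:

    #include <cstddef>
    #include <cstdint>

    struct Field { uint32_t offset; };
    struct Class {
      size_t num_reference_static_fields;  // reference fields come first
      Field* sfields;
    };

    template <typename Visitor>
    void VisitStaticRefs(Class* klass, Visitor&& visit) {
      for (size_t i = 0; i < klass->num_reference_static_fields; ++i) {
        visit(klass->sfields[i].offset);   // one callback per reference slot
      }
    }
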
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 0248a219be..158816d193 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -533,10 +533,7 @@ class ClassLinker {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void CreateReferenceInstanceOffsets(ConstHandle<mirror::Class> klass)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void CreateReferenceStaticOffsets(ConstHandle<mirror::Class> klass)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void CreateReferenceOffsets(ConstHandle<mirror::Class> klass, bool is_static,
- uint32_t reference_offsets)
+ void CreateReferenceOffsets(ConstHandle<mirror::Class> klass, uint32_t reference_offsets)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// For use by ImageWriter to find DexCaches for its roots
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 69c281e61b..b2509186e0 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -528,7 +528,6 @@ struct ClassOffsets : public CheckOffsets<mirror::Class> {
offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::Class, object_size_), "objectSize"));
offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::Class, primitive_type_), "primitiveType"));
offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::Class, reference_instance_offsets_), "referenceInstanceOffsets"));
- offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::Class, reference_static_offsets_), "referenceStaticOffsets"));
offsets.push_back(CheckOffset(OFFSETOF_MEMBER(mirror::Class, status_), "status"));
};
};
diff --git a/runtime/indirect_reference_table-inl.h b/runtime/indirect_reference_table-inl.h
index 9bf3ea258d..9ee6d897ab 100644
--- a/runtime/indirect_reference_table-inl.h
+++ b/runtime/indirect_reference_table-inl.h
@@ -27,15 +27,6 @@ namespace mirror {
class Object;
} // namespace mirror
-inline void IrtIterator::SkipNullsAndTombstones() {
- // We skip NULLs and tombstones. Clients don't want to see implementation details.
- while (i_ < capacity_ &&
- (table_[i_].IsNull() ||
- Runtime::Current()->IsClearedJniWeakGlobal(table_[i_].Read<kWithoutReadBarrier>()))) {
- ++i_;
- }
-}
-
// Verifies that the indirect table lookup is valid.
// Returns "false" if something looks bad.
inline bool IndirectReferenceTable::GetChecked(IndirectRef iref) const {
diff --git a/runtime/indirect_reference_table.cc b/runtime/indirect_reference_table.cc
index 49bffa404e..2278408a52 100644
--- a/runtime/indirect_reference_table.cc
+++ b/runtime/indirect_reference_table.cc
@@ -161,10 +161,12 @@ IndirectRef IndirectReferenceTable::Add(uint32_t cookie, mirror::Object* obj) {
}
void IndirectReferenceTable::AssertEmpty() {
- if (UNLIKELY(begin() != end())) {
- ScopedObjectAccess soa(Thread::Current());
- LOG(FATAL) << "Internal Error: non-empty local reference table\n"
- << MutatorLockedDumpable<IndirectReferenceTable>(*this);
+ for (size_t i = 0; i < Capacity(); ++i) {
+ if (!table_[i].IsNull()) {
+ ScopedObjectAccess soa(Thread::Current());
+ LOG(FATAL) << "Internal Error: non-empty local reference table\n"
+ << MutatorLockedDumpable<IndirectReferenceTable>(*this);
+ }
}
}
@@ -264,6 +266,11 @@ bool IndirectReferenceTable::Remove(uint32_t cookie, IndirectRef iref) {
void IndirectReferenceTable::VisitRoots(RootCallback* callback, void* arg, uint32_t tid,
RootType root_type) {
for (auto ref : *this) {
+ if (*ref == nullptr) {
+ // Need to skip null entries to make it possible to do the
+ // non-null check after the callback.
+ continue;
+ }
callback(ref, arg, tid, root_type);
DCHECK(*ref != nullptr);
}
diff --git a/runtime/indirect_reference_table.h b/runtime/indirect_reference_table.h
index 562ba1e8df..5291e508a9 100644
--- a/runtime/indirect_reference_table.h
+++ b/runtime/indirect_reference_table.h
@@ -206,12 +206,10 @@ class IrtIterator {
explicit IrtIterator(GcRoot<mirror::Object>* table, size_t i, size_t capacity)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
: table_(table), i_(i), capacity_(capacity) {
- SkipNullsAndTombstones();
}
IrtIterator& operator++() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
++i_;
- SkipNullsAndTombstones();
return *this;
}
@@ -225,8 +223,6 @@ class IrtIterator {
}
private:
- void SkipNullsAndTombstones() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
GcRoot<mirror::Object>* const table_;
size_t i_;
const size_t capacity_;
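
Note: with SkipNullsAndTombstones gone, IrtIterator is a plain cursor: it yields every slot, including nulls and cleared weak globals, and no longer calls into Runtime::Current() on each step. Filtering moves to the call sites, as in AssertEmpty and VisitRoots above. The new contract, sketched with simplified types:

    #include <cstddef>

    struct Object;
    void Process(Object** entry);

    void VisitAll(Object** table, size_t capacity) {
      for (size_t i = 0; i < capacity; ++i) {  // the cursor skips nothing
        if (table[i] == nullptr) {
          continue;                            // callers now filter for themselves
        }
        Process(&table[i]);
      }
    }
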
diff --git a/runtime/java_vm_ext.cc b/runtime/java_vm_ext.cc
index 424addb2a8..0ac5b88e80 100644
--- a/runtime/java_vm_ext.cc
+++ b/runtime/java_vm_ext.cc
@@ -760,6 +760,11 @@ void JavaVMExt::SweepJniWeakGlobals(IsMarkedCallback* callback, void* arg) {
for (mirror::Object** entry : weak_globals_) {
// Since this is called by the GC, we don't need a read barrier.
mirror::Object* obj = *entry;
+ if (obj == nullptr) {
+ // Need to skip null here to distinguish between null entries
+ // and cleared weak ref entries.
+ continue;
+ }
mirror::Object* new_obj = callback(obj, arg);
if (new_obj == nullptr) {
new_obj = Runtime::Current()->GetClearedJniWeakGlobal();
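
Note: in the sweep, a null slot is a free table entry, while a cleared weak reference is represented by the distinguished sentinel object; passing a null referent to the IsMarkedCallback would conflate the two. The three cases, sketched (IsMarked and ClearedSentinel stand in for the real callback and Runtime accessor):

    #include <cstddef>

    struct Object;
    Object* IsMarked(Object* obj);   // null if the object is unreachable
    Object* ClearedSentinel();       // the "cleared weak ref" marker object

    void Sweep(Object** table, size_t capacity) {
      for (size_t i = 0; i < capacity; ++i) {
        Object* obj = table[i];
        if (obj == nullptr) {
          continue;                  // free slot: nothing to sweep
        }
        Object* new_obj = IsMarked(obj);
        // Dead referents become the sentinel, never null, so a cleared weak
        // global stays distinguishable from an empty slot.
        table[i] = (new_obj != nullptr) ? new_obj : ClearedSentinel();
      }
    }
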
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 760d54cfa1..e7d8163715 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -292,18 +292,6 @@ void Class::SetReferenceInstanceOffsets(uint32_t new_reference_offsets) {
new_reference_offsets);
}
-void Class::SetReferenceStaticOffsets(uint32_t new_reference_offsets) {
- if (new_reference_offsets != CLASS_WALK_SUPER) {
- // Sanity check that the number of bits set in the reference offset bitmap
- // agrees with the number of references
- CHECK_EQ((size_t)POPCOUNT(new_reference_offsets),
- NumReferenceStaticFieldsDuringLinking());
- }
- // Not called within a transaction.
- SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, reference_static_offsets_),
- new_reference_offsets);
-}
-
bool Class::IsInSamePackage(const StringPiece& descriptor1, const StringPiece& descriptor2) {
size_t i = 0;
while (descriptor1[i] != '\0' && descriptor1[i] == descriptor2[i]) {
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 0d30bc68a3..cf9501ad3c 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -871,14 +871,6 @@ class MANAGED Class FINAL : public Object {
// TODO: uint16_t
void SetStaticField(uint32_t i, ArtField* f) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- uint32_t GetReferenceStaticOffsets() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetField32<kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(Class, reference_static_offsets_));
- }
-
- void SetReferenceStaticOffsets(uint32_t new_reference_offsets)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
// Find a static or instance field using the JLS resolution order
static ArtField* FindField(Thread* self, Handle<Class> klass, const StringPiece& name,
const StringPiece& type)
@@ -1150,9 +1142,6 @@ class MANAGED Class FINAL : public Object {
// Bitmap of offsets of ifields.
uint32_t reference_instance_offsets_;
- // Bitmap of offsets of sfields.
- uint32_t reference_static_offsets_;
-
// State of class initialization.
Status status_;
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 8c1dc7d22c..166ea9c3e5 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -888,9 +888,9 @@ inline bool Object::CasFieldStrongSequentiallyConsistentObject(MemberOffset fiel
template<bool kVisitClass, bool kIsStatic, typename Visitor>
inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) {
- if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
+ if (!kIsStatic && LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
if (!kVisitClass) {
- // Mask out the class from the reference offsets.
+ // Mask out the class from the reference offsets.
ref_offsets ^= kWordHighBitMask;
}
DCHECK_EQ(ClassOffset().Uint32Value(), 0U);
@@ -902,7 +902,7 @@ inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& v
ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
}
} else {
- // There is no reference offset bitmap. In the non-static case, walk up the class
+ // There is no reference offset bitmap. In the non-static case, walk up the class
// inheritance hierarchy and find reference offsets the hard way. In the static case, just
// consider this class.
for (mirror::Class* klass = kIsStatic ? AsClass() : GetClass(); klass != nullptr;
@@ -930,8 +930,7 @@ inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Vi
template<bool kVisitClass, typename Visitor>
inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
DCHECK(!klass->IsTemp());
- klass->VisitFieldsReferences<kVisitClass, true>(
- klass->GetReferenceStaticOffsets<kVerifyNone>(), visitor);
+ klass->VisitFieldsReferences<kVisitClass, true>(0, visitor);
}
template <const bool kVisitClass, VerifyObjectFlags kVerifyFlags, typename Visitor,
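
Note: the added !kIsStatic guard means static visits always take the else branch and walk the class's reference fields directly; only instance visits still use the per-class bitmap fast path. That is what lets VisitStaticFieldsReferences pass a meaningless 0. The dispatch, sketched (simplified; walk_super_marker stands in for CLASS_WALK_SUPER):

    #include <cstdint>

    template <bool kIsStatic>
    void VisitFields(uint32_t ref_offsets, uint32_t walk_super_marker) {
      if (!kIsStatic && ref_offsets != walk_super_marker) {
        // Fast path: instance fields located via the reference-offset bitmap.
      } else {
        // Slow path: enumerate reference fields from the field array
        // (now taken unconditionally for statics).
      }
    }
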
diff --git a/runtime/runtime-inl.h b/runtime/runtime-inl.h
index 8b632b2459..fe05073fa2 100644
--- a/runtime/runtime-inl.h
+++ b/runtime/runtime-inl.h
@@ -29,9 +29,7 @@ inline bool Runtime::IsClearedJniWeakGlobal(mirror::Object* obj) {
inline mirror::Object* Runtime::GetClearedJniWeakGlobal() {
mirror::Object* obj = sentinel_.Read();
- if (obj == nullptr) {
- LOG(ERROR) << "Failed to return cleared JNI weak global sentinel";
- }
+ DCHECK(obj != nullptr);
return obj;
}
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 33b09a3b64..105c01110f 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -807,6 +807,7 @@ bool Runtime::Init(const RuntimeOptions& raw_options, bool ignore_unrecognized)
// Initialize the special sentinel_ value early.
sentinel_ = GcRoot<mirror::Object>(class_linker_->AllocObject(self));
+ CHECK(sentinel_.Read() != nullptr);
verifier::MethodVerifier::Init();
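
Note: the sentinel's null check moves from use time (an ERROR log that could not recover anyway) to creation time: Runtime::Init now aborts immediately if the sentinel cannot be allocated, and GetClearedJniWeakGlobal merely asserts the invariant in debug builds. A sketch of the "validate once at startup, assume thereafter" shape, with stand-in types (ART uses CHECK, which aborts in all builds, and DCHECK in the getter):

    #include <cassert>

    struct Object;
    Object* AllocateSentinel();      // may fail only during early startup

    struct Runtime {
      Object* sentinel_ = nullptr;
      void Init() {
        sentinel_ = AllocateSentinel();
        assert(sentinel_ != nullptr && "fail fast, once, at startup");
      }
      Object* GetClearedJniWeakGlobal() {
        assert(sentinel_ != nullptr);  // invariant established by Init()
        return sentinel_;
      }
    };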