/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_OBJECT_INL_H_
#define ART_RUNTIME_MIRROR_OBJECT_INL_H_

#include "object.h"

#include "art_field.h"
#include "art_method.h"
#include "atomic.h"
#include "array-inl.h"
#include "class.h"
#include "lock_word-inl.h"
#include "monitor.h"
#include "object_array-inl.h"
#include "read_barrier-inl.h"
#include "runtime.h"
#include "reference.h"
#include "throwable.h"

namespace art {
namespace mirror {

inline uint32_t Object::ClassSize() {
  uint32_t vtable_entries = kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Class* Object::GetClass() {
  return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>(
      OFFSET_OF_OBJECT_MEMBER(Object, klass_));
}

template<VerifyObjectFlags kVerifyFlags>
inline void Object::SetClass(Class* new_klass) {
  // new_klass may be NULL prior to class linker initialization.
  // We don't mark the card as this occurs as part of object allocation. Not all objects have
  // backing cards, such as large objects.
  // We use the non-transactional version since we can't undo this write. We also disable checking
  // as we may run in transaction mode here.
  SetFieldObjectWithoutWriteBarrier<false, false,
      static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis)>(
          OFFSET_OF_OBJECT_MEMBER(Object, klass_), new_klass);
}

inline LockWord Object::GetLockWord(bool as_volatile) {
  if (as_volatile) {
    return LockWord(GetField32Volatile(OFFSET_OF_OBJECT_MEMBER(Object, monitor_)));
  }
  return LockWord(GetField32(OFFSET_OF_OBJECT_MEMBER(Object, monitor_)));
}

inline void Object::SetLockWord(LockWord new_val, bool as_volatile) {
  // Force use of non-transactional mode and do not check.
  if (as_volatile) {
    SetField32Volatile<false, false>(OFFSET_OF_OBJECT_MEMBER(Object, monitor_),
                                     new_val.GetValue());
  } else {
    SetField32<false, false>(OFFSET_OF_OBJECT_MEMBER(Object, monitor_), new_val.GetValue());
  }
}

inline bool Object::CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val) {
  // Force use of non-transactional mode and do not check.
  return CasFieldWeakSequentiallyConsistent32<false, false>(
      OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue());
}
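// Usage sketch (illustrative only): the lock word CASes here are the building block for thin
// locking. A caller-side retry loop might look roughly like the following; "thread_id" and the
// inflation fallback are placeholders, and the real protocol lives in Monitor::MonitorEnter.
//
//   LockWord expected = obj->GetLockWord(false);
//   if (expected.GetState() == LockWord::kUnlocked) {
//     LockWord desired = LockWord::FromThinLockId(thread_id, 0);
//     // A weak CAS may fail spuriously even when the lock word still matches, so loop or take
//     // the slow path on failure.
//     if (!obj->CasLockWordWeakSequentiallyConsistent(expected, desired)) {
//       // Retry, or inflate to a fat monitor.
//     }
//   }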
inline bool Object::CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val) {
  // Force use of non-transactional mode and do not check.
  return CasFieldWeakRelaxed32<false, false>(
      OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue());
}

inline uint32_t Object::GetLockOwnerThreadId() {
  return Monitor::GetLockOwnerThreadId(this);
}

inline mirror::Object* Object::MonitorEnter(Thread* self) {
  return Monitor::MonitorEnter(self, this);
}

inline bool Object::MonitorExit(Thread* self) {
  return Monitor::MonitorExit(self, this);
}

inline void Object::Notify(Thread* self) {
  Monitor::Notify(self, this);
}

inline void Object::NotifyAll(Thread* self) {
  Monitor::NotifyAll(self, this);
}

inline void Object::Wait(Thread* self) {
  Monitor::Wait(self, this, 0, 0, true, kWaiting);
}

inline void Object::Wait(Thread* self, int64_t ms, int32_t ns) {
  Monitor::Wait(self, this, ms, ns, true, kTimedWaiting);
}

inline Object* Object::GetReadBarrierPointer() {
#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
  DCHECK(kUseBakerOrBrooksReadBarrier);
  return GetFieldObject<Object, kVerifyNone, kWithoutReadBarrier>(
      OFFSET_OF_OBJECT_MEMBER(Object, x_rb_ptr_));
#else
  LOG(FATAL) << "Unreachable";
  return nullptr;
#endif
}

inline void Object::SetReadBarrierPointer(Object* rb_ptr) {
#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
  DCHECK(kUseBakerOrBrooksReadBarrier);
  // We don't mark the card as this occurs as part of object allocation. Not all objects have
  // backing cards, such as large objects.
  SetFieldObjectWithoutWriteBarrier<false, false, kVerifyNone>(
      OFFSET_OF_OBJECT_MEMBER(Object, x_rb_ptr_), rb_ptr);
#else
  LOG(FATAL) << "Unreachable";
#endif
}
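// Note (illustrative): AssertReadBarrierPointer() below expects a Baker-style pointer to be null
// and a Brooks-style forwarding pointer to point back at the object itself. A collector-side
// update is a CAS that tolerates losing the race; "from_ref"/"to_ref" below are hypothetical:
//
//   if (obj->GetReadBarrierPointer() == from_ref) {
//     obj->AtomicSetReadBarrierPointer(from_ref, to_ref);  // Returns false if another thread won.
//   }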
inline bool Object::AtomicSetReadBarrierPointer(Object* expected_rb_ptr, Object* rb_ptr) {
#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
  DCHECK(kUseBakerOrBrooksReadBarrier);
  MemberOffset offset = OFFSET_OF_OBJECT_MEMBER(Object, x_rb_ptr_);
  byte* raw_addr = reinterpret_cast<byte*>(this) + offset.SizeValue();
  Atomic<uint32_t>* atomic_rb_ptr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);
  HeapReference<Object> expected_ref(HeapReference<Object>::FromMirrorPtr(expected_rb_ptr));
  HeapReference<Object> new_ref(HeapReference<Object>::FromMirrorPtr(rb_ptr));
  do {
    if (UNLIKELY(atomic_rb_ptr->LoadRelaxed() != expected_ref.reference_)) {
      // Lost the race.
      return false;
    }
  } while (!atomic_rb_ptr->CompareExchangeWeakSequentiallyConsistent(expected_ref.reference_,
                                                                     new_ref.reference_));
  DCHECK_EQ(new_ref.reference_, atomic_rb_ptr->LoadRelaxed());
  return true;
#else
  LOG(FATAL) << "Unreachable";
  return false;
#endif
}

inline void Object::AssertReadBarrierPointer() const {
  if (kUseBakerReadBarrier) {
    Object* obj = const_cast<Object*>(this);
    DCHECK(obj->GetReadBarrierPointer() == nullptr)
        << "Bad Baker pointer: obj=" << reinterpret_cast<void*>(obj)
        << " ptr=" << reinterpret_cast<void*>(obj->GetReadBarrierPointer());
  } else if (kUseBrooksReadBarrier) {
    Object* obj = const_cast<Object*>(this);
    DCHECK_EQ(obj, obj->GetReadBarrierPointer())
        << "Bad Brooks pointer: obj=" << reinterpret_cast<void*>(obj)
        << " ptr=" << reinterpret_cast<void*>(obj->GetReadBarrierPointer());
  } else {
    LOG(FATAL) << "Unreachable";
  }
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::VerifierInstanceOf(Class* klass) {
  DCHECK(klass != NULL);
  DCHECK(GetClass<kVerifyFlags>() != NULL);
  return klass->IsInterface() || InstanceOf(klass);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::InstanceOf(Class* klass) {
  DCHECK(klass != NULL);
  DCHECK(GetClass<kVerifyFlags>() != NULL);
  return klass->IsAssignableFrom(GetClass<kVerifyFlags>());
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsClass() {
  Class* java_lang_Class = GetClass<kVerifyFlags, kReadBarrierOption>()->
      template GetClass<kVerifyFlags, kReadBarrierOption>();
  return GetClass<static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis),
      kReadBarrierOption>() == java_lang_Class;
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Class* Object::AsClass() {
  DCHECK((IsClass<kVerifyFlags, kReadBarrierOption>()));
  return down_cast<Class*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsObjectArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  return IsArrayInstance<kVerifyFlags>() &&
      !GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitive();
}

template<class T, VerifyObjectFlags kVerifyFlags>
inline ObjectArray<T>* Object::AsObjectArray() {
  DCHECK(IsObjectArray<kVerifyFlags>());
  return down_cast<ObjectArray<T>*>(this);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsArrayInstance() {
  return GetClass<kVerifyFlags, kReadBarrierOption>()->
      template IsArrayClass<kVerifyFlags, kReadBarrierOption>();
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsArtField() {
  return GetClass<kVerifyFlags, kReadBarrierOption>()->
      template IsArtFieldClass<kReadBarrierOption>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ArtField* Object::AsArtField() {
  DCHECK(IsArtField<kVerifyFlags>());
  return down_cast<ArtField*>(this);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsArtMethod() {
  return GetClass<kVerifyFlags, kReadBarrierOption>()->
      template IsArtMethodClass<kReadBarrierOption>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ArtMethod* Object::AsArtMethod() {
  DCHECK(IsArtMethod<kVerifyFlags>());
  return down_cast<ArtMethod*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsTypeOfReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline Reference* Object::AsReference() {
  DCHECK(IsReferenceInstance<kVerifyFlags>());
  return down_cast<Reference*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline Array* Object::AsArray() {
  DCHECK((IsArrayInstance<kVerifyFlags>()));
  return down_cast<Array*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline BooleanArray* Object::AsBooleanArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->GetComponentType()->IsPrimitiveBoolean());
  return down_cast<BooleanArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ByteArray* Object::AsByteArray() {
  static const VerifyObjectFlags kNewFlags =
      static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveByte());
  return down_cast<ByteArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ByteArray* Object::AsByteSizedArray() {
  constexpr VerifyObjectFlags kNewFlags =
      static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveByte() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveBoolean());
  return down_cast<ByteArray*>(this);
}
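// Note (illustrative): the size-oriented casts (AsByteSizedArray() above, AsShortSizedArray()
// below) only guarantee the component width, not the exact component type, and AsIntArray() /
// AsLongArray() likewise tolerate float[] / double[]. Presumably this lets callers that only
// move bits of a given width (e.g. primitive array copies) share one code path.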
template<VerifyObjectFlags kVerifyFlags>
inline CharArray* Object::AsCharArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveChar());
  return down_cast<CharArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ShortArray* Object::AsShortArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveShort());
  return down_cast<ShortArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ShortArray* Object::AsShortSizedArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveShort() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveChar());
  return down_cast<ShortArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline IntArray* Object::AsIntArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveInt() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveFloat());
  return down_cast<IntArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline LongArray* Object::AsLongArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveLong() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveDouble());
  return down_cast<LongArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline FloatArray* Object::AsFloatArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveFloat());
  return down_cast<FloatArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline DoubleArray* Object::AsDoubleArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveDouble());
  return down_cast<DoubleArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline String* Object::AsString() {
  DCHECK(GetClass<kVerifyFlags>()->IsStringClass());
  return down_cast<String*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline Throwable* Object::AsThrowable() {
  DCHECK(GetClass<kVerifyFlags>()->IsThrowableClass());
  return down_cast<Throwable*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsWeakReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsWeakReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsSoftReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsSoftReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsFinalizerReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsFinalizerReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline FinalizerReference* Object::AsFinalizerReference() {
  DCHECK(IsFinalizerReferenceInstance<kVerifyFlags>());
  return down_cast<FinalizerReference*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsPhantomReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsPhantomReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline size_t Object::SizeOf() {
  size_t result;
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  if (IsArrayInstance<kVerifyFlags, kReadBarrierOption>()) {
    result = AsArray<kNewFlags>()->template SizeOf<kNewFlags, kReadBarrierOption>();
  } else if (IsClass<kNewFlags, kReadBarrierOption>()) {
    result = AsClass<kNewFlags>()->template SizeOf<kNewFlags, kReadBarrierOption>();
  } else {
    result = GetClass<kNewFlags, kReadBarrierOption>()->template GetObjectSize<kNewFlags>();
  }
  DCHECK_GE(result, sizeof(Object))
      << " class=" << PrettyTypeOf(GetClass<kNewFlags, kReadBarrierOption>());
  DCHECK(!(IsArtField<kNewFlags, kReadBarrierOption>()) || result == sizeof(ArtField));
  DCHECK(!(IsArtMethod<kNewFlags, kReadBarrierOption>()) || result == sizeof(ArtMethod));
  return result;
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline int32_t Object::GetField32(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
  const int32_t* word_addr = reinterpret_cast<const int32_t*>(raw_addr);
  if (UNLIKELY(kIsVolatile)) {
    return reinterpret_cast<const Atomic<int32_t>*>(word_addr)->LoadSequentiallyConsistent();
  } else {
    return reinterpret_cast<const Atomic<int32_t>*>(word_addr)->LoadJavaData();
  }
}
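// Usage sketch (illustrative only): raw field accessors take a MemberOffset, normally produced
// with OFFSET_OF_OBJECT_MEMBER against a mirror type. "MyMirror" and its "count_" field below
// are hypothetical:
//
//   int32_t c = GetField32(OFFSET_OF_OBJECT_MEMBER(MyMirror, count_));
//   SetField32<false>(OFFSET_OF_OBJECT_MEMBER(MyMirror, count_), c + 1);
//
// The template arguments select transaction recording/checking, verification and volatility;
// the leading "false" above just means "not in a transaction".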
template<VerifyObjectFlags kVerifyFlags>
inline int32_t Object::GetField32Volatile(MemberOffset field_offset) {
  return GetField32<kVerifyFlags, true>(field_offset);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
    bool kIsVolatile>
inline void Object::SetField32(MemberOffset field_offset, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset, GetField32(field_offset),
                                           kIsVolatile);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  int32_t* word_addr = reinterpret_cast<int32_t*>(raw_addr);
  if (kIsVolatile) {
    reinterpret_cast<Atomic<int32_t>*>(word_addr)->StoreSequentiallyConsistent(new_value);
  } else {
    reinterpret_cast<Atomic<int32_t>*>(word_addr)->StoreJavaData(new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetField32Volatile(MemberOffset field_offset, int32_t new_value) {
  SetField32<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(field_offset, new_value);
}

// TODO: Pass memory_order_ and strong/weak as arguments to avoid code duplication?

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
                                                         int32_t old_value, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr);
  return atomic_addr->CompareExchangeWeakSequentiallyConsistent(old_value, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakRelaxed32(MemberOffset field_offset,
                                          int32_t old_value, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr);
  return atomic_addr->CompareExchangeWeakRelaxed(old_value, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset,
                                                           int32_t old_value, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr);
  return atomic_addr->CompareExchangeStrongSequentiallyConsistent(old_value, new_value);
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline int64_t Object::GetField64(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
  const int64_t* addr = reinterpret_cast<const int64_t*>(raw_addr);
  if (kIsVolatile) {
    return reinterpret_cast<const Atomic<int64_t>*>(addr)->LoadSequentiallyConsistent();
  } else {
    return reinterpret_cast<const Atomic<int64_t>*>(addr)->LoadJavaData();
  }
}

template<VerifyObjectFlags kVerifyFlags>
inline int64_t Object::GetField64Volatile(MemberOffset field_offset) {
  return GetField64<kVerifyFlags, true>(field_offset);
}
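// Note (illustrative): the CasField*Weak* variants above may fail spuriously even when the field
// still holds old_value, so they belong in retry loops; the Strong variants only fail on a real
// mismatch. A hypothetical lock-free increment of a 32-bit field could look like:
//
//   int32_t old_value;
//   do {
//     old_value = GetField32(offset);
//   } while (!CasFieldWeakSequentiallyConsistent32<false>(offset, old_value, old_value + 1));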
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
    bool kIsVolatile>
inline void Object::SetField64(MemberOffset field_offset, int64_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset, GetField64(field_offset),
                                           kIsVolatile);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  int64_t* addr = reinterpret_cast<int64_t*>(raw_addr);
  if (kIsVolatile) {
    reinterpret_cast<Atomic<int64_t>*>(addr)->StoreSequentiallyConsistent(new_value);
  } else {
    reinterpret_cast<Atomic<int64_t>*>(addr)->StoreJavaData(new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetField64Volatile(MemberOffset field_offset, int64_t new_value) {
  return SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(field_offset,
                                                                               new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset,
                                                         int64_t old_value, int64_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);
  return atomic_addr->CompareExchangeWeakSequentiallyConsistent(old_value, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset,
                                                           int64_t old_value, int64_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);
  return atomic_addr->CompareExchangeStrongSequentiallyConsistent(old_value, new_value);
}

template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption,
    bool kIsVolatile>
inline T* Object::GetFieldObject(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  HeapReference<T>* objref_addr = reinterpret_cast<HeapReference<T>*>(raw_addr);
  T* result = ReadBarrier::Barrier<T, kReadBarrierOption>(this, field_offset, objref_addr);
  if (kIsVolatile) {
    // TODO: Refactor to use a SequentiallyConsistent load instead.
    QuasiAtomic::ThreadFenceAcquire();  // Ensure visibility of operations preceding store.
  }
  if (kVerifyFlags & kVerifyReads) {
    VerifyObject(result);
  }
  return result;
}

template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline T* Object::GetFieldObjectVolatile(MemberOffset field_offset) {
  return GetFieldObject<T, kVerifyFlags, kReadBarrierOption, true>(field_offset);
}
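// Note (illustrative): every reference load above funnels through ReadBarrier::Barrier<>(), which
// is what lets collectors that need it intercept the loaded value. A hypothetical reference field
// "next_" on a hypothetical mirror type "MyNode" would be read as:
//
//   MyNode* next = GetFieldObject<MyNode>(OFFSET_OF_OBJECT_MEMBER(MyNode, next_));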
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
    bool kIsVolatile>
inline void Object::SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      Object* new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    mirror::Object* obj;
    if (kIsVolatile) {
      obj = GetFieldObjectVolatile<Object>(field_offset);
    } else {
      obj = GetFieldObject<Object>(field_offset);
    }
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, obj, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  if (kVerifyFlags & kVerifyWrites) {
    VerifyObject(new_value);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  HeapReference<Object>* objref_addr = reinterpret_cast<HeapReference<Object>*>(raw_addr);
  if (kIsVolatile) {
    // TODO: Refactor to use a SequentiallyConsistent store instead.
    QuasiAtomic::ThreadFenceRelease();  // Ensure that prior accesses are visible before store.
    objref_addr->Assign(new_value);
    QuasiAtomic::ThreadFenceSequentiallyConsistent();
                                // Ensure this store occurs before any volatile loads.
  } else {
    objref_addr->Assign(new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
    bool kIsVolatile>
inline void Object::SetFieldObject(MemberOffset field_offset, Object* new_value) {
  SetFieldObjectWithoutWriteBarrier<kTransactionActive, kCheckTransaction, kVerifyFlags,
      kIsVolatile>(field_offset, new_value);
  if (new_value != nullptr) {
    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
    // TODO: Check field assignment could theoretically cause thread suspension, TODO: fix this.
    CheckFieldAssignment(field_offset, new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value) {
  SetFieldObject<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(field_offset,
                                                                            new_value);
}

template<VerifyObjectFlags kVerifyFlags>
inline HeapReference<Object>* Object::GetFieldObjectReferenceAddr(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  return reinterpret_cast<HeapReference<Object>*>(reinterpret_cast<byte*>(this) +
      field_offset.Int32Value());
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistentObject(MemberOffset field_offset,
                                                             Object* old_value,
                                                             Object* new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  if (kVerifyFlags & kVerifyWrites) {
    VerifyObject(new_value);
  }
  if (kVerifyFlags & kVerifyReads) {
    VerifyObject(old_value);
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, old_value, true);
  }
  HeapReference<Object> old_ref(HeapReference<Object>::FromMirrorPtr(old_value));
  HeapReference<Object> new_ref(HeapReference<Object>::FromMirrorPtr(new_value));
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);
  bool success = atomic_addr->CompareExchangeWeakSequentiallyConsistent(old_ref.reference_,
                                                                        new_ref.reference_);
  if (success) {
    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
  }
  return success;
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldStrongSequentiallyConsistentObject(MemberOffset field_offset,
                                                               Object* old_value,
                                                               Object* new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  if (kVerifyFlags & kVerifyWrites) {
    VerifyObject(new_value);
  }
  if (kVerifyFlags & kVerifyReads) {
    VerifyObject(old_value);
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, old_value, true);
  }
  HeapReference<Object> old_ref(HeapReference<Object>::FromMirrorPtr(old_value));
  HeapReference<Object> new_ref(HeapReference<Object>::FromMirrorPtr(new_value));
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);
  bool success = atomic_addr->CompareExchangeStrongSequentiallyConsistent(old_ref.reference_,
                                                                          new_ref.reference_);
  if (success) {
    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
  }
  return success;
}
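// Note (illustrative): SetFieldObject() and the successful reference CASes above dirty the card
// table via Heap::WriteBarrierField() so concurrent/generational collectors see the new
// reference; SetFieldObjectWithoutWriteBarrier() skips that and is reserved for cases like
// freshly allocated objects where no card needs dirtying. A hypothetical store:
//
//   SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(MyNode, next_), new_node);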
template<bool kVisitClass, bool kIsStatic, typename Visitor>
inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) {
  if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
    if (!kVisitClass) {
      // Mask out the class from the reference offsets.
      ref_offsets ^= kWordHighBitMask;
    }
    DCHECK_EQ(ClassOffset().Uint32Value(), 0U);
    // Found a reference offset bitmap. Visit the specified offsets.
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      visitor(this, field_offset, kIsStatic);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case, walk up the class
    // inheritance hierarchy and find reference offsets the hard way. In the static case, just
    // consider this class.
    for (mirror::Class* klass = kIsStatic ? AsClass() : GetClass(); klass != nullptr;
        klass = kIsStatic ? nullptr : klass->GetSuperClass()) {
      size_t num_reference_fields = kIsStatic ? klass->NumReferenceStaticFields()
                                              : klass->NumReferenceInstanceFields();
      for (size_t i = 0; i < num_reference_fields; ++i) {
        mirror::ArtField* field = kIsStatic ? klass->GetStaticField(i)
                                            : klass->GetInstanceField(i);
        MemberOffset field_offset = field->GetOffset();
        // TODO: Do a simpler check?
        if (kVisitClass || field_offset.Uint32Value() != ClassOffset().Uint32Value()) {
          visitor(this, field_offset, kIsStatic);
        }
      }
    }
  }
}

template<bool kVisitClass, typename Visitor>
inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
  VisitFieldsReferences<kVisitClass, false>(klass->GetReferenceInstanceOffsets(), visitor);
}

template<bool kVisitClass, typename Visitor>
inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
  DCHECK(!klass->IsTemp());
  klass->VisitFieldsReferences<kVisitClass, true>(klass->GetReferenceStaticOffsets(), visitor);
}

template <const bool kVisitClass, VerifyObjectFlags kVerifyFlags, typename Visitor,
    typename JavaLangRefVisitor>
inline void Object::VisitReferences(const Visitor& visitor,
                                    const JavaLangRefVisitor& ref_visitor) {
  mirror::Class* klass = GetClass<kVerifyFlags>();
  if (klass == Class::GetJavaLangClass()) {
    AsClass<kVerifyNone>()->VisitReferences<kVisitClass>(klass, visitor);
  } else if (klass->IsArrayClass()) {
    if (klass->IsObjectArrayClass()) {
      AsObjectArray<mirror::Object, kVerifyNone>()->VisitReferences<kVisitClass>(visitor);
    } else if (kVisitClass) {
      visitor(this, ClassOffset(), false);
    }
  } else {
    DCHECK(!klass->IsVariableSize());
    VisitInstanceFieldsReferences<kVisitClass>(klass, visitor);
    if (UNLIKELY(klass->IsTypeOfReferenceClass())) {
      ref_visitor(klass, AsReference());
    }
  }
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_OBJECT_INL_H_