Diffstat (limited to 'runtime/entrypoints')
19 files changed, 258 insertions, 288 deletions
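The recurring change in the hunks below is the switch from StackReference<mirror::ArtMethod>* to a plain native ArtMethod** for the method slot at the top of a quick frame (see GetCalleeSaveMethodCaller and the QuickArgumentVisitor hunks): the method is no longer a GC-managed mirror object reached through a compressed stack reference, so finding the caller becomes raw pointer arithmetic over the callee-save frame. The following is a minimal, self-contained sketch of that pattern; ArtMethod, kCalleeSaveFrameSize, and the function name are illustrative stand-ins, not the real ART definitions.

#include <cassert>
#include <cstddef>
#include <cstdint>

// Illustrative stand-in; the real type lives in art/runtime/art_method.h.
struct ArtMethod { const char* name; };

// Hypothetical size of the callee-save frame sitting between the two method slots.
constexpr size_t kCalleeSaveFrameSize = 64;

// After this change the top of a quick frame stores a raw ArtMethod*, so walking
// to the caller is plain pointer arithmetic on ArtMethod** (previously this went
// through StackReference<mirror::ArtMethod>::AsMirrorPtr()).
ArtMethod* GetCallerOfCalleeSaveFrame(ArtMethod** callee_save_sp) {
  ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
      reinterpret_cast<uintptr_t>(callee_save_sp) + kCalleeSaveFrameSize);
  return *caller_sp;
}

int main() {
  // Fake two stacked frames: the caller's method slot sits kCalleeSaveFrameSize
  // bytes above the callee-save frame's method slot.
  alignas(16) uint8_t stack[kCalleeSaveFrameSize + sizeof(ArtMethod*)] = {};
  ArtMethod callee{"runtime callee-save method"};
  ArtMethod caller{"caller"};
  *reinterpret_cast<ArtMethod**>(stack) = &callee;
  *reinterpret_cast<ArtMethod**>(stack + kCalleeSaveFrameSize) = &caller;

  ArtMethod** sp = reinterpret_cast<ArtMethod**>(stack);
  assert(GetCallerOfCalleeSaveFrame(sp) == &caller);
  return 0;
}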
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h index 9292cff88e..a4dd55cebe 100644 --- a/runtime/entrypoints/entrypoint_utils-inl.h +++ b/runtime/entrypoints/entrypoint_utils-inl.h @@ -19,6 +19,7 @@ #include "entrypoint_utils.h" +#include "art_method.h" #include "class_linker-inl.h" #include "common_throws.h" #include "dex_file.h" @@ -27,7 +28,6 @@ #include "indirect_reference_table.h" #include "invoke_type.h" #include "jni_internal.h" -#include "mirror/art_method.h" #include "mirror/array.h" #include "mirror/class-inl.h" #include "mirror/object-inl.h" @@ -38,15 +38,15 @@ namespace art { -inline mirror::ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type) +inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - auto* refs_only_sp = self->GetManagedStack()->GetTopQuickFrame(); - DCHECK_EQ(refs_only_sp->AsMirrorPtr(), Runtime::Current()->GetCalleeSaveMethod(type)); + auto** refs_only_sp = self->GetManagedStack()->GetTopQuickFrame(); + DCHECK_EQ(*refs_only_sp, Runtime::Current()->GetCalleeSaveMethod(type)); const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type); - auto* caller_sp = reinterpret_cast<StackReference<mirror::ArtMethod>*>( - reinterpret_cast<uintptr_t>(refs_only_sp) + callee_frame_size); - auto* caller = caller_sp->AsMirrorPtr(); + auto** caller_sp = reinterpret_cast<ArtMethod**>( + reinterpret_cast<uintptr_t>(refs_only_sp) + callee_frame_size); + auto* caller = *caller_sp; if (kIsDebugBuild) { NthCallerVisitor visitor(self, 1, true); @@ -60,7 +60,7 @@ inline mirror::ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::Calle template <const bool kAccessCheck> ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(uint32_t type_idx, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, bool* slow_path) { mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); if (UNLIKELY(klass == nullptr)) { @@ -141,7 +141,7 @@ inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass, template <bool kAccessCheck, bool kInstrumented> ALWAYS_INLINE inline mirror::Object* AllocObjectFromCode(uint32_t type_idx, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) { bool slow_path = false; @@ -193,7 +193,7 @@ template <bool kAccessCheck> ALWAYS_INLINE inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* method, + ArtMethod* method, bool* slow_path) { if (UNLIKELY(component_count < 0)) { ThrowNegativeArraySizeException(component_count); @@ -229,7 +229,7 @@ template <bool kAccessCheck, bool kInstrumented> ALWAYS_INLINE inline mirror::Array* AllocArrayFromCode(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) { bool slow_path = false; @@ -252,7 +252,7 @@ template <bool kAccessCheck, bool kInstrumented> ALWAYS_INLINE inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass, int32_t component_count, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) { DCHECK(klass != nullptr); @@ -274,7 +274,7 @@ inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass, } template<FindFieldType type, bool access_check> -inline ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, +inline ArtField* 
FindFieldFromCode(uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size) { bool is_primitive; bool is_set; @@ -347,8 +347,8 @@ inline ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referr #define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \ template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \ ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \ - mirror::ArtMethod* referrer, \ - Thread* self, size_t expected_size) \ + ArtMethod* referrer, \ + Thread* self, size_t expected_size) \ #define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \ EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \ @@ -367,17 +367,16 @@ EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite); #undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL template<InvokeType type, bool access_check> -inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, - mirror::Object** this_object, - mirror::ArtMethod** referrer, Thread* self) { +inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object, + ArtMethod** referrer, Thread* self) { ClassLinker* const class_linker = Runtime::Current()->GetClassLinker(); - mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer); + ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer); if (resolved_method == nullptr) { StackHandleScope<1> hs(self); mirror::Object* null_this = nullptr; HandleWrapper<mirror::Object> h_this( hs.NewHandleWrapper(type == kStatic ? &null_this : this_object)); - resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type); + resolved_method = class_linker->ResolveMethod(self, method_idx, *referrer, type); } if (UNLIKELY(resolved_method == nullptr)) { DCHECK(self->IsExceptionPending()); // Throw exception and unwind. @@ -420,7 +419,7 @@ inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, return nullptr; // Failure. 
} DCHECK(klass->HasVTable()) << PrettyClass(klass); - return klass->GetVTableEntry(vtable_index); + return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize()); } case kSuper: { mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass(); @@ -439,23 +438,25 @@ inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, DCHECK(super_class != nullptr); } DCHECK(super_class->HasVTable()); - return super_class->GetVTableEntry(vtable_index); + return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize()); } case kInterface: { uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize; - mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index); + ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry( + imt_index, class_linker->GetImagePointerSize()); if (!imt_method->IsImtConflictMethod() && !imt_method->IsImtUnimplementedMethod()) { if (kIsDebugBuild) { mirror::Class* klass = (*this_object)->GetClass(); - mirror::ArtMethod* method = klass->FindVirtualMethodForInterface(resolved_method); + ArtMethod* method = klass->FindVirtualMethodForInterface( + resolved_method, class_linker->GetImagePointerSize()); CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " << PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " << PrettyClass(klass); } return imt_method; } else { - mirror::ArtMethod* interface_method = - (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method); + ArtMethod* interface_method = (*this_object)->GetClass()->FindVirtualMethodForInterface( + resolved_method, class_linker->GetImagePointerSize()); if (UNLIKELY(interface_method == nullptr)) { ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method, *this_object, *referrer); @@ -473,10 +474,10 @@ inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, // Explicit template declarations of FindMethodFromCode for all invoke types. #define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \ template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \ - mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \ - mirror::Object** this_object, \ - mirror::ArtMethod** referrer, \ - Thread* self) + ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \ + mirror::Object** this_object, \ + ArtMethod** referrer, \ + Thread* self) #define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \ EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \ EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true) @@ -491,9 +492,8 @@ EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface); #undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL // Fast path field resolution that can't initialize classes or throw exceptions. 
-inline ArtField* FindFieldFast(uint32_t field_idx, - mirror::ArtMethod* referrer, - FindFieldType type, size_t expected_size) { +inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type, + size_t expected_size) { ArtField* resolved_field = referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx, sizeof(void*)); if (UNLIKELY(resolved_field == nullptr)) { @@ -530,8 +530,7 @@ inline ArtField* FindFieldFast(uint32_t field_idx, } mirror::Class* referring_class = referrer->GetDeclaringClass(); if (UNLIKELY(!referring_class->CanAccess(fields_class) || - !referring_class->CanAccessMember(fields_class, - resolved_field->GetAccessFlags()) || + !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) || (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) { // Illegal access. return nullptr; @@ -544,15 +543,13 @@ inline ArtField* FindFieldFast(uint32_t field_idx, } // Fast path method resolution that can't throw exceptions. -inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx, - mirror::Object* this_object, - mirror::ArtMethod* referrer, - bool access_check, InvokeType type) { +inline ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object, + ArtMethod* referrer, bool access_check, InvokeType type) { if (UNLIKELY(this_object == nullptr && type != kStatic)) { return nullptr; } - mirror::ArtMethod* resolved_method = - referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx); + ArtMethod* resolved_method = + referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx, sizeof(void*)); if (UNLIKELY(resolved_method == nullptr)) { return nullptr; } @@ -572,22 +569,21 @@ inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx, } } if (type == kInterface) { // Most common form of slow path dispatch. 
- return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method); + return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method, sizeof(void*)); } else if (type == kStatic || type == kDirect) { return resolved_method; } else if (type == kSuper) { - return referrer->GetDeclaringClass()->GetSuperClass() - ->GetVTableEntry(resolved_method->GetMethodIndex()); + return referrer->GetDeclaringClass()->GetSuperClass()->GetVTableEntry( + resolved_method->GetMethodIndex(), sizeof(void*)); } else { DCHECK(type == kVirtual); - return this_object->GetClass()->GetVTableEntry(resolved_method->GetMethodIndex()); + return this_object->GetClass()->GetVTableEntry( + resolved_method->GetMethodIndex(), sizeof(void*)); } } -inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, - mirror::ArtMethod* referrer, - Thread* self, bool can_run_clinit, - bool verify_access) { +inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, ArtMethod* referrer, Thread* self, + bool can_run_clinit, bool verify_access) { ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); mirror::Class* klass = class_linker->ResolveType(type_idx, referrer); if (UNLIKELY(klass == nullptr)) { @@ -620,8 +616,7 @@ inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, return h_class.Get(); } -inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer, - uint32_t string_idx) { +inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) { ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); return class_linker->ResolveString(string_idx, referrer); } diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc index ce5673923f..fc7f8b782a 100644 --- a/runtime/entrypoints/entrypoint_utils.cc +++ b/runtime/entrypoints/entrypoint_utils.cc @@ -17,11 +17,11 @@ #include "entrypoints/entrypoint_utils.h" #include "art_field-inl.h" +#include "art_method-inl.h" #include "base/mutex.h" #include "class_linker-inl.h" #include "dex_file-inl.h" #include "gc/accounting/card_table-inl.h" -#include "mirror/art_method-inl.h" #include "mirror/class-inl.h" #include "mirror/method.h" #include "mirror/object-inl.h" @@ -35,7 +35,7 @@ namespace art { static inline mirror::Class* CheckFilledNewArrayAlloc(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* referrer, + ArtMethod* referrer, Thread* self, bool access_check) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { @@ -76,7 +76,7 @@ static inline mirror::Class* CheckFilledNewArrayAlloc(uint32_t type_idx, // Helper function to allocate array for FILLED_NEW_ARRAY. mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* referrer, Thread* self, + ArtMethod* referrer, Thread* self, bool access_check, gc::AllocatorType /* allocator_type */) { mirror::Class* klass = CheckFilledNewArrayAlloc(type_idx, component_count, referrer, self, @@ -96,7 +96,7 @@ mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, int32_t component_c // Helper function to allocate array for FILLED_NEW_ARRAY. 
mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* referrer, + ArtMethod* referrer, Thread* self, bool access_check, gc::AllocatorType /* allocator_type */) { @@ -294,22 +294,19 @@ JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, cons mirror::Object* rcvr = soa.Decode<mirror::Object*>(rcvr_jobj); mirror::Class* proxy_class = rcvr->GetClass(); mirror::Method* interface_method = soa.Decode<mirror::Method*>(interface_method_jobj); - mirror::ArtMethod* proxy_method = - rcvr->GetClass()->FindVirtualMethodForInterface(interface_method->GetArtMethod()); - int throws_index = -1; - size_t num_virt_methods = proxy_class->NumVirtualMethods(); - for (size_t i = 0; i < num_virt_methods; i++) { - if (proxy_class->GetVirtualMethod(i) == proxy_method) { - throws_index = i; - break; - } - } - CHECK_NE(throws_index, -1); + ArtMethod* proxy_method = rcvr->GetClass()->FindVirtualMethodForInterface( + interface_method->GetArtMethod(), sizeof(void*)); + auto* virtual_methods = proxy_class->GetVirtualMethodsPtr(); + size_t num_virtuals = proxy_class->NumVirtualMethods(); + size_t method_size = ArtMethod::ObjectSize(sizeof(void*)); + int throws_index = (reinterpret_cast<uintptr_t>(proxy_method) - + reinterpret_cast<uintptr_t>(virtual_methods)) / method_size; + CHECK_LT(throws_index, static_cast<int>(num_virtuals)); mirror::ObjectArray<mirror::Class>* declared_exceptions = proxy_class->GetThrows()->Get(throws_index); mirror::Class* exception_class = exception->GetClass(); bool declares_exception = false; - for (int i = 0; i < declared_exceptions->GetLength() && !declares_exception; i++) { + for (int32_t i = 0; i < declared_exceptions->GetLength() && !declares_exception; i++) { mirror::Class* declared_exception = declared_exceptions->Get(i); declares_exception = declared_exception->IsAssignableFrom(exception_class); } diff --git a/runtime/entrypoints/entrypoint_utils.h b/runtime/entrypoints/entrypoint_utils.h index 8d419f8a7b..47865a2a80 100644 --- a/runtime/entrypoints/entrypoint_utils.h +++ b/runtime/entrypoints/entrypoint_utils.h @@ -31,19 +31,19 @@ namespace art { namespace mirror { class Array; - class ArtMethod; class Class; class Object; class String; } // namespace mirror class ArtField; +class ArtMethod; class ScopedObjectAccessAlreadyRunnable; class Thread; template <const bool kAccessCheck> ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(uint32_t type_idx, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, bool* slow_path) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -58,7 +58,7 @@ ALWAYS_INLINE inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror:: // check. 
template <bool kAccessCheck, bool kInstrumented> ALWAYS_INLINE inline mirror::Object* AllocObjectFromCode(uint32_t type_idx, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -81,7 +81,7 @@ ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Clas template <bool kAccessCheck> ALWAYS_INLINE inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* method, + ArtMethod* method, bool* slow_path) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -92,7 +92,7 @@ ALWAYS_INLINE inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, template <bool kAccessCheck, bool kInstrumented> ALWAYS_INLINE inline mirror::Array* AllocArrayFromCode(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); @@ -100,20 +100,20 @@ ALWAYS_INLINE inline mirror::Array* AllocArrayFromCode(uint32_t type_idx, template <bool kAccessCheck, bool kInstrumented> ALWAYS_INLINE inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass, int32_t component_count, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* method, Thread* self, + ArtMethod* method, Thread* self, bool access_check, gc::AllocatorType allocator_type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx, int32_t component_count, - mirror::ArtMethod* method, + ArtMethod* method, Thread* self, bool access_check, gc::AllocatorType allocator_type) @@ -132,38 +132,33 @@ enum FindFieldType { }; template<FindFieldType type, bool access_check> -inline ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, - Thread* self, size_t expected_size) +inline ArtField* FindFieldFromCode( + uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); template<InvokeType type, bool access_check> -inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, - mirror::Object** this_object, - mirror::ArtMethod** referrer, Thread* self) +inline ArtMethod* FindMethodFromCode( + uint32_t method_idx, mirror::Object** this_object, ArtMethod** referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Fast path field resolution that can't initialize classes or throw exceptions. -inline ArtField* FindFieldFast(uint32_t field_idx, - mirror::ArtMethod* referrer, - FindFieldType type, size_t expected_size) +inline ArtField* FindFieldFast( + uint32_t field_idx, ArtMethod* referrer, FindFieldType type, size_t expected_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Fast path method resolution that can't throw exceptions. 
-inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx, - mirror::Object* this_object, - mirror::ArtMethod* referrer, - bool access_check, InvokeType type) +inline ArtMethod* FindMethodFast( + uint32_t method_idx, mirror::Object* this_object, ArtMethod* referrer, bool access_check, + InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); -inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, - mirror::ArtMethod* referrer, - Thread* self, bool can_run_clinit, - bool verify_access) +inline mirror::Class* ResolveVerifyAndClinit( + uint32_t type_idx, ArtMethod* referrer, Thread* self, bool can_run_clinit, bool verify_access) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); extern void ThrowStackOverflowError(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); -inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer, uint32_t string_idx) +inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // TODO: annotalysis disabled as monitor semantics are maintained in Java code. diff --git a/runtime/entrypoints/interpreter/interpreter_entrypoints.cc b/runtime/entrypoints/interpreter/interpreter_entrypoints.cc index d4844c2a95..72c2e0a5b0 100644 --- a/runtime/entrypoints/interpreter/interpreter_entrypoints.cc +++ b/runtime/entrypoints/interpreter/interpreter_entrypoints.cc @@ -14,10 +14,10 @@ * limitations under the License. */ +#include "art_method-inl.h" #include "class_linker.h" #include "dex_file-inl.h" #include "interpreter/interpreter.h" -#include "mirror/art_method-inl.h" #include "mirror/object-inl.h" #include "reflection.h" #include "runtime.h" @@ -27,7 +27,7 @@ namespace art { extern "C" void artInterpreterToCompiledCodeBridge(Thread* self, const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result) { - mirror::ArtMethod* method = shadow_frame->GetMethod(); + ArtMethod* method = shadow_frame->GetMethod(); // Ensure static methods are initialized. if (method->IsStatic()) { mirror::Class* declaringClass = method->GetDeclaringClass(); @@ -50,7 +50,7 @@ extern "C" void artInterpreterToCompiledCodeBridge(Thread* self, const DexFile:: uint16_t arg_offset = (code_item == nullptr) ? 0 : code_item->registers_size_ - code_item->ins_size_; method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset), (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t), - result, method->GetShorty()); + result, method->GetInterfaceMethodIfProxy(sizeof(void*))->GetShorty()); } } // namespace art diff --git a/runtime/entrypoints/jni/jni_entrypoints.cc b/runtime/entrypoints/jni/jni_entrypoints.cc index a68eeebff8..22226c1dfb 100644 --- a/runtime/entrypoints/jni/jni_entrypoints.cc +++ b/runtime/entrypoints/jni/jni_entrypoints.cc @@ -14,9 +14,9 @@ * limitations under the License. */ +#include "art_method-inl.h" #include "base/logging.h" #include "entrypoints/entrypoint_utils.h" -#include "mirror/art_method-inl.h" #include "mirror/object-inl.h" #include "scoped_thread_state_change.h" #include "thread.h" @@ -34,7 +34,7 @@ extern "C" void* artFindNativeMethod(Thread* self) { Locks::mutator_lock_->AssertNotHeld(self); // We come here as Native. 
ScopedObjectAccess soa(self); - mirror::ArtMethod* method = self->GetCurrentMethod(nullptr); + ArtMethod* method = self->GetCurrentMethod(nullptr); DCHECK(method != nullptr); // Lookup symbol address for method, on failure we'll return null with an exception set, diff --git a/runtime/entrypoints/quick/callee_save_frame.h b/runtime/entrypoints/quick/callee_save_frame.h index 8cd6ca6777..521c549193 100644 --- a/runtime/entrypoints/quick/callee_save_frame.h +++ b/runtime/entrypoints/quick/callee_save_frame.h @@ -32,9 +32,7 @@ #include "arch/x86_64/quick_method_frame_info_x86_64.h" namespace art { -namespace mirror { class ArtMethod; -} // namespace mirror class ScopedQuickEntrypointChecks { public: diff --git a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc index fa129afd39..f56b5e45b6 100644 --- a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc @@ -16,9 +16,9 @@ #include "entrypoints/quick/quick_alloc_entrypoints.h" +#include "art_method-inl.h" #include "callee_save_frame.h" #include "entrypoints/entrypoint_utils-inl.h" -#include "mirror/art_method-inl.h" #include "mirror/class-inl.h" #include "mirror/object_array-inl.h" #include "mirror/object-inl.h" @@ -29,7 +29,7 @@ static constexpr bool kUseTlabFastPath = true; #define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \ extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \ - uint32_t type_idx, mirror::ArtMethod* method, Thread* self) \ + uint32_t type_idx, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \ @@ -56,7 +56,7 @@ extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \ return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \ } \ extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \ - mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \ + mirror::Class* klass, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ UNUSED(method); \ ScopedQuickEntrypointChecks sqec(self); \ @@ -83,7 +83,7 @@ extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \ return AllocObjectFromCodeResolved<instrumented_bool>(klass, self, allocator_type); \ } \ extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \ - mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \ + mirror::Class* klass, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ UNUSED(method); \ ScopedQuickEntrypointChecks sqec(self); \ @@ -108,34 +108,34 @@ extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \ return AllocObjectFromCodeInitialized<instrumented_bool>(klass, self, allocator_type); \ } \ extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \ - uint32_t type_idx, mirror::ArtMethod* method, Thread* self) \ + uint32_t type_idx, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ return AllocObjectFromCode<true, instrumented_bool>(type_idx, method, self, allocator_type); \ } \ extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \ - uint32_t type_idx, int32_t component_count, 
mirror::ArtMethod* method, Thread* self) \ + uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ return AllocArrayFromCode<false, instrumented_bool>(type_idx, component_count, method, self, \ allocator_type); \ } \ extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \ - mirror::Class* klass, int32_t component_count, mirror::ArtMethod* method, Thread* self) \ + mirror::Class* klass, int32_t component_count, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, component_count, method, self, \ allocator_type); \ } \ extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \ - uint32_t type_idx, int32_t component_count, mirror::ArtMethod* method, Thread* self) \ + uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ return AllocArrayFromCode<true, instrumented_bool>(type_idx, component_count, method, self, \ allocator_type); \ } \ extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \ - uint32_t type_idx, int32_t component_count, mirror::ArtMethod* method, Thread* self) \ + uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ if (!instrumented_bool) { \ @@ -145,7 +145,7 @@ extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \ } \ } \ extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \ - uint32_t type_idx, int32_t component_count, mirror::ArtMethod* method, Thread* self) \ + uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ if (!instrumented_bool) { \ @@ -193,27 +193,27 @@ GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion) GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB) #define GENERATE_ENTRYPOINTS(suffix) \ -extern "C" void* art_quick_alloc_array##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \ +extern "C" void* art_quick_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, 
int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, ArtMethod* ref); \ +extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \ extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \ extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \ extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \ -extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \ -extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \ +extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \ +extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \ +extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ +extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \ extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \ extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \ diff --git a/runtime/entrypoints/quick/quick_default_externs.h b/runtime/entrypoints/quick/quick_default_externs.h index 1fd8a949a9..c7aaa2021f 100644 --- a/runtime/entrypoints/quick/quick_default_externs.h +++ 
b/runtime/entrypoints/quick/quick_default_externs.h @@ -22,10 +22,10 @@ namespace art { namespace mirror { class Array; -class ArtMethod; class Class; class Object; } // namespace mirror +class ArtMethod; } // namespace art // These are extern declarations of assembly stubs with common names. @@ -97,9 +97,9 @@ extern "C" int32_t art_quick_string_compareto(void*, void*); extern "C" void* art_quick_memcpy(void*, const void*, size_t); // Invoke entrypoints. -extern "C" void art_quick_imt_conflict_trampoline(art::mirror::ArtMethod*); -extern "C" void art_quick_resolution_trampoline(art::mirror::ArtMethod*); -extern "C" void art_quick_to_interpreter_bridge(art::mirror::ArtMethod*); +extern "C" void art_quick_imt_conflict_trampoline(art::ArtMethod*); +extern "C" void art_quick_resolution_trampoline(art::ArtMethod*); +extern "C" void art_quick_to_interpreter_bridge(art::ArtMethod*); extern "C" void art_quick_invoke_direct_trampoline_with_access_check(uint32_t, void*); extern "C" void art_quick_invoke_interface_trampoline_with_access_check(uint32_t, void*); extern "C" void art_quick_invoke_static_trampoline_with_access_check(uint32_t, void*); diff --git a/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc b/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc index 6a8aaf2610..3eefeef84a 100644 --- a/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc @@ -17,7 +17,6 @@ #include "callee_save_frame.h" #include "dex_file-inl.h" #include "interpreter/interpreter.h" -#include "mirror/art_method-inl.h" #include "mirror/class-inl.h" #include "mirror/object_array-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc index 46629f5958..67649d4c64 100644 --- a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc @@ -14,12 +14,12 @@ * limitations under the License. */ +#include "art_method-inl.h" #include "callee_save_frame.h" #include "entrypoints/entrypoint_utils-inl.h" #include "class_linker-inl.h" #include "dex_file-inl.h" #include "gc/accounting/card_table-inl.h" -#include "mirror/art_method-inl.h" #include "mirror/object_array-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/entrypoints/quick/quick_entrypoints.h b/runtime/entrypoints/quick/quick_entrypoints.h index b72ce34648..cef2510451 100644 --- a/runtime/entrypoints/quick/quick_entrypoints.h +++ b/runtime/entrypoints/quick/quick_entrypoints.h @@ -29,13 +29,13 @@ namespace art { namespace mirror { class Array; -class ArtMethod; class Class; class Object; template<class MirrorType> class CompressedReference; } // namespace mirror +class ArtMethod; class Thread; // Pointers to functions that are called by quick compiler generated code via thread-local storage. diff --git a/runtime/entrypoints/quick/quick_entrypoints_list.h b/runtime/entrypoints/quick/quick_entrypoints_list.h index 0aca58fb16..60bbf4ac82 100644 --- a/runtime/entrypoints/quick/quick_entrypoints_list.h +++ b/runtime/entrypoints/quick/quick_entrypoints_list.h @@ -20,15 +20,15 @@ // All quick entrypoints. Format is name, return type, argument types. 
#define QUICK_ENTRYPOINT_LIST(V) \ - V(AllocArray, void*, uint32_t, int32_t, mirror::ArtMethod*) \ - V(AllocArrayResolved, void*, mirror::Class*, int32_t, mirror::ArtMethod*) \ - V(AllocArrayWithAccessCheck, void*, uint32_t, int32_t, mirror::ArtMethod*) \ - V(AllocObject, void*, uint32_t, mirror::ArtMethod*) \ - V(AllocObjectResolved, void*, mirror::Class*, mirror::ArtMethod*) \ - V(AllocObjectInitialized, void*, mirror::Class*, mirror::ArtMethod*) \ - V(AllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*) \ - V(CheckAndAllocArray, void*, uint32_t, int32_t, mirror::ArtMethod*) \ - V(CheckAndAllocArrayWithAccessCheck, void*, uint32_t, int32_t, mirror::ArtMethod*) \ + V(AllocArray, void*, uint32_t, int32_t, ArtMethod*) \ + V(AllocArrayResolved, void*, mirror::Class*, int32_t, ArtMethod*) \ + V(AllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*) \ + V(AllocObject, void*, uint32_t, ArtMethod*) \ + V(AllocObjectResolved, void*, mirror::Class*, ArtMethod*) \ + V(AllocObjectInitialized, void*, mirror::Class*, ArtMethod*) \ + V(AllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*) \ + V(CheckAndAllocArray, void*, uint32_t, int32_t, ArtMethod*) \ + V(CheckAndAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*) \ V(AllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t) \ V(AllocStringFromChars, void*, int32_t, int32_t, void*) \ V(AllocStringFromString, void*, void*) \ @@ -77,7 +77,7 @@ V(JniMethodEndSynchronized, void, uint32_t, jobject, Thread*) \ V(JniMethodEndWithReference, mirror::Object*, jobject, uint32_t, Thread*) \ V(JniMethodEndWithReferenceSynchronized, mirror::Object*, jobject, uint32_t, jobject, Thread*) \ - V(QuickGenericJniTrampoline, void, mirror::ArtMethod*) \ + V(QuickGenericJniTrampoline, void, ArtMethod*) \ \ V(LockObject, void, mirror::Object*) \ V(UnlockObject, void, mirror::Object*) \ @@ -106,9 +106,9 @@ V(StringCompareTo, int32_t, void*, void*) \ V(Memcpy, void*, void*, const void*, size_t) \ \ - V(QuickImtConflictTrampoline, void, mirror::ArtMethod*) \ - V(QuickResolutionTrampoline, void, mirror::ArtMethod*) \ - V(QuickToInterpreterBridge, void, mirror::ArtMethod*) \ + V(QuickImtConflictTrampoline, void, ArtMethod*) \ + V(QuickResolutionTrampoline, void, ArtMethod*) \ + V(QuickToInterpreterBridge, void, ArtMethod*) \ V(InvokeDirectTrampolineWithAccessCheck, void, uint32_t, void*) \ V(InvokeInterfaceTrampolineWithAccessCheck, void, uint32_t, void*) \ V(InvokeStaticTrampolineWithAccessCheck, void, uint32_t, void*) \ diff --git a/runtime/entrypoints/quick/quick_field_entrypoints.cc b/runtime/entrypoints/quick/quick_field_entrypoints.cc index b5a7c09531..871cf3c256 100644 --- a/runtime/entrypoints/quick/quick_field_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_field_entrypoints.cc @@ -15,17 +15,17 @@ */ #include "art_field-inl.h" +#include "art_method-inl.h" #include "callee_save_frame.h" #include "dex_file-inl.h" #include "entrypoints/entrypoint_utils-inl.h" -#include "mirror/art_method-inl.h" #include "mirror/class-inl.h" #include <stdint.h> namespace art { -extern "C" int8_t artGetByteStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, +extern "C" int8_t artGetByteStaticFromCode(uint32_t field_idx, ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -40,7 +40,7 @@ extern "C" int8_t artGetByteStaticFromCode(uint32_t field_idx, mirror::ArtMethod return 0; // Will throw exception by checking with Thread::Current. 
} -extern "C" uint8_t artGetBooleanStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, +extern "C" uint8_t artGetBooleanStaticFromCode(uint32_t field_idx, ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -55,7 +55,7 @@ extern "C" uint8_t artGetBooleanStaticFromCode(uint32_t field_idx, mirror::ArtMe return 0; // Will throw exception by checking with Thread::Current. } -extern "C" int16_t artGetShortStaticFromCode(uint32_t field_idx, mirror::ArtMethod* referrer, +extern "C" int16_t artGetShortStaticFromCode(uint32_t field_idx, ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -71,7 +71,7 @@ extern "C" int16_t artGetShortStaticFromCode(uint32_t field_idx, mirror::ArtMeth } extern "C" uint16_t artGetCharStaticFromCode(uint32_t field_idx, - mirror::ArtMethod* referrer, + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -87,7 +87,7 @@ extern "C" uint16_t artGetCharStaticFromCode(uint32_t field_idx, } extern "C" uint32_t artGet32StaticFromCode(uint32_t field_idx, - mirror::ArtMethod* referrer, + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -103,7 +103,7 @@ extern "C" uint32_t artGet32StaticFromCode(uint32_t field_idx, } extern "C" uint64_t artGet64StaticFromCode(uint32_t field_idx, - mirror::ArtMethod* referrer, + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -119,7 +119,7 @@ extern "C" uint64_t artGet64StaticFromCode(uint32_t field_idx, } extern "C" mirror::Object* artGetObjStaticFromCode(uint32_t field_idx, - mirror::ArtMethod* referrer, + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -137,7 +137,7 @@ extern "C" mirror::Object* artGetObjStaticFromCode(uint32_t field_idx, } extern "C" int8_t artGetByteInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int8_t)); @@ -157,7 +157,7 @@ extern "C" int8_t artGetByteInstanceFromCode(uint32_t field_idx, mirror::Object* } extern "C" uint8_t artGetBooleanInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int8_t)); @@ -176,7 +176,7 @@ extern "C" uint8_t artGetBooleanInstanceFromCode(uint32_t field_idx, mirror::Obj return 0; // Will throw exception by checking with Thread::Current. 
} extern "C" int16_t artGetShortInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int16_t)); @@ -196,7 +196,7 @@ extern "C" int16_t artGetShortInstanceFromCode(uint32_t field_idx, mirror::Objec } extern "C" uint16_t artGetCharInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int16_t)); @@ -216,7 +216,7 @@ extern "C" uint16_t artGetCharInstanceFromCode(uint32_t field_idx, mirror::Objec } extern "C" uint32_t artGet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int32_t)); @@ -236,7 +236,7 @@ extern "C" uint32_t artGet32InstanceFromCode(uint32_t field_idx, mirror::Object* } extern "C" uint64_t artGet64InstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead, sizeof(int64_t)); @@ -256,7 +256,7 @@ extern "C" uint64_t artGet64InstanceFromCode(uint32_t field_idx, mirror::Object* } extern "C" mirror::Object* artGetObjInstanceFromCode(uint32_t field_idx, mirror::Object* obj, - mirror::ArtMethod* referrer, + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -278,7 +278,7 @@ extern "C" mirror::Object* artGetObjInstanceFromCode(uint32_t field_idx, mirror: } extern "C" int artSet8StaticFromCode(uint32_t field_idx, uint32_t new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite, sizeof(int8_t)); @@ -309,7 +309,7 @@ extern "C" int artSet8StaticFromCode(uint32_t field_idx, uint32_t new_value, } extern "C" int artSet16StaticFromCode(uint32_t field_idx, uint16_t new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite, sizeof(int16_t)); @@ -340,7 +340,7 @@ extern "C" int artSet16StaticFromCode(uint32_t field_idx, uint16_t new_value, } extern "C" int artSet32StaticFromCode(uint32_t field_idx, uint32_t new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite, sizeof(int32_t)); @@ -358,7 +358,7 @@ extern "C" int artSet32StaticFromCode(uint32_t field_idx, uint32_t new_value, return -1; // failure } -extern "C" int artSet64StaticFromCode(uint32_t 
field_idx, mirror::ArtMethod* referrer, +extern "C" int artSet64StaticFromCode(uint32_t field_idx, ArtMethod* referrer, uint64_t new_value, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); @@ -378,7 +378,7 @@ extern "C" int artSet64StaticFromCode(uint32_t field_idx, mirror::ArtMethod* ref } extern "C" int artSetObjStaticFromCode(uint32_t field_idx, mirror::Object* new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, StaticObjectWrite, @@ -401,7 +401,7 @@ extern "C" int artSetObjStaticFromCode(uint32_t field_idx, mirror::Object* new_v } extern "C" int artSet8InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint8_t new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int8_t)); @@ -440,7 +440,7 @@ extern "C" int artSet8InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } extern "C" int artSet16InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint16_t new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int16_t)); @@ -480,7 +480,7 @@ extern "C" int artSet16InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } extern "C" int artSet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint32_t new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int32_t)); @@ -508,7 +508,7 @@ extern "C" int artSet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj, } extern "C" int artSet64InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint64_t new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite, sizeof(int64_t)); @@ -533,7 +533,7 @@ extern "C" int artSet64InstanceFromCode(uint32_t field_idx, mirror::Object* obj, extern "C" int artSetObjInstanceFromCode(uint32_t field_idx, mirror::Object* obj, mirror::Object* new_value, - mirror::ArtMethod* referrer, Thread* self) + ArtMethod* referrer, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); ArtField* field = FindFieldFast(field_idx, referrer, InstanceObjectWrite, diff --git a/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc b/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc index e3365431ce..d3991cdb78 100644 --- a/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_fillarray_entrypoints.cc @@ -14,9 +14,9 @@ * limitations under the License. 
*/ +#include "art_method-inl.h" #include "callee_save_frame.h" #include "mirror/array.h" -#include "mirror/art_method-inl.h" #include "entrypoints/entrypoint_utils.h" namespace art { @@ -25,7 +25,7 @@ namespace art { * Handle fill array data by copying appropriate part of dex file into array. */ extern "C" int artHandleFillArrayDataFromCode(uint32_t payload_offset, mirror::Array* array, - mirror::ArtMethod* method, Thread* self) + ArtMethod* method, Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); const uint16_t* const insns = method->GetCodeItem()->insns_; diff --git a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc index eb1b1056a4..7eb73c3b59 100644 --- a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc @@ -14,17 +14,17 @@ * limitations under the License. */ +#include "art_method-inl.h" #include "callee_save_frame.h" #include "entrypoints/runtime_asm_entrypoints.h" #include "instrumentation.h" -#include "mirror/art_method-inl.h" #include "mirror/object-inl.h" #include "runtime.h" #include "thread-inl.h" namespace art { -extern "C" const void* artInstrumentationMethodEntryFromCode(mirror::ArtMethod* method, +extern "C" const void* artInstrumentationMethodEntryFromCode(ArtMethod* method, mirror::Object* this_object, Thread* self, uintptr_t lr) @@ -45,8 +45,7 @@ extern "C" const void* artInstrumentationMethodEntryFromCode(mirror::ArtMethod* return result; } -extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self, - StackReference<mirror::ArtMethod>* sp, +extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self, ArtMethod** sp, uint64_t gpr_result, uint64_t fpr_result) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { diff --git a/runtime/entrypoints/quick/quick_jni_entrypoints.cc b/runtime/entrypoints/quick/quick_jni_entrypoints.cc index 51817a249d..de225ad8e8 100644 --- a/runtime/entrypoints/quick/quick_jni_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_jni_entrypoints.cc @@ -14,8 +14,8 @@ * limitations under the License. */ +#include "art_method-inl.h" #include "entrypoints/entrypoint_utils-inl.h" -#include "mirror/art_method-inl.h" #include "mirror/object-inl.h" #include "thread-inl.h" #include "verify_object-inl.h" @@ -35,7 +35,7 @@ extern uint32_t JniMethodStart(Thread* self) { DCHECK(env != nullptr); uint32_t saved_local_ref_cookie = env->local_ref_cookie; env->local_ref_cookie = env->locals.GetSegmentState(); - mirror::ArtMethod* native_method = self->GetManagedStack()->GetTopQuickFrame()->AsMirrorPtr(); + ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame(); if (!native_method->IsFastNative()) { // When not fast JNI we transition out of runnable. self->TransitionFromRunnableToSuspended(kNative); @@ -50,7 +50,7 @@ extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) { // TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI. 
static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS { - mirror::ArtMethod* native_method = self->GetManagedStack()->GetTopQuickFrame()->AsMirrorPtr(); + ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame(); bool is_fast = native_method->IsFastNative(); if (!is_fast) { self->TransitionFromSuspendedToRunnable(); diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc index 2e7e2dfd74..bc15cc79c9 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc @@ -14,6 +14,7 @@ * limitations under the License. */ +#include "art_method-inl.h" #include "callee_save_frame.h" #include "common_throws.h" #include "dex_file-inl.h" @@ -23,7 +24,6 @@ #include "gc/accounting/card_table-inl.h" #include "interpreter/interpreter.h" #include "method_reference.h" -#include "mirror/art_method-inl.h" #include "mirror/class-inl.h" #include "mirror/dex_cache-inl.h" #include "mirror/method.h" @@ -279,10 +279,10 @@ class QuickArgumentVisitor { // 'this' object is the 1st argument. They also have the same frame layout as the // kRefAndArgs runtime method. Since 'this' is a reference, it is located in the // 1st GPR. - static mirror::Object* GetProxyThisObject(StackReference<mirror::ArtMethod>* sp) + static mirror::Object* GetProxyThisObject(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - CHECK(sp->AsMirrorPtr()->IsProxyMethod()); - CHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize, sp->AsMirrorPtr()->GetFrameSizeInBytes()); + CHECK((*sp)->IsProxyMethod()); + CHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize, (*sp)->GetFrameSizeInBytes()); CHECK_GT(kNumQuickGprArgs, 0u); constexpr uint32_t kThisGprIndex = 0u; // 'this' is in the 1st GPR. size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset + @@ -291,28 +291,28 @@ class QuickArgumentVisitor { return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address)->AsMirrorPtr(); } - static mirror::ArtMethod* GetCallingMethod(StackReference<mirror::ArtMethod>* sp) + static ArtMethod* GetCallingMethod(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod()); - uint8_t* previous_sp = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize; - return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr(); + DCHECK((*sp)->IsCalleeSaveMethod()); + uint8_t* previous_sp = reinterpret_cast<uint8_t*>(sp) + + kQuickCalleeSaveFrame_RefAndArgs_FrameSize; + return *reinterpret_cast<ArtMethod**>(previous_sp); } // For the given quick ref and args quick frame, return the caller's PC. 
- static uintptr_t GetCallingPc(StackReference<mirror::ArtMethod>* sp) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod()); + static uintptr_t GetCallingPc(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + DCHECK((*sp)->IsCalleeSaveMethod()); uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset; return *reinterpret_cast<uintptr_t*>(lr); } - QuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static, const char* shorty, + QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) : is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len), gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset), fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset), stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize - + sizeof(StackReference<mirror::ArtMethod>)), // Skip StackReference<ArtMethod>. + + sizeof(ArtMethod*)), // Skip ArtMethod*. gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) { static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0), @@ -323,6 +323,7 @@ class QuickArgumentVisitor { // next register is even. static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0, "Number of Quick FPR arguments not even"); + DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*)); } virtual ~QuickArgumentVisitor() {} @@ -354,7 +355,8 @@ class QuickArgumentVisitor { } bool IsSplitLongOrDouble() const { - if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) || (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) { + if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) || + (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) { return is_split_long_or_double_; } else { return false; // An optimization for when GPR and FPRs are 64bit. @@ -539,7 +541,7 @@ class QuickArgumentVisitor { // Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It // allows to use the QuickArgumentVisitor constants without moving all the code in its own module. -extern "C" mirror::Object* artQuickGetProxyThisObject(StackReference<mirror::ArtMethod>* sp) +extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { return QuickArgumentVisitor::GetProxyThisObject(sp); } @@ -547,9 +549,8 @@ extern "C" mirror::Object* artQuickGetProxyThisObject(StackReference<mirror::Art // Visits arguments on the stack placing them into the shadow frame. 
 class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
  public:
-  BuildQuickShadowFrameVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
-                               const char* shorty, uint32_t shorty_len, ShadowFrame* sf,
-                               size_t first_arg_reg) :
+  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
+                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
       QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}
 
   void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
@@ -594,8 +595,7 @@ void BuildQuickShadowFrameVisitor::Visit() {
   ++cur_reg_;
 }
 
-extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
-                                                StackReference<mirror::ArtMethod>* sp)
+extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   // Ensure we don't get thread suspension until the object arguments are safely in the shadow
   // frame.
@@ -616,7 +616,8 @@ extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Threa
     ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, nullptr, method, 0, memory));
     size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
     uint32_t shorty_len = 0;
-    const char* shorty = method->GetShorty(&shorty_len);
+    auto* non_proxy_method = method->GetInterfaceMethodIfProxy(sizeof(void*));
+    const char* shorty = non_proxy_method->GetShorty(&shorty_len);
     BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                       shadow_frame, first_arg_reg);
     shadow_frame_builder.VisitArguments();
@@ -643,7 +644,7 @@ extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Threa
     self->PopManagedStackFragment(fragment);
 
     // Request a stack deoptimization if needed
-    mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
+    ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
     if (UNLIKELY(Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
       self->SetException(Thread::GetDeoptimizationException());
       self->SetDeoptimizationReturnValue(result);
@@ -658,8 +659,7 @@ extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Threa
 // to jobjects.
 class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
  public:
-  BuildQuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
-                            const char* shorty, uint32_t shorty_len,
+  BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
                             ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
       QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}
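artQuickToInterpreterBridge builds a ShadowFrame with the method's full register count and copies the incoming quick arguments into the highest dex registers, which is why the first argument register is registers_size_ - ins_size_. A tiny self-contained model of that register layout, with invented types standing in for the runtime's ShadowFrame and code item:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Stand-in for a shadow frame: one slot per dex register.
struct ShadowFrameSketch {
  std::vector<uint32_t> vregs;
  explicit ShadowFrameSketch(size_t num_regs) : vregs(num_regs, 0) {}
};

// Incoming arguments occupy the last ins_size registers of the frame.
void CopyArgsToShadowFrame(ShadowFrameSketch* sf, size_t registers_size, size_t ins_size,
                           const std::vector<uint32_t>& args) {
  size_t first_arg_reg = registers_size - ins_size;
  for (size_t i = 0; i < args.size(); ++i) {
    sf->vregs[first_arg_reg + i] = args[i];
  }
}

int main() {
  ShadowFrameSketch sf(8);                          // registers_size_ = 8
  CopyArgsToShadowFrame(&sf, 8, 3, {10, 20, 30});   // ins_size_ = 3
  assert(sf.vregs[5] == 10 && sf.vregs[6] == 20 && sf.vregs[7] == 30);
}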
-extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method, - mirror::Object* receiver, - Thread* self, StackReference<mirror::ArtMethod>* sp) +extern "C" uint64_t artQuickProxyInvokeHandler( + ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method); DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method); @@ -732,7 +731,7 @@ extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method, const char* old_cause = self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments"); // Register the top of the managed stack, making stack crawlable. - DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method); + DCHECK_EQ((*sp), proxy_method) << PrettyMethod(proxy_method); DCHECK_EQ(proxy_method->GetFrameSizeInBytes(), Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes()) << PrettyMethod(proxy_method); @@ -745,12 +744,12 @@ extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method, jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver); // Placing arguments into args vector and remove the receiver. - mirror::ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(); + ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(sizeof(void*)); CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " " << PrettyMethod(non_proxy_method); std::vector<jvalue> args; uint32_t shorty_len = 0; - const char* shorty = proxy_method->GetShorty(&shorty_len); + const char* shorty = non_proxy_method->GetShorty(&shorty_len); BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args); local_ref_visitor.VisitArguments(); @@ -758,7 +757,7 @@ extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method, args.erase(args.begin()); // Convert proxy method into expected interface method. - mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod(); + ArtMethod* interface_method = proxy_method->FindOverriddenMethod(sizeof(void*)); DCHECK(interface_method != nullptr) << PrettyMethod(proxy_method); DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method); self->EndAssertNoThreadSuspension(old_cause); @@ -777,9 +776,8 @@ extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method, // so they don't get garbage collected. class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor { public: - RememberForGcArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static, - const char* shorty, uint32_t shorty_len, - ScopedObjectAccessUnchecked* soa) : + RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, + uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) : QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {} void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE; @@ -813,10 +811,8 @@ void RememberForGcArgumentVisitor::FixupReferences() { } // Lazily resolve a method for quick. Called by stub code. 
-extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, - mirror::Object* receiver, - Thread* self, - StackReference<mirror::ArtMethod>* sp) +extern "C" const void* artQuickResolutionTrampoline( + ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); // Start new JNI local reference state @@ -827,7 +823,7 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, // Compute details about the called method (avoid GCs) ClassLinker* linker = Runtime::Current()->GetClassLinker(); - mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp); + ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp); InvokeType invoke_type; MethodReference called_method(nullptr, 0); const bool called_method_known_on_entry = !called->IsRuntimeMethod(); @@ -906,7 +902,7 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, HandleWrapper<mirror::Object> h_receiver( hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy)); DCHECK_EQ(caller->GetDexFile(), called_method.dex_file); - called = linker->ResolveMethod(self, called_method.dex_method_index, &caller, invoke_type); + called = linker->ResolveMethod(self, called_method.dex_method_index, caller, invoke_type); } const void* code = nullptr; if (LIKELY(!self->IsExceptionPending())) { @@ -917,11 +913,11 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, // Refine called method based on receiver. CHECK(receiver != nullptr) << invoke_type; - mirror::ArtMethod* orig_called = called; + ArtMethod* orig_called = called; if (invoke_type == kVirtual) { - called = receiver->GetClass()->FindVirtualMethodForVirtual(called); + called = receiver->GetClass()->FindVirtualMethodForVirtual(called, sizeof(void*)); } else { - called = receiver->GetClass()->FindVirtualMethodForInterface(called); + called = receiver->GetClass()->FindVirtualMethodForInterface(called, sizeof(void*)); } CHECK(called != nullptr) << PrettyMethod(orig_called) << " " @@ -947,8 +943,9 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, caller_method_name_and_sig_index); } if ((update_dex_cache_method_index != DexFile::kDexNoIndex) && - (caller->GetDexCacheResolvedMethod(update_dex_cache_method_index) != called)) { - caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called); + (caller->GetDexCacheResolvedMethod( + update_dex_cache_method_index, sizeof(void*)) != called)) { + caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called, sizeof(void*)); } } else if (invoke_type == kStatic) { const auto called_dex_method_idx = called->GetDexMethodIndex(); @@ -958,7 +955,7 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, // b/19175856 if (called->GetDexFile() == called_method.dex_file && called_method.dex_method_index != called_dex_method_idx) { - called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called); + called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called, sizeof(void*)); } } @@ -1007,7 +1004,8 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, // Fixup any locally saved objects may have moved during a GC. visitor.FixupReferences(); // Place called method in callee-save frame to be placed as first argument to quick method. 
@@ -1007,7 +1004,8 @@ extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
   // Fixup any locally saved objects may have moved during a GC.
   visitor.FixupReferences();
   // Place called method in callee-save frame to be placed as first argument to quick method.
-  sp->Assign(called);
+  *sp = called;
+
   return code;
 }
 
@@ -1487,10 +1485,11 @@ class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
   // is at *m = sp. Will update to point to the bottom of the save frame.
   //
   // Note: assumes ComputeAll() has been run before.
-  void LayoutCalleeSaveFrame(Thread* self, StackReference<mirror::ArtMethod>** m, void* sp,
-                             HandleScope** handle_scope)
+  void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    mirror::ArtMethod* method = (*m)->AsMirrorPtr();
+    ArtMethod* method = **m;
+
+    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
 
     uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
 
@@ -1502,22 +1501,20 @@ class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
 
     // Under the callee saves put handle scope and new method stack reference.
     size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
-    size_t scope_and_method = handle_scope_size + sizeof(StackReference<mirror::ArtMethod>);
+    size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);
 
     sp8 -= scope_and_method;
     // Align by kStackAlignment.
-    sp8 = reinterpret_cast<uint8_t*>(RoundDown(
-        reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
+    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
 
-    uint8_t* sp8_table = sp8 + sizeof(StackReference<mirror::ArtMethod>);
+    uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
     *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
                                         num_handle_scope_references_);
 
     // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
     uint8_t* method_pointer = sp8;
-    StackReference<mirror::ArtMethod>* new_method_ref =
-        reinterpret_cast<StackReference<mirror::ArtMethod>*>(method_pointer);
-    new_method_ref->Assign(method);
+    auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
+    *new_method_ref = method;
     *m = new_method_ref;
   }
 
@@ -1529,8 +1526,7 @@ class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
 
   // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
   // Returns the new bottom. Note: this may be unaligned.
-  uint8_t* LayoutJNISaveFrame(Thread* self, StackReference<mirror::ArtMethod>** m, void* sp,
-                              HandleScope** handle_scope)
+  uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     // First, fix up the layout of the callee-save frame.
     // We have to squeeze in the HandleScope, and relocate the method pointer.
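LayoutCalleeSaveFrame squeezes a HandleScope plus a single ArtMethod* slot underneath the callee saves, rounds the new bottom down to the stack alignment, and writes the method pointer at the very bottom with the handle scope directly above it. A sketch of just that address arithmetic, with an invented alignment constant and a plain byte buffer in place of the real stack:

#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr uintptr_t kStackAlignmentSketch = 16;  // Hypothetical alignment, for the sketch only.

inline uintptr_t RoundDownSketch(uintptr_t x, uintptr_t n) {
  return x & ~(n - 1);
}

// Returns the new frame bottom: method slot at the bottom, handle scope right above it.
uint8_t* LayoutSketch(uint8_t* sp8, size_t handle_scope_size,
                      uint8_t** method_slot, uint8_t** handle_scope) {
  sp8 -= handle_scope_size + sizeof(void*);   // Reserve scope + method pointer.
  sp8 = reinterpret_cast<uint8_t*>(
      RoundDownSketch(reinterpret_cast<uintptr_t>(sp8), kStackAlignmentSketch));
  *method_slot = sp8;                         // ArtMethod* slot at the bottom.
  *handle_scope = sp8 + sizeof(void*);        // Handle scope just above it.
  return sp8;
}

int main() {
  alignas(16) uint8_t frame[256];
  uint8_t* method_slot = nullptr;
  uint8_t* handle_scope = nullptr;
  uint8_t* bottom = LayoutSketch(frame + 256, 40, &method_slot, &handle_scope);
  assert(reinterpret_cast<uintptr_t>(bottom) % kStackAlignmentSketch == 0);
  assert(handle_scope == method_slot + sizeof(void*));
  assert(bottom + 40 + sizeof(void*) <= frame + 256);
}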
@@ -1546,9 +1542,9 @@ class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
   }
 
   // WARNING: After this, *sp won't be pointing to the method anymore!
-  uint8_t* ComputeLayout(Thread* self, StackReference<mirror::ArtMethod>** m,
-                         const char* shorty, uint32_t shorty_len, HandleScope** handle_scope,
-                         uintptr_t** start_stack, uintptr_t** start_gpr, uint32_t** start_fpr)
+  uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
+                         HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr,
+                         uint32_t** start_fpr)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Walk(shorty, shorty_len);
 
@@ -1637,7 +1633,7 @@ class FillNativeCall {
 class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
  public:
   BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len,
-                              StackReference<mirror::ArtMethod>** sp)
+                              ArtMethod*** sp)
      : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
        jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
     ComputeGenericJniFrameSize fsc;
@@ -1655,7 +1651,7 @@ class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
     sm_.AdvancePointer(self->GetJniEnv());
 
     if (is_static) {
-      sm_.AdvanceHandleScope((*sp)->AsMirrorPtr()->GetDeclaringClass());
+      sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
     }
   }
 
@@ -1811,10 +1807,9 @@ void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock)
  *    1) How many bytes of the alloca can be released, if the value is non-negative.
  *    2) An error, if the value is negative.
  */
-extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self,
-                                                      StackReference<mirror::ArtMethod>* sp)
+extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-  mirror::ArtMethod* called = sp->AsMirrorPtr();
+  ArtMethod* called = *sp;
   DCHECK(called->IsNative()) << PrettyMethod(called, true);
   uint32_t shorty_len = 0;
   const char* shorty = called->GetShorty(&shorty_len);
@@ -1887,15 +1882,15 @@ extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self,
  */
 extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-  StackReference<mirror::ArtMethod>* sp = self->GetManagedStack()->GetTopQuickFrame();
+  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
   uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
-  mirror::ArtMethod* called = sp->AsMirrorPtr();
+  ArtMethod* called = *sp;
   uint32_t cookie = *(sp32 - 1);
 
   jobject lock = nullptr;
   if (called->IsSynchronized()) {
     HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp)
-                                                        + sizeof(StackReference<mirror::ArtMethod>));
+                                                        + sizeof(*sp));
     lock = table->GetHandle(0).ToJObject();
   }
 
@@ -1947,17 +1942,14 @@ extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result,
 
 template<InvokeType type, bool access_check>
 static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
-                                     mirror::ArtMethod* caller_method,
-                                     Thread* self, StackReference<mirror::ArtMethod>* sp);
+                                     ArtMethod* caller_method, Thread* self, ArtMethod** sp);
 
 template<InvokeType type, bool access_check>
 static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
-                                     mirror::ArtMethod* caller_method,
-                                     Thread* self, StackReference<mirror::ArtMethod>* sp) {
+                                     ArtMethod* caller_method, Thread* self, ArtMethod** sp) {
   ScopedQuickEntrypointChecks sqec(self);
-  DCHECK_EQ(sp->AsMirrorPtr(), Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
-  mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
-                                             type);
+  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
+  ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check, type);
   if (UNLIKELY(method == nullptr)) {
     const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
     uint32_t shorty_len;
@@ -1994,9 +1986,9 @@ static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_o
   template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)                          \
   TwoWordReturn artInvokeCommon<type, access_check>(uint32_t method_idx,        \
                                                     mirror::Object* this_object,\
-                                                    mirror::ArtMethod* caller_method, \
+                                                    ArtMethod* caller_method,   \
                                                     Thread* self,               \
-                                                    StackReference<mirror::ArtMethod>* sp) \
+                                                    ArtMethod** sp)             \
 
 EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
 EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
@@ -2013,8 +2005,7 @@ EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
 // See comments in runtime_support_asm.S
 extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
     uint32_t method_idx, mirror::Object* this_object,
-    mirror::ArtMethod* caller_method, Thread* self,
-    StackReference<mirror::ArtMethod>* sp)
+    ArtMethod* caller_method, Thread* self, ArtMethod** sp)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   return artInvokeCommon<kInterface, true>(method_idx, this_object, caller_method, self, sp);
@@ -2022,8 +2013,7 @@ extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
 
 extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
     uint32_t method_idx, mirror::Object* this_object,
-    mirror::ArtMethod* caller_method, Thread* self,
-    StackReference<mirror::ArtMethod>* sp)
+    ArtMethod* caller_method, Thread* self, ArtMethod** sp)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method, self, sp);
@@ -2031,8 +2021,7 @@ extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
 
 extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
     uint32_t method_idx, mirror::Object* this_object,
-    mirror::ArtMethod* caller_method, Thread* self,
-    StackReference<mirror::ArtMethod>* sp)
+    ArtMethod* caller_method, Thread* self, ArtMethod** sp)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method, self, sp);
@@ -2040,8 +2029,7 @@ extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
 
 extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
     uint32_t method_idx, mirror::Object* this_object,
-    mirror::ArtMethod* caller_method, Thread* self,
-    StackReference<mirror::ArtMethod>* sp)
+    ArtMethod* caller_method, Thread* self, ArtMethod** sp)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method, self, sp);
@@ -2049,31 +2037,31 @@ extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
 
 extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
     uint32_t method_idx, mirror::Object* this_object,
-    mirror::ArtMethod* caller_method, Thread* self,
-    StackReference<mirror::ArtMethod>* sp)
+    ArtMethod* caller_method, Thread* self, ArtMethod** sp)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method, self, sp);
 }
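EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL exists so that artInvokeCommon, whose definition is out of line in this file, is explicitly instantiated for every invoke kind and access-check flavour the trampolines need. A compact sketch of the same explicit-instantiation-via-macro pattern, with toy names in place of the runtime's:

#include <cassert>

enum class InvokeKind { kDirect, kVirtual };

// Out-of-line definition of a function template.
template <InvokeKind kind, bool access_check>
int InvokeCommonSketch(int method_idx) {
  return access_check ? method_idx + 1 : method_idx;
}

// Explicit instantiation macro, mirroring the EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL idea.
#define EXPLICIT_INVOKE_COMMON_SKETCH(kind, access_check) \
  template int InvokeCommonSketch<kind, access_check>(int method_idx)

EXPLICIT_INVOKE_COMMON_SKETCH(InvokeKind::kDirect, false);
EXPLICIT_INVOKE_COMMON_SKETCH(InvokeKind::kDirect, true);
EXPLICIT_INVOKE_COMMON_SKETCH(InvokeKind::kVirtual, false);
EXPLICIT_INVOKE_COMMON_SKETCH(InvokeKind::kVirtual, true);

int main() {
  assert((InvokeCommonSketch<InvokeKind::kVirtual, true>(41)) == 42);
}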
-extern "C" TwoWordReturn artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method, +extern "C" TwoWordReturn artInvokeInterfaceTrampoline(ArtMethod* interface_method, mirror::Object* this_object, - mirror::ArtMethod* caller_method, + ArtMethod* caller_method, Thread* self, - StackReference<mirror::ArtMethod>* sp) + ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { ScopedQuickEntrypointChecks sqec(self); - mirror::ArtMethod* method; + ArtMethod* method; if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) { - method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method); + method = this_object->GetClass()->FindVirtualMethodForInterface( + interface_method, sizeof(void*)); if (UNLIKELY(method == nullptr)) { - ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object, - caller_method); + ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch( + interface_method, this_object, caller_method); return GetTwoWordFailureValue(); // Failure. } } else { - DCHECK(interface_method == Runtime::Current()->GetResolutionMethod()); + DCHECK_EQ(interface_method, Runtime::Current()->GetResolutionMethod()); // Find the caller PC. constexpr size_t pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kRefsAndArgs); diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc index 01c17acdcc..5cdf9677ef 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints_test.cc @@ -16,9 +16,9 @@ #include <stdint.h> +#include "art_method-inl.h" #include "callee_save_frame.h" #include "common_runtime_test.h" -#include "mirror/art_method-inl.h" #include "quick/quick_method_frame_info.h" namespace art { @@ -31,8 +31,7 @@ class QuickTrampolineEntrypointsTest : public CommonRuntimeTest { options->push_back(std::make_pair("imageinstructionset", "x86_64")); } - static mirror::ArtMethod* CreateCalleeSaveMethod(InstructionSet isa, - Runtime::CalleeSaveType type) + static ArtMethod* CreateCalleeSaveMethod(InstructionSet isa, Runtime::CalleeSaveType type) NO_THREAD_SAFETY_ANALYSIS { Runtime* r = Runtime::Current(); @@ -40,7 +39,7 @@ class QuickTrampolineEntrypointsTest : public CommonRuntimeTest { t->TransitionFromSuspendedToRunnable(); // So we can create callee-save methods. r->SetInstructionSet(isa); - mirror::ArtMethod* save_method = r->CreateCalleeSaveMethod(); + ArtMethod* save_method = r->CreateCalleeSaveMethod(); r->SetCalleeSaveMethod(save_method, type); t->TransitionFromRunnableToSuspended(ThreadState::kNative); // So we can shut down. 
@@ -50,7 +49,7 @@ class QuickTrampolineEntrypointsTest : public CommonRuntimeTest {
 
   static void CheckFrameSize(InstructionSet isa, Runtime::CalleeSaveType type, uint32_t save_size)
       NO_THREAD_SAFETY_ANALYSIS {
-    mirror::ArtMethod* save_method = CreateCalleeSaveMethod(isa, type);
+    ArtMethod* save_method = CreateCalleeSaveMethod(isa, type);
     QuickMethodFrameInfo frame_info = save_method->GetQuickFrameInfo();
     EXPECT_EQ(frame_info.FrameSizeInBytes(), save_size) << "Expected and real size differs for "
         << type << " core spills=" << std::hex << frame_info.CoreSpillMask() << " fp spills="
@@ -59,7 +58,7 @@ class QuickTrampolineEntrypointsTest : public CommonRuntimeTest {
 
   static void CheckPCOffset(InstructionSet isa, Runtime::CalleeSaveType type, size_t pc_offset)
       NO_THREAD_SAFETY_ANALYSIS {
-    mirror::ArtMethod* save_method = CreateCalleeSaveMethod(isa, type);
+    ArtMethod* save_method = CreateCalleeSaveMethod(isa, type);
     QuickMethodFrameInfo frame_info = save_method->GetQuickFrameInfo();
     EXPECT_EQ(save_method->GetReturnPcOffset().SizeValue(), pc_offset)
         << "Expected and real pc offset differs for " << type
diff --git a/runtime/entrypoints/runtime_asm_entrypoints.h b/runtime/entrypoints/runtime_asm_entrypoints.h
index bfe7ee8a34..8209dc808e 100644
--- a/runtime/entrypoints/runtime_asm_entrypoints.h
+++ b/runtime/entrypoints/runtime_asm_entrypoints.h
@@ -29,19 +29,19 @@ static inline const void* GetJniDlsymLookupStub() {
 }
 
 // Return the address of quick stub code for handling IMT conflicts.
-extern "C" void art_quick_imt_conflict_trampoline(mirror::ArtMethod*);
+extern "C" void art_quick_imt_conflict_trampoline(ArtMethod*);
 static inline const void* GetQuickImtConflictStub() {
   return reinterpret_cast<const void*>(art_quick_imt_conflict_trampoline);
 }
 
 // Return the address of quick stub code for bridging from quick code to the interpreter.
-extern "C" void art_quick_to_interpreter_bridge(mirror::ArtMethod*);
+extern "C" void art_quick_to_interpreter_bridge(ArtMethod*);
 static inline const void* GetQuickToInterpreterBridge() {
   return reinterpret_cast<const void*>(art_quick_to_interpreter_bridge);
 }
 
 // Return the address of quick stub code for handling JNI calls.
-extern "C" void art_quick_generic_jni_trampoline(mirror::ArtMethod*);
+extern "C" void art_quick_generic_jni_trampoline(ArtMethod*);
 static inline const void* GetQuickGenericJniStub() {
   return reinterpret_cast<const void*>(art_quick_generic_jni_trampoline);
 }
 
@@ -53,7 +53,7 @@ static inline const void* GetQuickProxyInvokeHandler() {
 }
 
 // Return the address of quick stub code for resolving a method at first call.
-extern "C" void art_quick_resolution_trampoline(mirror::ArtMethod*);
+extern "C" void art_quick_resolution_trampoline(ArtMethod*);
 static inline const void* GetQuickResolutionStub() {
   return reinterpret_cast<const void*>(art_quick_resolution_trampoline);
 }
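runtime_asm_entrypoints.h follows one pattern throughout: each assembly stub is declared as an extern "C" function taking an ArtMethod*, and a small inline accessor hands its address back as an opaque const void* so callers can compare and install code pointers without caring about the signature. A stand-alone sketch of that pattern, assuming an ordinary C++ function in place of a real .S symbol:

#include <cassert>

struct ArtMethodSketch {};

// Stand-in for an assembly stub; in the runtime this symbol is defined in a .S file.
extern "C" void sketch_quick_stub(ArtMethodSketch*) {}

// Address accessor in the style of GetQuickResolutionStub() and friends.
// The function-to-void* cast is what the real header relies on as well.
static inline const void* GetSketchQuickStub() {
  return reinterpret_cast<const void*>(sketch_quick_stub);
}

int main() {
  // Code pointers are handled as opaque const void* by the rest of the runtime.
  assert(GetSketchQuickStub() == reinterpret_cast<const void*>(&sketch_quick_stub));
}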