path: root/runtime/debugger.cc
author     Mathieu Chartier <mathieuc@google.com>   2015-04-22 13:56:20 -0700
committer  Mathieu Chartier <mathieuc@google.com>   2015-06-02 09:21:27 -0700
commit     3d21bdf8894e780d349c481e5c9e29fe1556051c (patch)
tree       61a5231f36c0dabd73457fec81df103462a05aff /runtime/debugger.cc
parent     71f0a8a123fa27bdc857a98afebbaf0ed09dac15 (diff)
Move mirror::ArtMethod to native
Optimizing + quick tests are passing, devices boot. TODO: Test and fix bugs in mips64.

Saves 16 bytes per ArtMethod in most cases, a 7.5MB reduction in system PSS. Some of the savings come from removal of the virtual methods and direct methods object arrays.

Bug: 19264997
(cherry picked from commit e401d146407d61eeb99f8d6176b2ac13c4df1e33)
Change-Id: I622469a0cfa0e7082a2119f3d6a9491eb61e3f3d

Fix some ArtMethod related bugs

Added root visiting for runtime methods; not currently required since the GcRoots in these methods are null.

Added missing GetInterfaceMethodIfProxy in GetMethodName; fixes --trace run-tests 005, 044.

Fixed an optimizing compiler bug where we used a normal stack location instead of a double on ARM64; this fixes the debuggable tests.

TODO: Fix JDWP tests.

Bug: 19264997
Change-Id: I7c55f69c61d1b45351fd0dc7185ffe5efad82bd3

ART: Fix casts for 64-bit pointers on 32-bit compiler.

Bug: 19264997
Change-Id: Ief45cdd4bae5a43fc8bfdfa7cf744e2c57529457

Fix JDWP tests after ArtMethod change

Fixes Throwable::GetStackDepth for exception event detection after the internal stack trace representation change. Adds the missing ArtMethod::GetInterfaceMethodIfProxy call in the case of a proxy method.

Bug: 19264997
Change-Id: I363e293796848c3ec491c963813f62d868da44d2

Fix accidental IMT and root marking regression

Was always using the conflict trampoline. Also includes a fix for a regression in GC time caused by extra roots; most of the regression was IMT. Fixed a bug in DumpGcPerformanceInfo where we would get SIGABRT due to a detached thread.

EvaluateAndApplyChanges: from ~2500 to ~1980
GC time: 8.2s -> 7.2s, due to 1s less of MarkConcurrentRoots

Bug: 19264997
Change-Id: I4333e80a8268c2ed1284f87f25b9f113d4f2c7e0

Fix bogus image test assert

Previously we were comparing the size of the non-moving space to the size of the image file. Now we properly compare the size of the image space against the size of the image file.

Bug: 19264997
Change-Id: I7359f1f73ae3df60c5147245935a24431c04808a

[MIPS64] Fix art_quick_invoke_stub argument offsets.

The ArtMethod reference's size got bigger, so we need to move the other args and leave enough space for the ArtMethod* and the 'this' pointer. This fixes mips64 boot.

Bug: 19264997
Change-Id: I47198d5f39a4caab30b3b77479d5eedaad5006ab
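The recurring pattern in the diff below is mechanical: ArtMethod is no longer a mirror:: heap object managed through GC handles, so call sites switch to plain ArtMethod* pointers and to accessors that take an explicit image pointer size. A minimal before/after sketch of that call-site shape, simplified and illustrative rather than the exact ART declarations:

    // Before: methods were mirror objects; keeping one live across a GC point meant a handle.
    mirror::ArtMethod* m_old = klass->GetDirectMethod(i);
    StackHandleScope<1> hs(self);
    Handle<mirror::ArtMethod> h_m(hs.NewHandle(m_old));

    // After: methods live in native memory, so a raw pointer is enough, and the
    // method-array accessors take the image pointer size explicitly.
    auto ptr_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
    ArtMethod* m = klass->GetDirectMethod(i, ptr_size);
    const char* name = m->GetInterfaceMethodIfProxy(sizeof(void*))->GetName();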
Diffstat (limited to 'runtime/debugger.cc')
-rw-r--r--  runtime/debugger.cc  211
1 file changed, 103 insertions, 108 deletions
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 728e8e3ccb..24615e2a66 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -22,6 +22,7 @@
#include "arch/context.h"
#include "art_field-inl.h"
+#include "art_method-inl.h"
#include "base/time_utils.h"
#include "class_linker.h"
#include "class_linker-inl.h"
@@ -32,7 +33,6 @@
#include "gc/space/space-inl.h"
#include "handle_scope.h"
#include "jdwp/object_registry.h"
-#include "mirror/art_method-inl.h"
#include "mirror/class.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
@@ -78,17 +78,17 @@ class AllocRecordStackTraceElement {
}
int32_t LineNumber() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- mirror::ArtMethod* method = Method();
+ ArtMethod* method = Method();
DCHECK(method != nullptr);
return method->GetLineNumFromDexPC(DexPc());
}
- mirror::ArtMethod* Method() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ArtMethod* Method() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ScopedObjectAccessUnchecked soa(Thread::Current());
return soa.DecodeMethod(method_);
}
- void SetMethod(mirror::ArtMethod* m) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ void SetMethod(ArtMethod* m) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ScopedObjectAccessUnchecked soa(Thread::Current());
method_ = soa.EncodeMethod(m);
}
@@ -184,7 +184,7 @@ class AllocRecord {
class Breakpoint {
public:
- Breakpoint(mirror::ArtMethod* method, uint32_t dex_pc,
+ Breakpoint(ArtMethod* method, uint32_t dex_pc,
DeoptimizationRequest::Kind deoptimization_kind)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
: method_(nullptr), dex_pc_(dex_pc), deoptimization_kind_(deoptimization_kind) {
@@ -202,7 +202,7 @@ class Breakpoint {
method_ = soa.EncodeMethod(other.Method());
}
- mirror::ArtMethod* Method() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ArtMethod* Method() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ScopedObjectAccessUnchecked soa(Thread::Current());
return soa.DecodeMethod(method_);
}
@@ -235,7 +235,7 @@ class DebugInstrumentationListener FINAL : public instrumentation::Instrumentati
DebugInstrumentationListener() {}
virtual ~DebugInstrumentationListener() {}
- void MethodEntered(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
+ void MethodEntered(Thread* thread, mirror::Object* this_object, ArtMethod* method,
uint32_t dex_pc)
OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (method->IsNative()) {
@@ -261,7 +261,7 @@ class DebugInstrumentationListener FINAL : public instrumentation::Instrumentati
}
}
- void MethodExited(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
+ void MethodExited(Thread* thread, mirror::Object* this_object, ArtMethod* method,
uint32_t dex_pc, const JValue& return_value)
OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (method->IsNative()) {
@@ -279,14 +279,14 @@ class DebugInstrumentationListener FINAL : public instrumentation::Instrumentati
}
void MethodUnwind(Thread* thread ATTRIBUTE_UNUSED, mirror::Object* this_object ATTRIBUTE_UNUSED,
- mirror::ArtMethod* method, uint32_t dex_pc)
+ ArtMethod* method, uint32_t dex_pc)
OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
// We're not recorded to listen to this kind of event, so complain.
LOG(ERROR) << "Unexpected method unwind event in debugger " << PrettyMethod(method)
<< " " << dex_pc;
}
- void DexPcMoved(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
+ void DexPcMoved(Thread* thread, mirror::Object* this_object, ArtMethod* method,
uint32_t new_dex_pc)
OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (IsListeningToMethodExit() && IsReturn(method, new_dex_pc)) {
@@ -308,13 +308,13 @@ class DebugInstrumentationListener FINAL : public instrumentation::Instrumentati
}
void FieldRead(Thread* thread ATTRIBUTE_UNUSED, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc, ArtField* field)
+ ArtMethod* method, uint32_t dex_pc, ArtField* field)
OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Dbg::PostFieldAccessEvent(method, dex_pc, this_object, field);
}
void FieldWritten(Thread* thread ATTRIBUTE_UNUSED, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc, ArtField* field,
+ ArtMethod* method, uint32_t dex_pc, ArtField* field,
const JValue& field_value)
OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Dbg::PostFieldModificationEvent(method, dex_pc, this_object, field, &field_value);
@@ -326,14 +326,14 @@ class DebugInstrumentationListener FINAL : public instrumentation::Instrumentati
}
// We only care about how many backward branches were executed in the Jit.
- void BackwardBranch(Thread* /*thread*/, mirror::ArtMethod* method, int32_t dex_pc_offset)
+ void BackwardBranch(Thread* /*thread*/, ArtMethod* method, int32_t dex_pc_offset)
OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
LOG(ERROR) << "Unexpected backward branch event in debugger " << PrettyMethod(method)
<< " " << dex_pc_offset;
}
private:
- static bool IsReturn(mirror::ArtMethod* method, uint32_t dex_pc)
+ static bool IsReturn(ArtMethod* method, uint32_t dex_pc)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
const DexFile::CodeItem* code_item = method->GetCodeItem();
const Instruction* instruction = Instruction::At(&code_item->insns_[dex_pc]);
@@ -408,11 +408,6 @@ static std::vector<Breakpoint> gBreakpoints GUARDED_BY(Locks::breakpoint_lock_);
void DebugInvokeReq::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
receiver.VisitRootIfNonNull(visitor, root_info); // null for static method call.
klass.VisitRoot(visitor, root_info);
- method.VisitRoot(visitor, root_info);
-}
-
-void SingleStepControl::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
- method_.VisitRootIfNonNull(visitor, root_info);
}
void SingleStepControl::AddDexPc(uint32_t dex_pc) {
@@ -423,7 +418,7 @@ bool SingleStepControl::ContainsDexPc(uint32_t dex_pc) const {
return dex_pcs_.find(dex_pc) == dex_pcs_.end();
}
-static bool IsBreakpoint(const mirror::ArtMethod* m, uint32_t dex_pc)
+static bool IsBreakpoint(const ArtMethod* m, uint32_t dex_pc)
LOCKS_EXCLUDED(Locks::breakpoint_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ReaderMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_);
@@ -1395,9 +1390,8 @@ JDWP::FieldId Dbg::ToFieldId(const ArtField* f) {
return static_cast<JDWP::FieldId>(reinterpret_cast<uintptr_t>(f));
}
-static JDWP::MethodId ToMethodId(const mirror::ArtMethod* m)
+static JDWP::MethodId ToMethodId(const ArtMethod* m)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CHECK(!kMovingMethods);
return static_cast<JDWP::MethodId>(reinterpret_cast<uintptr_t>(m));
}
@@ -1406,17 +1400,16 @@ static ArtField* FromFieldId(JDWP::FieldId fid)
return reinterpret_cast<ArtField*>(static_cast<uintptr_t>(fid));
}
-static mirror::ArtMethod* FromMethodId(JDWP::MethodId mid)
+static ArtMethod* FromMethodId(JDWP::MethodId mid)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CHECK(!kMovingMethods);
- return reinterpret_cast<mirror::ArtMethod*>(static_cast<uintptr_t>(mid));
+ return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(mid));
}
bool Dbg::MatchThread(JDWP::ObjectId expected_thread_id, Thread* event_thread) {
CHECK(event_thread != nullptr);
JDWP::JdwpError error;
- mirror::Object* expected_thread_peer = gRegistry->Get<mirror::Object*>(expected_thread_id,
- &error);
+ mirror::Object* expected_thread_peer = gRegistry->Get<mirror::Object*>(
+ expected_thread_id, &error);
return expected_thread_peer == event_thread->GetPeer();
}
@@ -1425,7 +1418,7 @@ bool Dbg::MatchLocation(const JDWP::JdwpLocation& expected_location,
if (expected_location.dex_pc != event_location.dex_pc) {
return false;
}
- mirror::ArtMethod* m = FromMethodId(expected_location.method_id);
+ ArtMethod* m = FromMethodId(expected_location.method_id);
return m == event_location.method;
}
@@ -1454,7 +1447,7 @@ bool Dbg::MatchInstance(JDWP::ObjectId expected_instance_id, mirror::Object* eve
return modifier_instance == event_instance;
}
-void Dbg::SetJdwpLocation(JDWP::JdwpLocation* location, mirror::ArtMethod* m, uint32_t dex_pc)
+void Dbg::SetJdwpLocation(JDWP::JdwpLocation* location, ArtMethod* m, uint32_t dex_pc)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
LOCKS_EXCLUDED(Locks::thread_list_lock_,
Locks::thread_suspend_count_lock_) {
@@ -1470,11 +1463,11 @@ void Dbg::SetJdwpLocation(JDWP::JdwpLocation* location, mirror::ArtMethod* m, ui
}
std::string Dbg::GetMethodName(JDWP::MethodId method_id) {
- mirror::ArtMethod* m = FromMethodId(method_id);
+ ArtMethod* m = FromMethodId(method_id);
if (m == nullptr) {
return "null";
}
- return m->GetName();
+ return m->GetInterfaceMethodIfProxy(sizeof(void*))->GetName();
}
std::string Dbg::GetFieldName(JDWP::FieldId field_id) {
@@ -1503,7 +1496,7 @@ static uint32_t MangleAccessFlags(uint32_t accessFlags) {
* expect slots to begin with arguments, but dex code places them at
* the end.
*/
-static uint16_t MangleSlot(uint16_t slot, mirror::ArtMethod* m)
+static uint16_t MangleSlot(uint16_t slot, ArtMethod* m)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
const DexFile::CodeItem* code_item = m->GetCodeItem();
if (code_item == nullptr) {
@@ -1525,14 +1518,14 @@ static uint16_t MangleSlot(uint16_t slot, mirror::ArtMethod* m)
* Circularly shifts registers so that arguments come last. Reverts
* slots to dex style argument placement.
*/
-static uint16_t DemangleSlot(uint16_t slot, mirror::ArtMethod* m, JDWP::JdwpError* error)
+static uint16_t DemangleSlot(uint16_t slot, ArtMethod* m, JDWP::JdwpError* error)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
const DexFile::CodeItem* code_item = m->GetCodeItem();
if (code_item == nullptr) {
// We should not get here for a method without code (native, proxy or abstract). Log it and
// return the slot as is since all registers are arguments.
LOG(WARNING) << "Trying to demangle slot for method without code " << PrettyMethod(m);
- uint16_t vreg_count = mirror::ArtMethod::NumArgRegisters(m->GetShorty());
+ uint16_t vreg_count = ArtMethod::NumArgRegisters(m->GetShorty());
if (slot < vreg_count) {
*error = JDWP::ERR_NONE;
return slot;
@@ -1591,14 +1584,18 @@ JDWP::JdwpError Dbg::OutputDeclaredMethods(JDWP::RefTypeId class_id, bool with_g
expandBufAdd4BE(pReply, direct_method_count + virtual_method_count);
+ auto* cl = Runtime::Current()->GetClassLinker();
+ auto ptr_size = cl->GetImagePointerSize();
for (size_t i = 0; i < direct_method_count + virtual_method_count; ++i) {
- mirror::ArtMethod* m = (i < direct_method_count) ? c->GetDirectMethod(i) : c->GetVirtualMethod(i - direct_method_count);
+ ArtMethod* m = i < direct_method_count ?
+ c->GetDirectMethod(i, ptr_size) : c->GetVirtualMethod(i - direct_method_count, ptr_size);
expandBufAddMethodId(pReply, ToMethodId(m));
- expandBufAddUtf8String(pReply, m->GetName());
- expandBufAddUtf8String(pReply, m->GetSignature().ToString());
+ expandBufAddUtf8String(pReply, m->GetInterfaceMethodIfProxy(sizeof(void*))->GetName());
+ expandBufAddUtf8String(pReply,
+ m->GetInterfaceMethodIfProxy(sizeof(void*))->GetSignature().ToString());
if (with_generic) {
- static const char genericSignature[1] = "";
- expandBufAddUtf8String(pReply, genericSignature);
+ const char* generic_signature = "";
+ expandBufAddUtf8String(pReply, generic_signature);
}
expandBufAdd4BE(pReply, MangleAccessFlags(m->GetAccessFlags()));
}
@@ -1635,7 +1632,7 @@ void Dbg::OutputLineTable(JDWP::RefTypeId, JDWP::MethodId method_id, JDWP::Expan
return false;
}
};
- mirror::ArtMethod* m = FromMethodId(method_id);
+ ArtMethod* m = FromMethodId(method_id);
const DexFile::CodeItem* code_item = m->GetCodeItem();
uint64_t start, end;
if (code_item == nullptr) {
@@ -1670,7 +1667,7 @@ void Dbg::OutputLineTable(JDWP::RefTypeId, JDWP::MethodId method_id, JDWP::Expan
void Dbg::OutputVariableTable(JDWP::RefTypeId, JDWP::MethodId method_id, bool with_generic,
JDWP::ExpandBuf* pReply) {
struct DebugCallbackContext {
- mirror::ArtMethod* method;
+ ArtMethod* method;
JDWP::ExpandBuf* pReply;
size_t variable_count;
bool with_generic;
@@ -1699,12 +1696,12 @@ void Dbg::OutputVariableTable(JDWP::RefTypeId, JDWP::MethodId method_id, bool wi
++pContext->variable_count;
}
};
- mirror::ArtMethod* m = FromMethodId(method_id);
+ ArtMethod* m = FromMethodId(method_id);
// arg_count considers doubles and longs to take 2 units.
// variable_count considers everything to take 1 unit.
std::string shorty(m->GetShorty());
- expandBufAdd4BE(pReply, mirror::ArtMethod::NumArgRegisters(shorty));
+ expandBufAdd4BE(pReply, ArtMethod::NumArgRegisters(shorty));
// We don't know the total number of variables yet, so leave a blank and update it later.
size_t variable_count_offset = expandBufGetLength(pReply);
@@ -1728,7 +1725,7 @@ void Dbg::OutputVariableTable(JDWP::RefTypeId, JDWP::MethodId method_id, bool wi
void Dbg::OutputMethodReturnValue(JDWP::MethodId method_id, const JValue* return_value,
JDWP::ExpandBuf* pReply) {
- mirror::ArtMethod* m = FromMethodId(method_id);
+ ArtMethod* m = FromMethodId(method_id);
JDWP::JdwpTag tag = BasicTagFromDescriptor(m->GetShorty());
OutputJValue(tag, return_value, pReply);
}
@@ -1742,7 +1739,7 @@ void Dbg::OutputFieldValue(JDWP::FieldId field_id, const JValue* field_value,
JDWP::JdwpError Dbg::GetBytecodes(JDWP::RefTypeId, JDWP::MethodId method_id,
std::vector<uint8_t>* bytecodes) {
- mirror::ArtMethod* m = FromMethodId(method_id);
+ ArtMethod* m = FromMethodId(method_id);
if (m == nullptr) {
return JDWP::ERR_INVALID_METHODID;
}
@@ -2470,7 +2467,7 @@ class FindFrameVisitor FINAL : public StackVisitor {
if (GetFrameId() != frame_id_) {
return true; // Not our frame, carry on.
}
- mirror::ArtMethod* m = GetMethod();
+ ArtMethod* m = GetMethod();
if (m->IsNative()) {
// We can't read/write local value from/into native method.
error_ = JDWP::ERR_OPAQUE_FRAME;
@@ -2548,7 +2545,7 @@ static JDWP::JdwpError FailGetLocalValue(const StackVisitor& visitor, uint16_t v
JDWP::JdwpError Dbg::GetLocalValue(const StackVisitor& visitor, ScopedObjectAccessUnchecked& soa,
int slot, JDWP::JdwpTag tag, uint8_t* buf, size_t width) {
- mirror::ArtMethod* m = visitor.GetMethod();
+ ArtMethod* m = visitor.GetMethod();
JDWP::JdwpError error = JDWP::ERR_NONE;
uint16_t vreg = DemangleSlot(slot, m, &error);
if (error != JDWP::ERR_NONE) {
@@ -2711,7 +2708,7 @@ static JDWP::JdwpError FailSetLocalValue(const StackVisitor& visitor, uint16_t v
JDWP::JdwpError Dbg::SetLocalValue(StackVisitor& visitor, int slot, JDWP::JdwpTag tag,
uint64_t value, size_t width) {
- mirror::ArtMethod* m = visitor.GetMethod();
+ ArtMethod* m = visitor.GetMethod();
JDWP::JdwpError error = JDWP::ERR_NONE;
uint16_t vreg = DemangleSlot(slot, m, &error);
if (error != JDWP::ERR_NONE) {
@@ -2786,7 +2783,7 @@ JDWP::JdwpError Dbg::SetLocalValue(StackVisitor& visitor, int slot, JDWP::JdwpTa
return JDWP::ERR_NONE;
}
-static void SetEventLocation(JDWP::EventLocation* location, mirror::ArtMethod* m, uint32_t dex_pc)
+static void SetEventLocation(JDWP::EventLocation* location, ArtMethod* m, uint32_t dex_pc)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK(location != nullptr);
if (m == nullptr) {
@@ -2797,7 +2794,7 @@ static void SetEventLocation(JDWP::EventLocation* location, mirror::ArtMethod* m
}
}
-void Dbg::PostLocationEvent(mirror::ArtMethod* m, int dex_pc, mirror::Object* this_object,
+void Dbg::PostLocationEvent(ArtMethod* m, int dex_pc, mirror::Object* this_object,
int event_flags, const JValue* return_value) {
if (!IsDebuggerActive()) {
return;
@@ -2830,7 +2827,7 @@ void Dbg::PostLocationEvent(mirror::ArtMethod* m, int dex_pc, mirror::Object* th
}
}
-void Dbg::PostFieldAccessEvent(mirror::ArtMethod* m, int dex_pc,
+void Dbg::PostFieldAccessEvent(ArtMethod* m, int dex_pc,
mirror::Object* this_object, ArtField* f) {
if (!IsDebuggerActive()) {
return;
@@ -2843,7 +2840,7 @@ void Dbg::PostFieldAccessEvent(mirror::ArtMethod* m, int dex_pc,
gJdwpState->PostFieldEvent(&location, f, this_object, nullptr, false);
}
-void Dbg::PostFieldModificationEvent(mirror::ArtMethod* m, int dex_pc,
+void Dbg::PostFieldModificationEvent(ArtMethod* m, int dex_pc,
mirror::Object* this_object, ArtField* f,
const JValue* field_value) {
if (!IsDebuggerActive()) {
@@ -2871,14 +2868,14 @@ class CatchLocationFinder : public StackVisitor {
exception_(exception),
handle_scope_(self),
this_at_throw_(handle_scope_.NewHandle<mirror::Object>(nullptr)),
- catch_method_(handle_scope_.NewHandle<mirror::ArtMethod>(nullptr)),
- throw_method_(handle_scope_.NewHandle<mirror::ArtMethod>(nullptr)),
+ catch_method_(nullptr),
+ throw_method_(nullptr),
catch_dex_pc_(DexFile::kDexNoIndex),
throw_dex_pc_(DexFile::kDexNoIndex) {
}
bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- mirror::ArtMethod* method = GetMethod();
+ ArtMethod* method = GetMethod();
DCHECK(method != nullptr);
if (method->IsRuntimeMethod()) {
// Ignore callee save method.
@@ -2887,25 +2884,23 @@ class CatchLocationFinder : public StackVisitor {
}
uint32_t dex_pc = GetDexPc();
- if (throw_method_.Get() == nullptr) {
+ if (throw_method_ == nullptr) {
// First Java method found. It is either the method that threw the exception,
// or the Java native method that is reporting an exception thrown by
// native code.
this_at_throw_.Assign(GetThisObject());
- throw_method_.Assign(method);
+ throw_method_ = method;
throw_dex_pc_ = dex_pc;
}
if (dex_pc != DexFile::kDexNoIndex) {
- StackHandleScope<2> hs(self_);
+ StackHandleScope<1> hs(self_);
uint32_t found_dex_pc;
Handle<mirror::Class> exception_class(hs.NewHandle(exception_->GetClass()));
- Handle<mirror::ArtMethod> h_method(hs.NewHandle(method));
bool unused_clear_exception;
- found_dex_pc = mirror::ArtMethod::FindCatchBlock(
- h_method, exception_class, dex_pc, &unused_clear_exception);
+ found_dex_pc = method->FindCatchBlock(exception_class, dex_pc, &unused_clear_exception);
if (found_dex_pc != DexFile::kDexNoIndex) {
- catch_method_.Assign(method);
+ catch_method_ = method;
catch_dex_pc_ = found_dex_pc;
return false; // End stack walk.
}
@@ -2913,12 +2908,12 @@ class CatchLocationFinder : public StackVisitor {
return true; // Continue stack walk.
}
- mirror::ArtMethod* GetCatchMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return catch_method_.Get();
+ ArtMethod* GetCatchMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return catch_method_;
}
- mirror::ArtMethod* GetThrowMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return throw_method_.Get();
+ ArtMethod* GetThrowMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return throw_method_;
}
mirror::Object* GetThisAtThrow() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -2936,10 +2931,10 @@ class CatchLocationFinder : public StackVisitor {
private:
Thread* const self_;
const Handle<mirror::Throwable>& exception_;
- StackHandleScope<3> handle_scope_;
+ StackHandleScope<1> handle_scope_;
MutableHandle<mirror::Object> this_at_throw_;
- MutableHandle<mirror::ArtMethod> catch_method_;
- MutableHandle<mirror::ArtMethod> throw_method_;
+ ArtMethod* catch_method_;
+ ArtMethod* throw_method_;
uint32_t catch_dex_pc_;
uint32_t throw_dex_pc_;
@@ -2973,7 +2968,7 @@ void Dbg::PostClassPrepare(mirror::Class* c) {
}
void Dbg::UpdateDebugger(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* m, uint32_t dex_pc,
+ ArtMethod* m, uint32_t dex_pc,
int event_flags, const JValue* return_value) {
if (!IsDebuggerActive() || dex_pc == static_cast<uint32_t>(-2) /* fake method exit */) {
return;
@@ -3222,7 +3217,7 @@ void Dbg::ManageDeoptimization() {
self->TransitionFromSuspendedToRunnable();
}
-static bool IsMethodPossiblyInlined(Thread* self, mirror::ArtMethod* m)
+static bool IsMethodPossiblyInlined(Thread* self, ArtMethod* m)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
const DexFile::CodeItem* code_item = m->GetCodeItem();
if (code_item == nullptr) {
@@ -3232,19 +3227,18 @@ static bool IsMethodPossiblyInlined(Thread* self, mirror::ArtMethod* m)
}
// Note: method verifier may cause thread suspension.
self->AssertThreadSuspensionIsAllowable();
- StackHandleScope<3> hs(self);
+ StackHandleScope<2> hs(self);
mirror::Class* declaring_class = m->GetDeclaringClass();
Handle<mirror::DexCache> dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
Handle<mirror::ClassLoader> class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
- Handle<mirror::ArtMethod> method(hs.NewHandle(m));
verifier::MethodVerifier verifier(self, dex_cache->GetDexFile(), dex_cache, class_loader,
- &m->GetClassDef(), code_item, m->GetDexMethodIndex(), method,
+ &m->GetClassDef(), code_item, m->GetDexMethodIndex(), m,
m->GetAccessFlags(), false, true, false, true);
// Note: we don't need to verify the method.
return InlineMethodAnalyser::AnalyseMethodCode(&verifier, nullptr);
}
-static const Breakpoint* FindFirstBreakpointForMethod(mirror::ArtMethod* m)
+static const Breakpoint* FindFirstBreakpointForMethod(ArtMethod* m)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::breakpoint_lock_) {
for (Breakpoint& breakpoint : gBreakpoints) {
if (breakpoint.Method() == m) {
@@ -3254,13 +3248,13 @@ static const Breakpoint* FindFirstBreakpointForMethod(mirror::ArtMethod* m)
return nullptr;
}
-bool Dbg::MethodHasAnyBreakpoints(mirror::ArtMethod* method) {
+bool Dbg::MethodHasAnyBreakpoints(ArtMethod* method) {
ReaderMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_);
return FindFirstBreakpointForMethod(method) != nullptr;
}
// Sanity checks all existing breakpoints on the same method.
-static void SanityCheckExistingBreakpoints(mirror::ArtMethod* m,
+static void SanityCheckExistingBreakpoints(ArtMethod* m,
DeoptimizationRequest::Kind deoptimization_kind)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::breakpoint_lock_) {
for (const Breakpoint& breakpoint : gBreakpoints) {
@@ -3289,7 +3283,7 @@ static void SanityCheckExistingBreakpoints(mirror::ArtMethod* m,
// If a breakpoint has already been set, we also return the first breakpoint
// through the given 'existing_brkpt' pointer.
static DeoptimizationRequest::Kind GetRequiredDeoptimizationKind(Thread* self,
- mirror::ArtMethod* m,
+ ArtMethod* m,
const Breakpoint** existing_brkpt)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (!Dbg::RequiresDeoptimization()) {
@@ -3353,7 +3347,7 @@ static DeoptimizationRequest::Kind GetRequiredDeoptimizationKind(Thread* self,
// request if we need to deoptimize.
void Dbg::WatchLocation(const JDWP::JdwpLocation* location, DeoptimizationRequest* req) {
Thread* const self = Thread::Current();
- mirror::ArtMethod* m = FromMethodId(location->method_id);
+ ArtMethod* m = FromMethodId(location->method_id);
DCHECK(m != nullptr) << "No method for method id " << location->method_id;
const Breakpoint* existing_breakpoint = nullptr;
@@ -3388,7 +3382,7 @@ void Dbg::WatchLocation(const JDWP::JdwpLocation* location, DeoptimizationReques
// request if we need to undeoptimize.
void Dbg::UnwatchLocation(const JDWP::JdwpLocation* location, DeoptimizationRequest* req) {
WriterMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_);
- mirror::ArtMethod* m = FromMethodId(location->method_id);
+ ArtMethod* m = FromMethodId(location->method_id);
DCHECK(m != nullptr) << "No method for method id " << location->method_id;
DeoptimizationRequest::Kind deoptimization_kind = DeoptimizationRequest::kNothing;
for (size_t i = 0, e = gBreakpoints.size(); i < e; ++i) {
@@ -3428,7 +3422,7 @@ void Dbg::UnwatchLocation(const JDWP::JdwpLocation* location, DeoptimizationRequ
}
}
-bool Dbg::IsForcedInterpreterNeededForCallingImpl(Thread* thread, mirror::ArtMethod* m) {
+bool Dbg::IsForcedInterpreterNeededForCallingImpl(Thread* thread, ArtMethod* m) {
const SingleStepControl* const ssc = thread->GetSingleStepControl();
if (ssc == nullptr) {
// If we are not single-stepping, then we don't have to force interpreter.
@@ -3448,7 +3442,7 @@ bool Dbg::IsForcedInterpreterNeededForCallingImpl(Thread* thread, mirror::ArtMet
return false;
}
-bool Dbg::IsForcedInterpreterNeededForResolutionImpl(Thread* thread, mirror::ArtMethod* m) {
+bool Dbg::IsForcedInterpreterNeededForResolutionImpl(Thread* thread, ArtMethod* m) {
instrumentation::Instrumentation* const instrumentation =
Runtime::Current()->GetInstrumentation();
// If we are in interpreter only mode, then we don't have to force interpreter.
@@ -3482,7 +3476,7 @@ bool Dbg::IsForcedInterpreterNeededForResolutionImpl(Thread* thread, mirror::Art
return instrumentation->IsDeoptimized(m);
}
-bool Dbg::IsForcedInstrumentationNeededForResolutionImpl(Thread* thread, mirror::ArtMethod* m) {
+bool Dbg::IsForcedInstrumentationNeededForResolutionImpl(Thread* thread, ArtMethod* m) {
// The upcall can be null and in that case we don't need to do anything.
if (m == nullptr) {
return false;
@@ -3519,7 +3513,7 @@ bool Dbg::IsForcedInstrumentationNeededForResolutionImpl(Thread* thread, mirror:
return instrumentation->IsDeoptimized(m);
}
-bool Dbg::IsForcedInterpreterNeededForUpcallImpl(Thread* thread, mirror::ArtMethod* m) {
+bool Dbg::IsForcedInterpreterNeededForUpcallImpl(Thread* thread, ArtMethod* m) {
// The upcall can be null and in that case we don't need to do anything.
if (m == nullptr) {
return false;
@@ -3623,7 +3617,7 @@ JDWP::JdwpError Dbg::ConfigureStep(JDWP::ObjectId thread_id, JDWP::JdwpStepSize
// TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
// annotalysis.
bool VisitFrame() NO_THREAD_SAFETY_ANALYSIS {
- mirror::ArtMethod* m = GetMethod();
+ ArtMethod* m = GetMethod();
if (!m->IsRuntimeMethod()) {
++stack_depth;
if (method == nullptr) {
@@ -3639,7 +3633,7 @@ JDWP::JdwpError Dbg::ConfigureStep(JDWP::ObjectId thread_id, JDWP::JdwpStepSize
}
int stack_depth;
- mirror::ArtMethod* method;
+ ArtMethod* method;
int32_t line_number;
};
@@ -3701,7 +3695,7 @@ JDWP::JdwpError Dbg::ConfigureStep(JDWP::ObjectId thread_id, JDWP::JdwpStepSize
return JDWP::ERR_OUT_OF_MEMORY;
}
- mirror::ArtMethod* m = single_step_control->GetMethod();
+ ArtMethod* m = single_step_control->GetMethod();
const int32_t line_number = visitor.line_number;
// Note: if the thread is not running Java code (pure native thread), there is no "current"
// method on the stack (and no line number either).
@@ -3838,7 +3832,7 @@ JDWP::JdwpError Dbg::InvokeMethod(JDWP::ObjectId thread_id, JDWP::ObjectId objec
return error;
}
- mirror::ArtMethod* m = FromMethodId(method_id);
+ ArtMethod* m = FromMethodId(method_id);
if (m->IsStatic() != (receiver == nullptr)) {
return JDWP::ERR_INVALID_METHODID;
}
@@ -3860,8 +3854,7 @@ JDWP::JdwpError Dbg::InvokeMethod(JDWP::ObjectId thread_id, JDWP::ObjectId objec
}
{
- StackHandleScope<3> hs(soa.Self());
- HandleWrapper<mirror::ArtMethod> h_m(hs.NewHandleWrapper(&m));
+ StackHandleScope<2> hs(soa.Self());
HandleWrapper<mirror::Object> h_obj(hs.NewHandleWrapper(&receiver));
HandleWrapper<mirror::Class> h_klass(hs.NewHandleWrapper(&c));
const DexFile::TypeList* types = m->GetParameterTypeList();
@@ -3873,7 +3866,7 @@ JDWP::JdwpError Dbg::InvokeMethod(JDWP::ObjectId thread_id, JDWP::ObjectId objec
if (shorty[i + 1] == 'L') {
// Did we really get an argument of an appropriate reference type?
mirror::Class* parameter_type =
- h_m->GetClassFromTypeIndex(types->GetTypeItem(i).type_idx_, true);
+ m->GetClassFromTypeIndex(types->GetTypeItem(i).type_idx_, true);
mirror::Object* argument = gRegistry->Get<mirror::Object*>(arg_values[i], &error);
if (error != JDWP::ERR_NONE) {
return JDWP::ERR_INVALID_OBJECT;
@@ -3976,32 +3969,34 @@ void Dbg::ExecuteMethod(DebugInvokeReq* pReq) {
// We can be called while an exception is pending. We need
// to preserve that across the method invocation.
- StackHandleScope<4> hs(soa.Self());
+ StackHandleScope<3> hs(soa.Self());
auto old_exception = hs.NewHandle<mirror::Throwable>(soa.Self()->GetException());
soa.Self()->ClearException();
// Translate the method through the vtable, unless the debugger wants to suppress it.
- MutableHandle<mirror::ArtMethod> m(hs.NewHandle(pReq->method.Read()));
+ auto* m = pReq->method;
+ auto image_pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
if ((pReq->options & JDWP::INVOKE_NONVIRTUAL) == 0 && pReq->receiver.Read() != nullptr) {
- mirror::ArtMethod* actual_method = pReq->klass.Read()->FindVirtualMethodForVirtualOrInterface(m.Get());
- if (actual_method != m.Get()) {
- VLOG(jdwp) << "ExecuteMethod translated " << PrettyMethod(m.Get())
+ ArtMethod* actual_method =
+ pReq->klass.Read()->FindVirtualMethodForVirtualOrInterface(m, image_pointer_size);
+ if (actual_method != m) {
+ VLOG(jdwp) << "ExecuteMethod translated " << PrettyMethod(m)
<< " to " << PrettyMethod(actual_method);
- m.Assign(actual_method);
+ m = actual_method;
}
}
- VLOG(jdwp) << "ExecuteMethod " << PrettyMethod(m.Get())
+ VLOG(jdwp) << "ExecuteMethod " << PrettyMethod(m)
<< " receiver=" << pReq->receiver.Read()
<< " arg_count=" << pReq->arg_count;
- CHECK(m.Get() != nullptr);
+ CHECK(m != nullptr);
CHECK_EQ(sizeof(jvalue), sizeof(uint64_t));
ScopedLocalRef<jobject> ref(soa.Env(), soa.AddLocalReference<jobject>(pReq->receiver.Read()));
- JValue result = InvokeWithJValues(soa, ref.get(), soa.EncodeMethod(m.Get()),
+ JValue result = InvokeWithJValues(soa, ref.get(), soa.EncodeMethod(m),
reinterpret_cast<jvalue*>(pReq->arg_values));
- pReq->result_tag = BasicTagFromDescriptor(m.Get()->GetShorty());
+ pReq->result_tag = BasicTagFromDescriptor(m->GetShorty());
const bool is_object_result = (pReq->result_tag == JDWP::JT_OBJECT);
Handle<mirror::Object> object_result = hs.NewHandle(is_object_result ? result.GetL() : nullptr);
Handle<mirror::Throwable> exception = hs.NewHandle(soa.Self()->GetException());
@@ -4744,7 +4739,7 @@ struct AllocRecordStackVisitor : public StackVisitor {
if (depth >= kMaxAllocRecordStackDepth) {
return false;
}
- mirror::ArtMethod* m = GetMethod();
+ ArtMethod* m = GetMethod();
if (!m->IsRuntimeMethod()) {
record->StackElement(depth)->SetMethod(m);
record->StackElement(depth)->SetDexPc(GetDexPc());
@@ -4828,7 +4823,7 @@ void Dbg::DumpRecentAllocations() {
for (size_t stack_frame = 0; stack_frame < kMaxAllocRecordStackDepth; ++stack_frame) {
AllocRecordStackTraceElement* stack_element = record->StackElement(stack_frame);
- mirror::ArtMethod* m = stack_element->Method();
+ ArtMethod* m = stack_element->Method();
if (m == nullptr) {
break;
}
@@ -4884,7 +4879,7 @@ class StringTable {
DISALLOW_COPY_AND_ASSIGN(StringTable);
};
-static const char* GetMethodSourceFile(mirror::ArtMethod* method)
+static const char* GetMethodSourceFile(ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK(method != nullptr);
const char* source_file = method->GetDeclaringClassSourceFile();
@@ -4957,7 +4952,7 @@ jbyteArray Dbg::GetRecentAllocations() {
std::string temp;
class_names.Add(record->Type()->GetDescriptor(&temp));
for (size_t i = 0; i < kMaxAllocRecordStackDepth; i++) {
- mirror::ArtMethod* m = record->StackElement(i)->Method();
+ ArtMethod* m = record->StackElement(i)->Method();
if (m != nullptr) {
class_names.Add(m->GetDeclaringClassDescriptor());
method_names.Add(m->GetName());
@@ -5019,7 +5014,7 @@ jbyteArray Dbg::GetRecentAllocations() {
// (2b) method name
// (2b) method source file
// (2b) line number, clipped to 32767; -2 if native; -1 if no source
- mirror::ArtMethod* m = record->StackElement(stack_frame)->Method();
+ ArtMethod* m = record->StackElement(stack_frame)->Method();
size_t class_name_index = class_names.IndexOf(m->GetDeclaringClassDescriptor());
size_t method_name_index = method_names.IndexOf(m->GetName());
size_t file_name_index = filenames.IndexOf(GetMethodSourceFile(m));
@@ -5047,12 +5042,12 @@ jbyteArray Dbg::GetRecentAllocations() {
return result;
}
-mirror::ArtMethod* DeoptimizationRequest::Method() const {
+ArtMethod* DeoptimizationRequest::Method() const {
ScopedObjectAccessUnchecked soa(Thread::Current());
return soa.DecodeMethod(method_);
}
-void DeoptimizationRequest::SetMethod(mirror::ArtMethod* m) {
+void DeoptimizationRequest::SetMethod(ArtMethod* m) {
ScopedObjectAccessUnchecked soa(Thread::Current());
method_ = soa.EncodeMethod(m);
}