diff options
author | Jeff Hao <jeffhao@google.com> | 2014-09-15 18:03:41 -0700 |
---|---|---|
committer | Jeff Hao <jeffhao@google.com> | 2014-09-17 14:31:55 -0700 |
commit | 69dbec6d9d55eeb2867949c2791d01dc9aa916c8 (patch) | |
tree | be4eca8161eafb4a8466f5adf28a2c8f27cd37d1 /runtime | |
parent | 926d8e99aa69151a271180299df68963688010d8 (diff) | |
download | art-69dbec6d9d55eeb2867949c2791d01dc9aa916c8.tar.gz art-69dbec6d9d55eeb2867949c2791d01dc9aa916c8.tar.bz2 art-69dbec6d9d55eeb2867949c2791d01dc9aa916c8.zip |
Avoid suspending for alloc trace enabling when already suspended.
Bug: 17499772
(cherry picked from commit 1d6ee090fddd4bfd35c304d6ceb929d5c529dfcc)
Change-Id: Id09809c476c685f0a197ee75bb08638931364efd
Diffstat (limited to 'runtime')
-rw-r--r-- | runtime/base/mutex.cc | 8 | ||||
-rw-r--r-- | runtime/base/mutex.h | 2 | ||||
-rw-r--r-- | runtime/debugger.cc | 4 | ||||
-rw-r--r-- | runtime/gc/heap.cc | 2 | ||||
-rw-r--r-- | runtime/instrumentation.cc | 17 | ||||
-rw-r--r-- | runtime/instrumentation.h | 10 | ||||
-rw-r--r-- | runtime/native/dalvik_system_VMDebug.cc | 4 | ||||
-rw-r--r-- | runtime/runtime.cc | 6 | ||||
-rw-r--r-- | runtime/runtime.h | 2 | ||||
-rw-r--r-- | runtime/trace.cc | 6 |
10 files changed, 31 insertions(+), 30 deletions(-)
diff --git a/runtime/base/mutex.cc b/runtime/base/mutex.cc index 455680b449..2c95eded08 100644 --- a/runtime/base/mutex.cc +++ b/runtime/base/mutex.cc @@ -884,6 +884,10 @@ void Locks::Init() { DCHECK(heap_bitmap_lock_ == nullptr); heap_bitmap_lock_ = new ReaderWriterMutex("heap bitmap lock", current_lock_level); + UPDATE_CURRENT_LOCK_LEVEL(kTraceLock); + DCHECK(trace_lock_ == nullptr); + trace_lock_ = new Mutex("trace lock", current_lock_level); + UPDATE_CURRENT_LOCK_LEVEL(kRuntimeShutdownLock); DCHECK(runtime_shutdown_lock_ == nullptr); runtime_shutdown_lock_ = new Mutex("runtime shutdown lock", current_lock_level); @@ -892,10 +896,6 @@ void Locks::Init() { DCHECK(profiler_lock_ == nullptr); profiler_lock_ = new Mutex("profiler lock", current_lock_level); - UPDATE_CURRENT_LOCK_LEVEL(kTraceLock); - DCHECK(trace_lock_ == nullptr); - trace_lock_ = new Mutex("trace lock", current_lock_level); - UPDATE_CURRENT_LOCK_LEVEL(kDeoptimizationLock); DCHECK(deoptimization_lock_ == nullptr); deoptimization_lock_ = new Mutex("Deoptimization lock", current_lock_level); diff --git a/runtime/base/mutex.h b/runtime/base/mutex.h index 20f58de888..8d2cdce802 100644 --- a/runtime/base/mutex.h +++ b/runtime/base/mutex.h @@ -92,12 +92,12 @@ enum LockLevel { kBreakpointInvokeLock, kAllocTrackerLock, kDeoptimizationLock, - kTraceLock, kProfilerLock, kJdwpEventListLock, kJdwpAttachLock, kJdwpStartLock, kRuntimeShutdownLock, + kTraceLock, kHeapBitmapLock, kMutatorLock, kThreadListSuspendThreadLock, diff --git a/runtime/debugger.cc b/runtime/debugger.cc index df51973b38..6c374029d8 100644 --- a/runtime/debugger.cc +++ b/runtime/debugger.cc @@ -4388,7 +4388,7 @@ void Dbg::SetAllocTrackingEnabled(bool enable) { recent_allocation_records_ = new AllocRecord[alloc_record_max_]; CHECK(recent_allocation_records_ != nullptr); } - Runtime::Current()->GetInstrumentation()->InstrumentQuickAllocEntryPoints(); + Runtime::Current()->GetInstrumentation()->InstrumentQuickAllocEntryPoints(false); } else { { 
ScopedObjectAccess soa(self); // For type_cache_.Clear(); @@ -4404,7 +4404,7 @@ void Dbg::SetAllocTrackingEnabled(bool enable) { type_cache_.Clear(); } // If an allocation comes in before we uninstrument, we will safely drop it on the floor. - Runtime::Current()->GetInstrumentation()->UninstrumentQuickAllocEntryPoints(); + Runtime::Current()->GetInstrumentation()->UninstrumentQuickAllocEntryPoints(false); } } diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc index 3e3b964795..18441c1cdf 100644 --- a/runtime/gc/heap.cc +++ b/runtime/gc/heap.cc @@ -426,7 +426,7 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max } } if (running_on_valgrind_) { - Runtime::Current()->GetInstrumentation()->InstrumentQuickAllocEntryPoints(); + Runtime::Current()->GetInstrumentation()->InstrumentQuickAllocEntryPoints(false); } if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) { LOG(INFO) << "Heap() exiting"; diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc index 0f45b9e512..a2e88a694e 100644 --- a/runtime/instrumentation.cc +++ b/runtime/instrumentation.cc @@ -597,10 +597,13 @@ static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) { thread->ResetQuickAllocEntryPointsForThread(); } -void Instrumentation::SetEntrypointsInstrumented(bool instrumented) { +void Instrumentation::SetEntrypointsInstrumented(bool instrumented, bool suspended) { Runtime* runtime = Runtime::Current(); ThreadList* tl = runtime->GetThreadList(); - if (runtime->IsStarted()) { + if (suspended) { + Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current()); + } + if (runtime->IsStarted() && !suspended) { tl->SuspendAll(); } { @@ -608,30 +611,30 @@ void Instrumentation::SetEntrypointsInstrumented(bool instrumented) { SetQuickAllocEntryPointsInstrumented(instrumented); ResetQuickAllocEntryPoints(); } - if (runtime->IsStarted()) { + if (runtime->IsStarted() && !suspended) { tl->ResumeAll(); } } -void Instrumentation::InstrumentQuickAllocEntryPoints() { 
+void Instrumentation::InstrumentQuickAllocEntryPoints(bool suspended) { // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code // should be guarded by a lock. DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0); const bool enable_instrumentation = quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0; if (enable_instrumentation) { - SetEntrypointsInstrumented(true); + SetEntrypointsInstrumented(true, suspended); } } -void Instrumentation::UninstrumentQuickAllocEntryPoints() { +void Instrumentation::UninstrumentQuickAllocEntryPoints(bool suspended) { // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code // should be guarded by a lock. DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0); const bool disable_instrumentation = quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1; if (disable_instrumentation) { - SetEntrypointsInstrumented(false); + SetEntrypointsInstrumented(false, suspended); } } diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h index d05cee5dcb..3c1c756992 100644 --- a/runtime/instrumentation.h +++ b/runtime/instrumentation.h @@ -182,10 +182,10 @@ class Instrumentation { return interpreter_handler_table_; } - void InstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::thread_list_lock_, - Locks::runtime_shutdown_lock_); - void UninstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::thread_list_lock_, - Locks::runtime_shutdown_lock_); + void InstrumentQuickAllocEntryPoints(bool suspended) + LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::runtime_shutdown_lock_); + void UninstrumentQuickAllocEntryPoints(bool suspended) + LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::runtime_shutdown_lock_); void ResetQuickAllocEntryPoints() EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_); // Update the 
code of a method respecting any installed stubs. @@ -350,7 +350,7 @@ class Instrumentation { // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring // exclusive access to mutator lock which you can't get if the runtime isn't started. - void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS; + void SetEntrypointsInstrumented(bool instrumented, bool suspended) NO_THREAD_SAFETY_ANALYSIS; void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method, uint32_t dex_pc) const diff --git a/runtime/native/dalvik_system_VMDebug.cc b/runtime/native/dalvik_system_VMDebug.cc index ceff2065ba..d8a537f948 100644 --- a/runtime/native/dalvik_system_VMDebug.cc +++ b/runtime/native/dalvik_system_VMDebug.cc @@ -60,11 +60,11 @@ static jobjectArray VMDebug_getVmFeatureList(JNIEnv* env, jclass) { } static void VMDebug_startAllocCounting(JNIEnv*, jclass) { - Runtime::Current()->SetStatsEnabled(true); + Runtime::Current()->SetStatsEnabled(true, false); } static void VMDebug_stopAllocCounting(JNIEnv*, jclass) { - Runtime::Current()->SetStatsEnabled(false); + Runtime::Current()->SetStatsEnabled(false, false); } static jint VMDebug_getAllocCount(JNIEnv*, jclass, jint kind) { diff --git a/runtime/runtime.cc b/runtime/runtime.cc index 9b24bec574..0e382ff65a 100644 --- a/runtime/runtime.cc +++ b/runtime/runtime.cc @@ -998,14 +998,14 @@ void Runtime::DumpLockHolders(std::ostream& os) { } } -void Runtime::SetStatsEnabled(bool new_state) { +void Runtime::SetStatsEnabled(bool new_state, bool suspended) { if (new_state == true) { GetStats()->Clear(~0); // TODO: wouldn't it make more sense to clear _all_ threads' stats? 
Thread::Current()->GetStats()->Clear(~0); - GetInstrumentation()->InstrumentQuickAllocEntryPoints(); + GetInstrumentation()->InstrumentQuickAllocEntryPoints(suspended); } else { - GetInstrumentation()->UninstrumentQuickAllocEntryPoints(); + GetInstrumentation()->UninstrumentQuickAllocEntryPoints(suspended); } stats_enabled_ = new_state; } diff --git a/runtime/runtime.h b/runtime/runtime.h index cfb1abc477..f9c017b278 100644 --- a/runtime/runtime.h +++ b/runtime/runtime.h @@ -390,7 +390,7 @@ class Runtime { void ResetStats(int kinds); - void SetStatsEnabled(bool new_state); + void SetStatsEnabled(bool new_state, bool suspended); enum class NativeBridgeAction { // private kUnload, diff --git a/runtime/trace.cc b/runtime/trace.cc index 6dcc5fe6bb..b32e0429b1 100644 --- a/runtime/trace.cc +++ b/runtime/trace.cc @@ -373,11 +373,9 @@ void Trace::Start(const char* trace_filename, int trace_fd, int buffer_size, int // Enable count of allocs if specified in the flags. if ((flags && kTraceCountAllocs) != 0) { - runtime->SetStatsEnabled(true); + runtime->SetStatsEnabled(true, true); } - - if (sampling_enabled) { CHECK_PTHREAD_CALL(pthread_create, (&sampling_pthread_, NULL, &RunSamplingThread, reinterpret_cast<void*>(interval_us)), @@ -492,7 +490,7 @@ void Trace::FinishTracing() { size_t final_offset = cur_offset_.LoadRelaxed(); if ((flags_ & kTraceCountAllocs) != 0) { - Runtime::Current()->SetStatsEnabled(false); + Runtime::Current()->SetStatsEnabled(false, true); } std::set<mirror::ArtMethod*> visited_methods; |