From 2b7c4d196c8abe32f4ca633534917da9de53c359 Mon Sep 17 00:00:00 2001
From: Mathieu Chartier <mathieuc@google.com>
Date: Mon, 19 May 2014 10:52:16 -0700
Subject: Don't get and restore thread state for ScopedFastNativeObjectAccess.

Previously we would ensure that we were runnable for fast native object
access. However, these accesses only happen when the thread is already
runnable, so the state transition is unnecessary.

Change-Id: Ia4c6e4c83d146fe2a988b37b3133ca46b0f0fa42
---
 runtime/scoped_thread_state_change.h | 130 +++++++++++++++++++++--------------
 1 file changed, 80 insertions(+), 50 deletions(-)

diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index dbd961f942..d56495e4f9 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -93,50 +93,15 @@ class ScopedThreadStateChange {
   ThreadState old_thread_state_;
   const bool expected_has_no_thread_;
 
+  friend class ScopedObjectAccessUnchecked;
   DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
 };
 
-// Entry/exit processing for transitions from Native to Runnable (i.e. within JNI functions).
-//
-// This class performs the necessary thread state switching to and from Runnable and lets us
-// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
-// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
-// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
-// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
-// is also manipulating the Object.
-//
-// The destructor transitions back to the previous thread state, typically Native. In this state
-// GC and thread suspension may occur.
-//
-// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared lock of
-// the mutator_lock_ will be acquired on construction.
-class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
+// Assumes we are already runnable.
+class ScopedObjectAccessAlreadyRunnable {
  public:
-  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
-      : ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
-        env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
-    self_->VerifyStack();
-    Locks::mutator_lock_->AssertSharedHeld(self_);
-  }
-
-  explicit ScopedObjectAccessUnchecked(Thread* self)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
-      : ScopedThreadStateChange(self, kRunnable),
-        env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
-        vm_(env_ != NULL ? env_->vm : NULL) {
-    self_->VerifyStack();
-    Locks::mutator_lock_->AssertSharedHeld(self_);
-  }
-
-  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
-  // change into Runnable or acquire a share on the mutator_lock_.
-  explicit ScopedObjectAccessUnchecked(JavaVM* vm)
-      : ScopedThreadStateChange(), env_(NULL), vm_(down_cast<JavaVMExt*>(vm)) {}
-
-  // Here purely to force inlining.
-  ~ScopedObjectAccessUnchecked() ALWAYS_INLINE {
-    Locks::mutator_lock_->AssertSharedHeld(self_);
+  Thread* Self() const {
+    return self_;
   }
 
   JNIEnvExt* Env() const {
@@ -159,13 +124,11 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
   template<typename T>
   T AddLocalReference(mirror::Object* obj) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     if (obj == NULL) {
       return NULL;
     }
-    DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
-
     return Env()->AddLocalReference<T>(obj);
   }
 
@@ -173,14 +136,14 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
   T Decode(jobject obj) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     return down_cast<T>(Self()->DecodeJObject(obj));
   }
 
   mirror::ArtField* DecodeField(jfieldID fid) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingFields);
     return reinterpret_cast<mirror::ArtField*>(fid);
   }
@@ -188,7 +151,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
   jfieldID EncodeField(mirror::ArtField* field) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingFields);
     return reinterpret_cast<jfieldID>(field);
   }
@@ -196,7 +159,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
   mirror::ArtMethod* DecodeMethod(jmethodID mid) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingMethods);
     return reinterpret_cast<mirror::ArtMethod*>(mid);
   }
@@ -204,16 +167,83 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
   jmethodID EncodeMethod(mirror::ArtMethod* method) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingMethods);
     return reinterpret_cast<jmethodID>(method);
   }
 
- private:
+  bool IsRunnable() const {
+    return self_->GetState() == kRunnable;
+  }
+
+ protected:
+  explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
+  }
+
+  explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
+        vm_(env_ != nullptr ? env_->vm : nullptr) {
+  }
+
+  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
+  // change into Runnable or acquire a share on the mutator_lock_.
+  explicit ScopedObjectAccessAlreadyRunnable(JavaVM* vm)
+      : self_(nullptr), env_(nullptr), vm_(down_cast<JavaVMExt*>(vm)) {}
+
+  // Here purely to force inlining.
+  ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
+  }
+
+  // Self thread, can be null.
+  Thread* const self_;
   // The full JNIEnv.
   JNIEnvExt* const env_;
   // The full JavaVM.
   JavaVMExt* const vm_;
+};
+
+// Entry/exit processing for transitions from Native to Runnable (i.e. within JNI functions).
+//
+// This class performs the necessary thread state switching to and from Runnable and lets us
+// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
+// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
+// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
+// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
+// is also manipulating the Object.
+//
+// The destructor transitions back to the previous thread state, typically Native. In this state
+// GC and thread suspension may occur.
+//
+// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared lock of
+// the mutator_lock_ will be acquired on construction.
+class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
+ public:
+  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
+    Self()->VerifyStack();
+    Locks::mutator_lock_->AssertSharedHeld(Self());
+  }
+
+  explicit ScopedObjectAccessUnchecked(Thread* self)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
+    Self()->VerifyStack();
+    Locks::mutator_lock_->AssertSharedHeld(Self());
+  }
+
+  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
+  // change into Runnable or acquire a share on the mutator_lock_.
+  explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
+      : ScopedObjectAccessAlreadyRunnable(vm), tsc_() {}
+
+ private:
+  // The scoped thread state change makes sure that we are runnable and restores the thread state
+  // in the destructor.
+  const ScopedThreadStateChange tsc_;
 
   DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
 };
@@ -229,7 +259,7 @@ class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 
   explicit ScopedObjectAccess(Thread* self)
       LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
-      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
+      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(self) {
   }
-- 
cgit v1.2.3
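
The ScopedFastNativeObjectAccess named in the subject line lives in a separate
header (runtime/scoped_fast_native_object_access.h) that is not part of this
view. The point of the split above is that ScopedObjectAccessUnchecked still
owns a ScopedThreadStateChange (tsc_), so it still saves and restores the
thread state, while scopes that are only ever entered from code that is
already Runnable can derive directly from ScopedObjectAccessAlreadyRunnable
and skip that work. A minimal sketch of such a subclass follows; it reuses
only the helpers shown in the patch above, and its checks are illustrative
rather than a copy of the real header:

// Sketch: a scope for fast native JNI methods built on
// ScopedObjectAccessAlreadyRunnable. The real class may carry extra checks.
class ScopedFastNativeObjectAccess : public ScopedObjectAccessAlreadyRunnable {
 public:
  explicit ScopedFastNativeObjectAccess(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(env) {
    // No ScopedThreadStateChange member: fast native methods are entered
    // while already Runnable, so there is no state to save or restore.
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(ScopedFastNativeObjectAccess);
};

With this shape, constructing the scope performs only the member
initialization in ScopedObjectAccessAlreadyRunnable plus debug-only
assertions, which is what the commit message means by not getting and
restoring the thread state.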