path: root/runtime/scoped_thread_state_change.h
author     Mathieu Chartier <mathieuc@google.com>  2014-05-19 10:52:16 -0700
committer  Mathieu Chartier <mathieuc@google.com>  2014-05-19 15:19:28 -0700
commit     2b7c4d196c8abe32f4ca633534917da9de53c359 (patch)
tree       f14dc45f53d5681a5c0e57b8a8cc6b35eee6e896 /runtime/scoped_thread_state_change.h
parent     84e524207b23d58a1b1e5f4443000ccac97c4184 (diff)
Don't get and restore thread state for ScopedFastNativeObjectAccess.
Before this change we would ensure that the thread was Runnable for fast native object access. However, these accesses only happen when the thread is already Runnable, so the state transition (and its restore) is redundant.

Change-Id: Ia4c6e4c83d146fe2a988b37b3133ca46b0f0fa42
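The commit achieves this by splitting the old ScopedObjectAccessUnchecked into a cheap base class, ScopedObjectAccessAlreadyRunnable, which only caches the Thread, JNIEnv, and JavaVM pointers, and a subclass that still owns a ScopedThreadStateChange. A fast-native access scope can then derive from the base class and skip the transition entirely. A minimal sketch of such a consumer, assuming only the API introduced in the diff below (ScopedFastNativeObjectAccess itself is defined in another header and is not part of this diff):

// Hedged sketch, not from this commit: a fast-native access scope that
// inherits the cheap base class and merely asserts, in debug builds, that
// the caller really is already Runnable.
class ScopedFastNativeObjectAccess : public ScopedObjectAccessAlreadyRunnable {
 public:
  explicit ScopedFastNativeObjectAccess(JNIEnv* env) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(env) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK(IsRunnable());  // Fast native methods never leave Runnable.
  }
};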
Diffstat (limited to 'runtime/scoped_thread_state_change.h')
-rw-r--r--  runtime/scoped_thread_state_change.h  | 130
1 file changed, 80 insertions(+), 50 deletions(-)
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index dbd961f942..d56495e4f9 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -93,50 +93,15 @@ class ScopedThreadStateChange {
ThreadState old_thread_state_;
const bool expected_has_no_thread_;
+ friend class ScopedObjectAccessUnchecked;
DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
};
-// Entry/exit processing for transitions from Native to Runnable (i.e. within JNI functions).
-//
-// This class performs the necessary thread state switching to and from Runnable and lets us
-// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
-// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
-// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
-// for use of Object, thereby preventing the Object from being modified by the GC while native or
-// VM code is also manipulating the Object.
-//
-// The destructor transitions back to the previous thread state, typically Native. In this state
-// GC and thread suspension may occur.
-//
-// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared lock on
-// the mutator_lock_ will be acquired on construction.
-class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
+// Assumes we are already runnable.
+class ScopedObjectAccessAlreadyRunnable {
public:
- explicit ScopedObjectAccessUnchecked(JNIEnv* env)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
- : ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
- env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
- self_->VerifyStack();
- Locks::mutator_lock_->AssertSharedHeld(self_);
- }
-
- explicit ScopedObjectAccessUnchecked(Thread* self)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
- : ScopedThreadStateChange(self, kRunnable),
- env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
- vm_(env_ != NULL ? env_->vm : NULL) {
- self_->VerifyStack();
- Locks::mutator_lock_->AssertSharedHeld(self_);
- }
-
- // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
- // change into Runnable or acquire a share on the mutator_lock_.
- explicit ScopedObjectAccessUnchecked(JavaVM* vm)
- : ScopedThreadStateChange(), env_(NULL), vm_(down_cast<JavaVMExt*>(vm)) {}
-
- // Here purely to force inlining.
- ~ScopedObjectAccessUnchecked() ALWAYS_INLINE {
- Locks::mutator_lock_->AssertSharedHeld(self_);
+ Thread* Self() const {
+ return self_;
}
JNIEnvExt* Env() const {
@@ -159,13 +124,11 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
template<typename T>
T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
- DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
+ DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
if (obj == NULL) {
return NULL;
}
-
DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
-
return Env()->AddLocalReference<T>(obj);
}
@@ -173,14 +136,14 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
T Decode(jobject obj) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
- DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
+ DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
return down_cast<T>(Self()->DecodeJObject(obj));
}
mirror::ArtField* DecodeField(jfieldID fid) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
- DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
+ DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
CHECK(!kMovingFields);
return reinterpret_cast<mirror::ArtField*>(fid);
}
@@ -188,7 +151,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
jfieldID EncodeField(mirror::ArtField* field) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
- DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
+ DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
CHECK(!kMovingFields);
return reinterpret_cast<jfieldID>(field);
}
@@ -196,7 +159,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
mirror::ArtMethod* DecodeMethod(jmethodID mid) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
- DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
+ DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
CHECK(!kMovingMethods);
return reinterpret_cast<mirror::ArtMethod*>(mid);
}
@@ -204,16 +167,83 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
jmethodID EncodeMethod(mirror::ArtMethod* method) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
- DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
+ DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
CHECK(!kMovingMethods);
return reinterpret_cast<jmethodID>(method);
}
- private:
+ bool IsRunnable() const {
+ return self_->GetState() == kRunnable;
+ }
+
+ protected:
+ explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
+ }
+
+ explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
+ vm_(env_ != nullptr ? env_->vm : nullptr) {
+ }
+
+ // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
+ // change into Runnable or acquire a share on the mutator_lock_.
+ explicit ScopedObjectAccessAlreadyRunnable(JavaVM* vm)
+ : self_(nullptr), env_(nullptr), vm_(down_cast<JavaVMExt*>(vm)) {}
+
+ // Here purely to force inlining.
+ ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
+ }
+
+ // Self thread, can be null.
+ Thread* const self_;
// The full JNIEnv.
JNIEnvExt* const env_;
// The full JavaVM.
JavaVMExt* const vm_;
+};
+
+// Entry/exit processing for transitions from Native to Runnable (i.e. within JNI functions).
+//
+// This class performs the necessary thread state switching to and from Runnable and lets us
+// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
+// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
+// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
+// for use of Object, thereby preventing the Object from being modified by the GC while native or
+// VM code is also manipulating the Object.
+//
+// The destructor transitions back to the previous thread state, typically Native. In this state
+// GC and thread suspension may occur.
+//
+// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared lock on
+// the mutator_lock_ will be acquired on construction.
+class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
+ public:
+ explicit ScopedObjectAccessUnchecked(JNIEnv* env)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
+ Self()->VerifyStack();
+ Locks::mutator_lock_->AssertSharedHeld(Self());
+ }
+
+ explicit ScopedObjectAccessUnchecked(Thread* self)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
+ Self()->VerifyStack();
+ Locks::mutator_lock_->AssertSharedHeld(Self());
+ }
+
+ // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
+ // change into Runnable or acquire a share on the mutator_lock_.
+ explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
+ : ScopedObjectAccessAlreadyRunnable(vm), tsc_() {}
+
+ private:
+ // The scoped thread state change makes sure that we are runnable and restores the thread state
+ // in the destructor.
+ const ScopedThreadStateChange tsc_;
DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};
@@ -229,7 +259,7 @@ class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
explicit ScopedObjectAccess(Thread* self)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
- SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
+ SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
: ScopedObjectAccessUnchecked(self) {
}
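For context, a typical caller looks like the following hypothetical JNI-style entry point (the function and its behavior are illustrative only, not from this commit; it relies solely on the Decode and AddLocalReference members shown above):

// Hypothetical usage: the ScopedObjectAccess constructor transitions the
// thread to kRunnable (sharing the mutator_lock_); the destructor restores
// the previous state, typically kNative, where GC and suspension may occur.
static jobject MyClass_identity(JNIEnv* env, jobject java_obj) {
  ScopedObjectAccess soa(env);
  // Raw mirror::Object pointers may only be used while Runnable.
  mirror::Object* obj = soa.Decode<mirror::Object*>(java_obj);
  return soa.AddLocalReference<jobject>(obj);
}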