Diffstat (limited to 'compiler/jni')
 -rw-r--r--  compiler/jni/jni_compiler_test.cc                  | 777
 -rw-r--r--  compiler/jni/portable/jni_compiler.cc              | 295
 -rw-r--r--  compiler/jni/portable/jni_compiler.h               |  87
 -rw-r--r--  compiler/jni/quick/arm/calling_convention_arm.cc   | 212
 -rw-r--r--  compiler/jni/quick/arm/calling_convention_arm.h    |  88
 -rw-r--r--  compiler/jni/quick/calling_convention.cc           | 184
 -rw-r--r--  compiler/jni/quick/calling_convention.h            | 289
 -rw-r--r--  compiler/jni/quick/jni_compiler.cc                 | 489
 -rw-r--r--  compiler/jni/quick/mips/calling_convention_mips.cc | 215
 -rw-r--r--  compiler/jni/quick/mips/calling_convention_mips.h  |  86
 -rw-r--r--  compiler/jni/quick/x86/calling_convention_x86.cc   | 166
 -rw-r--r--  compiler/jni/quick/x86/calling_convention_x86.h    |  83
12 files changed, 2971 insertions, 0 deletions
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
new file mode 100644
index 0000000000..560a146052
--- /dev/null
+++ b/compiler/jni/jni_compiler_test.cc
@@ -0,0 +1,777 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "class_linker.h"
+#include "common_test.h"
+#include "dex_file.h"
+#include "gtest/gtest.h"
+#include "indirect_reference_table.h"
+#include "jni_internal.h"
+#include "mem_map.h"
+#include "mirror/class-inl.h"
+#include "mirror/class_loader.h"
+#include "mirror/abstract_method-inl.h"
+#include "mirror/object_array-inl.h"
+#include "mirror/object-inl.h"
+#include "mirror/stack_trace_element.h"
+#include "runtime.h"
+#include "ScopedLocalRef.h"
+#include "scoped_thread_state_change.h"
+#include "thread.h"
+#include "UniquePtr.h"
+
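+// Exported with extern "C" so the *ThroughStub tests below can resolve them at
+// runtime via LoadNativeLibrary("") instead of RegisterNatives (the stub links
+// against &Java_MyClassNatives_bar / &Java_MyClassNatives_sbar).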
+extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_bar(JNIEnv*, jobject, jint count) {
+ return count + 1;
+}
+
+extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_sbar(JNIEnv*, jclass, jint count) {
+ return count + 1;
+}
+
+namespace art {
+
+class JniCompilerTest : public CommonTest {
+ protected:
+ void CompileForTest(jobject class_loader, bool direct,
+ const char* method_name, const char* method_sig) {
+ ScopedObjectAccess soa(Thread::Current());
+ // Compile the native method before starting the runtime
+ mirror::Class* c = class_linker_->FindClass("LMyClassNatives;",
+ soa.Decode<mirror::ClassLoader*>(class_loader));
+ mirror::AbstractMethod* method;
+ if (direct) {
+ method = c->FindDirectMethod(method_name, method_sig);
+ } else {
+ method = c->FindVirtualMethod(method_name, method_sig);
+ }
+ ASSERT_TRUE(method != NULL) << method_name << " " << method_sig;
+ if (method->GetEntryPointFromCompiledCode() != NULL) {
+ return;
+ }
+ CompileMethod(method);
+ ASSERT_TRUE(method->GetEntryPointFromCompiledCode() != NULL) << method_name << " " << method_sig;
+ }
+
+ void SetUpForTest(bool direct, const char* method_name, const char* method_sig,
+ void* native_fnptr) {
+ // Initialize class loader and compile method when runtime not started.
+ if (!runtime_->IsStarted()) {
+ {
+ ScopedObjectAccess soa(Thread::Current());
+ class_loader_ = LoadDex("MyClassNatives");
+ }
+ CompileForTest(class_loader_, direct, method_name, method_sig);
+ // Start runtime.
+ Thread::Current()->TransitionFromSuspendedToRunnable();
+ bool started = runtime_->Start();
+ CHECK(started);
+ }
+ // JNI operations after runtime start.
+ env_ = Thread::Current()->GetJniEnv();
+ jklass_ = env_->FindClass("MyClassNatives");
+ ASSERT_TRUE(jklass_ != NULL) << method_name << " " << method_sig;
+
+ if (direct) {
+ jmethod_ = env_->GetStaticMethodID(jklass_, method_name, method_sig);
+ } else {
+ jmethod_ = env_->GetMethodID(jklass_, method_name, method_sig);
+ }
+ ASSERT_TRUE(jmethod_ != NULL) << method_name << " " << method_sig;
+
+ if (native_fnptr != NULL) {
+ JNINativeMethod methods[] = { { method_name, method_sig, native_fnptr } };
+ ASSERT_EQ(JNI_OK, env_->RegisterNatives(jklass_, methods, 1))
+ << method_name << " " << method_sig;
+ } else {
+ env_->UnregisterNatives(jklass_);
+ }
+
+ jmethodID constructor = env_->GetMethodID(jklass_, "<init>", "()V");
+ jobj_ = env_->NewObject(jklass_, constructor);
+ ASSERT_TRUE(jobj_ != NULL) << method_name << " " << method_sig;
+ }
+
+ public:
+ static jclass jklass_;
+ static jobject jobj_;
+ static jobject class_loader_;
+
+
+ protected:
+ JNIEnv* env_;
+ jmethodID jmethod_;
+};
+
+jclass JniCompilerTest::jklass_;
+jobject JniCompilerTest::jobj_;
+jobject JniCompilerTest::class_loader_;
+
+int gJava_MyClassNatives_foo_calls = 0;
+void Java_MyClassNatives_foo(JNIEnv* env, jobject thisObj) {
+ // 1 = thisObj
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ Locks::mutator_lock_->AssertNotHeld(Thread::Current());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+ gJava_MyClassNatives_foo_calls++;
+}
+
+TEST_F(JniCompilerTest, CompileAndRunNoArgMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "foo", "()V",
+ reinterpret_cast<void*>(&Java_MyClassNatives_foo));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_foo_calls);
+ env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_);
+ EXPECT_EQ(1, gJava_MyClassNatives_foo_calls);
+ env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_);
+ EXPECT_EQ(2, gJava_MyClassNatives_foo_calls);
+}
+
+TEST_F(JniCompilerTest, CompileAndRunIntMethodThroughStub) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "bar", "(I)I",
+ NULL /* calling through stub will link with &Java_MyClassNatives_bar */);
+
+ ScopedObjectAccess soa(Thread::Current());
+ std::string reason;
+ ASSERT_TRUE(
+ Runtime::Current()->GetJavaVM()->LoadNativeLibrary("", soa.Decode<mirror::ClassLoader*>(class_loader_),
+ reason)) << reason;
+
+ jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 24);
+ EXPECT_EQ(25, result);
+}
+
+TEST_F(JniCompilerTest, CompileAndRunStaticIntMethodThroughStub) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "sbar", "(I)I",
+ NULL /* calling through stub will link with &Java_MyClassNatives_sbar */);
+
+ ScopedObjectAccess soa(Thread::Current());
+ std::string reason;
+ ASSERT_TRUE(
+ Runtime::Current()->GetJavaVM()->LoadNativeLibrary("", soa.Decode<mirror::ClassLoader*>(class_loader_),
+ reason)) << reason;
+
+ jint result = env_->CallStaticIntMethod(jklass_, jmethod_, 42);
+ EXPECT_EQ(43, result);
+}
+
+int gJava_MyClassNatives_fooI_calls = 0;
+jint Java_MyClassNatives_fooI(JNIEnv* env, jobject thisObj, jint x) {
+ // 1 = thisObj
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+ gJava_MyClassNatives_fooI_calls++;
+ return x;
+}
+
+TEST_F(JniCompilerTest, CompileAndRunIntMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooI", "(I)I",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooI));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooI_calls);
+ jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 42);
+ EXPECT_EQ(42, result);
+ EXPECT_EQ(1, gJava_MyClassNatives_fooI_calls);
+ result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 0xCAFED00D);
+ EXPECT_EQ(static_cast<jint>(0xCAFED00D), result);
+ EXPECT_EQ(2, gJava_MyClassNatives_fooI_calls);
+}
+
+int gJava_MyClassNatives_fooII_calls = 0;
+jint Java_MyClassNatives_fooII(JNIEnv* env, jobject thisObj, jint x, jint y) {
+ // 1 = thisObj
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+ gJava_MyClassNatives_fooII_calls++;
+ return x - y; // non-commutative operator
+}
+
+TEST_F(JniCompilerTest, CompileAndRunIntIntMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooII", "(II)I",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooII));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooII_calls);
+ jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 99, 10);
+ EXPECT_EQ(99 - 10, result);
+ EXPECT_EQ(1, gJava_MyClassNatives_fooII_calls);
+ result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 0xCAFEBABE,
+ 0xCAFED00D);
+ EXPECT_EQ(static_cast<jint>(0xCAFEBABE - 0xCAFED00D), result);
+ EXPECT_EQ(2, gJava_MyClassNatives_fooII_calls);
+}
+
+int gJava_MyClassNatives_fooJJ_calls = 0;
+jlong Java_MyClassNatives_fooJJ(JNIEnv* env, jobject thisObj, jlong x, jlong y) {
+ // 1 = thisObj
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+ gJava_MyClassNatives_fooJJ_calls++;
+ return x - y; // non-commutative operator
+}
+
+TEST_F(JniCompilerTest, CompileAndRunLongLongMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooJJ", "(JJ)J",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooJJ));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooJJ_calls);
+ jlong a = 0x1234567890ABCDEFll;
+ jlong b = 0xFEDCBA0987654321ll;
+ jlong result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b);
+ EXPECT_EQ(a - b, result);
+ EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_calls);
+ result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, b, a);
+ EXPECT_EQ(b - a, result);
+ EXPECT_EQ(2, gJava_MyClassNatives_fooJJ_calls);
+}
+
+int gJava_MyClassNatives_fooDD_calls = 0;
+jdouble Java_MyClassNatives_fooDD(JNIEnv* env, jobject thisObj, jdouble x, jdouble y) {
+ // 1 = thisObj
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+ gJava_MyClassNatives_fooDD_calls++;
+ return x - y; // non-commutative operator
+}
+
+TEST_F(JniCompilerTest, CompileAndRunDoubleDoubleMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooDD", "(DD)D",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooDD));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooDD_calls);
+ jdouble result = env_->CallNonvirtualDoubleMethod(jobj_, jklass_, jmethod_,
+ 99.0, 10.0);
+ EXPECT_EQ(99.0 - 10.0, result);
+ EXPECT_EQ(1, gJava_MyClassNatives_fooDD_calls);
+ jdouble a = 3.14159265358979323846;
+ jdouble b = 0.69314718055994530942;
+ result = env_->CallNonvirtualDoubleMethod(jobj_, jklass_, jmethod_, a, b);
+ EXPECT_EQ(a - b, result);
+ EXPECT_EQ(2, gJava_MyClassNatives_fooDD_calls);
+}
+
+int gJava_MyClassNatives_fooJJ_synchronized_calls = 0;
+jlong Java_MyClassNatives_fooJJ_synchronized(JNIEnv* env, jobject thisObj, jlong x, jlong y) {
+ // 1 = thisObj
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+ gJava_MyClassNatives_fooJJ_synchronized_calls++;
+ return x | y;
+}
+
+TEST_F(JniCompilerTest, CompileAndRun_fooJJ_synchronized) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooJJ_synchronized", "(JJ)J",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooJJ_synchronized));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooJJ_synchronized_calls);
+ jlong a = 0x1000000020000000ULL;
+ jlong b = 0x00ff000000aa0000ULL;
+ jlong result = env_->CallNonvirtualLongMethod(jobj_, jklass_, jmethod_, a, b);
+ EXPECT_EQ(a | b, result);
+ EXPECT_EQ(1, gJava_MyClassNatives_fooJJ_synchronized_calls);
+}
+
+int gJava_MyClassNatives_fooIOO_calls = 0;
+jobject Java_MyClassNatives_fooIOO(JNIEnv* env, jobject thisObj, jint x, jobject y,
+ jobject z) {
+ // 3 = this + y + z
+ EXPECT_EQ(3U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(thisObj != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(thisObj, JniCompilerTest::jklass_));
+ gJava_MyClassNatives_fooIOO_calls++;
+ switch (x) {
+ case 1:
+ return y;
+ case 2:
+ return z;
+ default:
+ return thisObj;
+ }
+}
+
+TEST_F(JniCompilerTest, CompileAndRunIntObjectObjectMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooIOO",
+ "(ILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooIOO));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooIOO_calls);
+ jobject result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, NULL, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jobj_, result));
+ EXPECT_EQ(1, gJava_MyClassNatives_fooIOO_calls);
+
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, NULL, jklass_);
+ EXPECT_TRUE(env_->IsSameObject(jobj_, result));
+ EXPECT_EQ(2, gJava_MyClassNatives_fooIOO_calls);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, NULL, jklass_);
+ EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ EXPECT_EQ(3, gJava_MyClassNatives_fooIOO_calls);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, NULL, jklass_);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(4, gJava_MyClassNatives_fooIOO_calls);
+
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 0, jklass_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jobj_, result));
+ EXPECT_EQ(5, gJava_MyClassNatives_fooIOO_calls);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 1, jklass_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(6, gJava_MyClassNatives_fooIOO_calls);
+ result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, 2, jklass_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ EXPECT_EQ(7, gJava_MyClassNatives_fooIOO_calls);
+}
+
+int gJava_MyClassNatives_fooSII_calls = 0;
+jint Java_MyClassNatives_fooSII(JNIEnv* env, jclass klass, jint x, jint y) {
+ // 1 = klass
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
+ gJava_MyClassNatives_fooSII_calls++;
+ return x + y;
+}
+
+TEST_F(JniCompilerTest, CompileAndRunStaticIntIntMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "fooSII", "(II)I",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooSII));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooSII_calls);
+ jint result = env_->CallStaticIntMethod(jklass_, jmethod_, 20, 30);
+ EXPECT_EQ(50, result);
+ EXPECT_EQ(1, gJava_MyClassNatives_fooSII_calls);
+}
+
+int gJava_MyClassNatives_fooSDD_calls = 0;
+jdouble Java_MyClassNatives_fooSDD(JNIEnv* env, jclass klass, jdouble x, jdouble y) {
+ // 1 = klass
+ EXPECT_EQ(1U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
+ gJava_MyClassNatives_fooSDD_calls++;
+ return x - y; // non-commutative operator
+}
+
+TEST_F(JniCompilerTest, CompileAndRunStaticDoubleDoubleMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "fooSDD", "(DD)D",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooSDD));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooSDD_calls);
+ jdouble result = env_->CallStaticDoubleMethod(jklass_, jmethod_, 99.0, 10.0);
+ EXPECT_EQ(99.0 - 10.0, result);
+ EXPECT_EQ(1, gJava_MyClassNatives_fooSDD_calls);
+ jdouble a = 3.14159265358979323846;
+ jdouble b = 0.69314718055994530942;
+ result = env_->CallStaticDoubleMethod(jklass_, jmethod_, a, b);
+ EXPECT_EQ(a - b, result);
+ EXPECT_EQ(2, gJava_MyClassNatives_fooSDD_calls);
+}
+
+int gJava_MyClassNatives_fooSIOO_calls = 0;
+jobject Java_MyClassNatives_fooSIOO(JNIEnv* env, jclass klass, jint x, jobject y,
+ jobject z) {
+ // 3 = klass + y + z
+ EXPECT_EQ(3U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
+ gJava_MyClassNatives_fooSIOO_calls++;
+ switch (x) {
+ case 1:
+ return y;
+ case 2:
+ return z;
+ default:
+ return klass;
+ }
+}
+
+
+TEST_F(JniCompilerTest, CompileAndRunStaticIntObjectObjectMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "fooSIOO",
+ "(ILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooSIOO));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooSIOO_calls);
+ jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(1, gJava_MyClassNatives_fooSIOO_calls);
+
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(2, gJava_MyClassNatives_fooSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, NULL, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ EXPECT_EQ(3, gJava_MyClassNatives_fooSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, NULL, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(jobj_, result));
+ EXPECT_EQ(4, gJava_MyClassNatives_fooSIOO_calls);
+
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(5, gJava_MyClassNatives_fooSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jobj_, result));
+ EXPECT_EQ(6, gJava_MyClassNatives_fooSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ EXPECT_EQ(7, gJava_MyClassNatives_fooSIOO_calls);
+}
+
+int gJava_MyClassNatives_fooSSIOO_calls = 0;
+jobject Java_MyClassNatives_fooSSIOO(JNIEnv* env, jclass klass, jint x, jobject y, jobject z) {
+ // 3 = klass + y + z
+ EXPECT_EQ(3U, Thread::Current()->NumStackReferences());
+ EXPECT_EQ(kNative, Thread::Current()->GetState());
+ EXPECT_EQ(Thread::Current()->GetJniEnv(), env);
+ EXPECT_TRUE(klass != NULL);
+ EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
+ gJava_MyClassNatives_fooSSIOO_calls++;
+ switch (x) {
+ case 1:
+ return y;
+ case 2:
+ return z;
+ default:
+ return klass;
+ }
+}
+
+TEST_F(JniCompilerTest, CompileAndRunStaticSynchronizedIntObjectObjectMethod) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "fooSSIOO",
+ "(ILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooSSIOO));
+
+ EXPECT_EQ(0, gJava_MyClassNatives_fooSSIOO_calls);
+ jobject result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(1, gJava_MyClassNatives_fooSSIOO_calls);
+
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, NULL, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(2, gJava_MyClassNatives_fooSSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, NULL, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ EXPECT_EQ(3, gJava_MyClassNatives_fooSSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, NULL, jobj_);
+ EXPECT_TRUE(env_->IsSameObject(jobj_, result));
+ EXPECT_EQ(4, gJava_MyClassNatives_fooSSIOO_calls);
+
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 0, jobj_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jklass_, result));
+ EXPECT_EQ(5, gJava_MyClassNatives_fooSSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 1, jobj_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(jobj_, result));
+ EXPECT_EQ(6, gJava_MyClassNatives_fooSSIOO_calls);
+ result = env_->CallStaticObjectMethod(jklass_, jmethod_, 2, jobj_, NULL);
+ EXPECT_TRUE(env_->IsSameObject(NULL, result));
+ EXPECT_EQ(7, gJava_MyClassNatives_fooSSIOO_calls);
+}
+
+void Java_MyClassNatives_throwException(JNIEnv* env, jobject) {
+ jclass c = env->FindClass("java/lang/RuntimeException");
+ env->ThrowNew(c, "hello");
+}
+
+TEST_F(JniCompilerTest, ExceptionHandling) {
+ TEST_DISABLED_FOR_PORTABLE();
+ {
+ ASSERT_FALSE(runtime_->IsStarted());
+ ScopedObjectAccess soa(Thread::Current());
+ class_loader_ = LoadDex("MyClassNatives");
+
+ // all compilation needs to happen before Runtime::Start
+ CompileForTest(class_loader_, false, "foo", "()V");
+ CompileForTest(class_loader_, false, "throwException", "()V");
+ CompileForTest(class_loader_, false, "foo", "()V");
+ }
+ // Start the runtime to avoid re-initialization in SetUpForTest.
+ Thread::Current()->TransitionFromSuspendedToRunnable();
+ bool started = runtime_->Start();
+ CHECK(started);
+
+ gJava_MyClassNatives_foo_calls = 0;
+
+ // Check a single call of a JNI method is ok
+ SetUpForTest(false, "foo", "()V", reinterpret_cast<void*>(&Java_MyClassNatives_foo));
+ env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_);
+ EXPECT_EQ(1, gJava_MyClassNatives_foo_calls);
+ EXPECT_FALSE(Thread::Current()->IsExceptionPending());
+
+ // Get class for exception we expect to be thrown
+ ScopedLocalRef<jclass> jlre(env_, env_->FindClass("java/lang/RuntimeException"));
+ SetUpForTest(false, "throwException", "()V",
+ reinterpret_cast<void*>(&Java_MyClassNatives_throwException));
+ // Call Java_MyClassNatives_throwException (JNI method that throws exception)
+ env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_);
+ EXPECT_EQ(1, gJava_MyClassNatives_foo_calls);
+ EXPECT_TRUE(env_->ExceptionCheck() == JNI_TRUE);
+ ScopedLocalRef<jthrowable> exception(env_, env_->ExceptionOccurred());
+ env_->ExceptionClear();
+ EXPECT_TRUE(env_->IsInstanceOf(exception.get(), jlre.get()));
+
+ // Check a single call of a JNI method is ok
+ SetUpForTest(false, "foo", "()V", reinterpret_cast<void*>(&Java_MyClassNatives_foo));
+ env_->CallNonvirtualVoidMethod(jobj_, jklass_, jmethod_);
+ EXPECT_EQ(2, gJava_MyClassNatives_foo_calls);
+}
+
+jint Java_MyClassNatives_nativeUpCall(JNIEnv* env, jobject thisObj, jint i) {
+ if (i <= 0) {
+ // We want to check raw Object*/Array* below
+ ScopedObjectAccess soa(env);
+
+ // Build stack trace
+ jobject internal = Thread::Current()->CreateInternalStackTrace(soa);
+ jobjectArray ste_array = Thread::InternalStackTraceToStackTraceElementArray(env, internal);
+ mirror::ObjectArray<mirror::StackTraceElement>* trace_array =
+ soa.Decode<mirror::ObjectArray<mirror::StackTraceElement>*>(ste_array);
+ EXPECT_TRUE(trace_array != NULL);
+ EXPECT_EQ(11, trace_array->GetLength());
+
+ // Check stack trace entries have expected values
+ for (int32_t i = 0; i < trace_array->GetLength(); ++i) {
+ EXPECT_EQ(-2, trace_array->Get(i)->GetLineNumber());
+ mirror::StackTraceElement* ste = trace_array->Get(i);
+ EXPECT_STREQ("MyClassNatives.java", ste->GetFileName()->ToModifiedUtf8().c_str());
+ EXPECT_STREQ("MyClassNatives", ste->GetDeclaringClass()->ToModifiedUtf8().c_str());
+ EXPECT_STREQ("fooI", ste->GetMethodName()->ToModifiedUtf8().c_str());
+ }
+
+ // end recursion
+ return 0;
+ } else {
+ jclass jklass = env->FindClass("MyClassNatives");
+ EXPECT_TRUE(jklass != NULL);
+ jmethodID jmethod = env->GetMethodID(jklass, "fooI", "(I)I");
+ EXPECT_TRUE(jmethod != NULL);
+
+ // Recurse with i - 1
+ jint result = env->CallNonvirtualIntMethod(thisObj, jklass, jmethod, i - 1);
+
+ // Return sum of all depths
+ return i + result;
+ }
+}
+
+TEST_F(JniCompilerTest, NativeStackTraceElement) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooI", "(I)I",
+ reinterpret_cast<void*>(&Java_MyClassNatives_nativeUpCall));
+ jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 10);
+ EXPECT_EQ(10+9+8+7+6+5+4+3+2+1, result);
+}
+
+jobject Java_MyClassNatives_fooO(JNIEnv* env, jobject, jobject x) {
+ return env->NewGlobalRef(x);
+}
+
+TEST_F(JniCompilerTest, ReturnGlobalRef) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooO", "(Ljava/lang/Object;)Ljava/lang/Object;",
+ reinterpret_cast<void*>(&Java_MyClassNatives_fooO));
+ jobject result = env_->CallNonvirtualObjectMethod(jobj_, jklass_, jmethod_, jobj_);
+ EXPECT_EQ(JNILocalRefType, env_->GetObjectRefType(result));
+ EXPECT_TRUE(env_->IsSameObject(result, jobj_));
+}
+
+jint local_ref_test(JNIEnv* env, jobject thisObj, jint x) {
+ // Add 10 local references
+ ScopedObjectAccess soa(env);
+ for (int i = 0; i < 10; i++) {
+ soa.AddLocalReference<jobject>(soa.Decode<mirror::Object*>(thisObj));
+ }
+ return x+1;
+}
+
+TEST_F(JniCompilerTest, LocalReferenceTableClearingTest) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "fooI", "(I)I", reinterpret_cast<void*>(&local_ref_test));
+ // 1000 invocations of a method that adds 10 local references; this verifies the
+ // local reference table is cleared between calls rather than overflowing.
+ for (int i = 0; i < 1000; i++) {
+ jint result = env_->CallIntMethod(jobj_, jmethod_, i);
+ EXPECT_TRUE(result == i + 1);
+ }
+}
+
+void my_arraycopy(JNIEnv* env, jclass klass, jobject src, jint src_pos, jobject dst, jint dst_pos, jint length) {
+ EXPECT_TRUE(env->IsSameObject(JniCompilerTest::jklass_, klass));
+ EXPECT_TRUE(env->IsSameObject(JniCompilerTest::jklass_, dst));
+ EXPECT_TRUE(env->IsSameObject(JniCompilerTest::jobj_, src));
+ EXPECT_EQ(1234, src_pos);
+ EXPECT_EQ(5678, dst_pos);
+ EXPECT_EQ(9876, length);
+}
+
+TEST_F(JniCompilerTest, JavaLangSystemArrayCopy) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V",
+ reinterpret_cast<void*>(&my_arraycopy));
+ env_->CallStaticVoidMethod(jklass_, jmethod_, jobj_, 1234, jklass_, 5678, 9876);
+}
+
+jboolean my_casi(JNIEnv* env, jobject unsafe, jobject obj, jlong offset, jint expected, jint newval) {
+ EXPECT_TRUE(env->IsSameObject(JniCompilerTest::jobj_, unsafe));
+ EXPECT_TRUE(env->IsSameObject(JniCompilerTest::jobj_, obj));
+ EXPECT_EQ(0x12345678ABCDEF88ll, offset);
+ EXPECT_EQ(static_cast<jint>(0xCAFEF00D), expected);
+ EXPECT_EQ(static_cast<jint>(0xEBADF00D), newval);
+ return JNI_TRUE;
+}
+
+TEST_F(JniCompilerTest, CompareAndSwapInt) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "compareAndSwapInt", "(Ljava/lang/Object;JII)Z",
+ reinterpret_cast<void*>(&my_casi));
+ jboolean result = env_->CallBooleanMethod(jobj_, jmethod_, jobj_, 0x12345678ABCDEF88ll, 0xCAFEF00D, 0xEBADF00D);
+ EXPECT_EQ(result, JNI_TRUE);
+}
+
+jint my_gettext(JNIEnv* env, jclass klass, jlong val1, jobject obj1, jlong val2, jobject obj2) {
+ EXPECT_TRUE(env->IsInstanceOf(JniCompilerTest::jobj_, klass));
+ EXPECT_TRUE(env->IsSameObject(JniCompilerTest::jobj_, obj1));
+ EXPECT_TRUE(env->IsSameObject(JniCompilerTest::jobj_, obj2));
+ EXPECT_EQ(0x12345678ABCDEF88ll, val1);
+ EXPECT_EQ(0x7FEDCBA987654321ll, val2);
+ return 42;
+}
+
+TEST_F(JniCompilerTest, GetText) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "getText", "(JLjava/lang/Object;JLjava/lang/Object;)I",
+ reinterpret_cast<void*>(&my_gettext));
+ jint result = env_->CallStaticIntMethod(jklass_, jmethod_, 0x12345678ABCDEF88ll, jobj_,
+ 0x7FEDCBA987654321ll, jobj_);
+ EXPECT_EQ(result, 42);
+}
+
+TEST_F(JniCompilerTest, GetSinkPropertiesNative) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "getSinkPropertiesNative", "(Ljava/lang/String;)[Ljava/lang/Object;", NULL);
+ // This space intentionally left blank. Just testing compilation succeeds.
+}
+
+// This should return jclass, but we're imitating a bug pattern.
+jobject Java_MyClassNatives_instanceMethodThatShouldReturnClass(JNIEnv* env, jobject) {
+ return env->NewStringUTF("not a class!");
+}
+
+// This should return jclass, but we're imitating a bug pattern.
+jobject Java_MyClassNatives_staticMethodThatShouldReturnClass(JNIEnv* env, jclass) {
+ return env->NewStringUTF("not a class!");
+}
+
+TEST_F(JniCompilerTest, UpcallReturnTypeChecking_Instance) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "instanceMethodThatShouldReturnClass", "()Ljava/lang/Class;",
+ reinterpret_cast<void*>(&Java_MyClassNatives_instanceMethodThatShouldReturnClass));
+
+ CheckJniAbortCatcher check_jni_abort_catcher;
+ // TODO: check type of returns with portable JNI compiler.
+ // This native method is bad, and tries to return a jstring as a jclass.
+ env_->CallObjectMethod(jobj_, jmethod_);
+ check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.instanceMethodThatShouldReturnClass()");
+
+ // Here, we just call the method incorrectly; we should catch that too.
+ env_->CallVoidMethod(jobj_, jmethod_);
+ check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.instanceMethodThatShouldReturnClass()");
+ env_->CallStaticVoidMethod(jklass_, jmethod_);
+ check_jni_abort_catcher.Check("calling non-static method java.lang.Class MyClassNatives.instanceMethodThatShouldReturnClass() with CallStaticVoidMethodV");
+}
+
+TEST_F(JniCompilerTest, UpcallReturnTypeChecking_Static) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "staticMethodThatShouldReturnClass", "()Ljava/lang/Class;",
+ reinterpret_cast<void*>(&Java_MyClassNatives_staticMethodThatShouldReturnClass));
+
+ CheckJniAbortCatcher check_jni_abort_catcher;
+ // TODO: check type of returns with portable JNI compiler.
+ // This native method is bad, and tries to return a jstring as a jclass.
+ env_->CallStaticObjectMethod(jklass_, jmethod_);
+ check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.staticMethodThatShouldReturnClass()");
+
+ // Here, we just call the method incorrectly; we should catch that too.
+ env_->CallStaticVoidMethod(jklass_, jmethod_);
+ check_jni_abort_catcher.Check("attempt to return an instance of java.lang.String from java.lang.Class MyClassNatives.staticMethodThatShouldReturnClass()");
+ env_->CallVoidMethod(jobj_, jmethod_);
+ check_jni_abort_catcher.Check("calling static method java.lang.Class MyClassNatives.staticMethodThatShouldReturnClass() with CallVoidMethodV");
+}
+
+// This should take jclass, but we're imitating a bug pattern.
+void Java_MyClassNatives_instanceMethodThatShouldTakeClass(JNIEnv*, jobject, jclass) {
+}
+
+// This should take jclass, but we're imitating a bug pattern.
+void Java_MyClassNatives_staticMethodThatShouldTakeClass(JNIEnv*, jclass, jclass) {
+}
+
+TEST_F(JniCompilerTest, UpcallArgumentTypeChecking_Instance) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(false, "instanceMethodThatShouldTakeClass", "(ILjava/lang/Class;)V",
+ reinterpret_cast<void*>(&Java_MyClassNatives_instanceMethodThatShouldTakeClass));
+
+ CheckJniAbortCatcher check_jni_abort_catcher;
+ // We deliberately pass a bad second argument here.
+ env_->CallVoidMethod(jobj_, jmethod_, 123, env_->NewStringUTF("not a class!"));
+ check_jni_abort_catcher.Check("bad arguments passed to void MyClassNatives.instanceMethodThatShouldTakeClass(int, java.lang.Class)");
+}
+
+TEST_F(JniCompilerTest, UpcallArgumentTypeChecking_Static) {
+ TEST_DISABLED_FOR_PORTABLE();
+ SetUpForTest(true, "staticMethodThatShouldTakeClass", "(ILjava/lang/Class;)V",
+ reinterpret_cast<void*>(&Java_MyClassNatives_staticMethodThatShouldTakeClass));
+
+ CheckJniAbortCatcher check_jni_abort_catcher;
+ // We deliberately pass a bad second argument here.
+ env_->CallStaticVoidMethod(jklass_, jmethod_, 123, env_->NewStringUTF("not a class!"));
+ check_jni_abort_catcher.Check("bad arguments passed to void MyClassNatives.staticMethodThatShouldTakeClass(int, java.lang.Class)");
+}
+
+} // namespace art
diff --git a/compiler/jni/portable/jni_compiler.cc b/compiler/jni/portable/jni_compiler.cc
new file mode 100644
index 0000000000..44d0c2d215
--- /dev/null
+++ b/compiler/jni/portable/jni_compiler.cc
@@ -0,0 +1,295 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "jni_compiler.h"
+
+#include "base/logging.h"
+#include "class_linker.h"
+#include "compiled_method.h"
+#include "dex_file-inl.h"
+#include "driver/compiler_driver.h"
+#include "driver/dex_compilation_unit.h"
+#include "llvm/compiler_llvm.h"
+#include "llvm/ir_builder.h"
+#include "llvm/llvm_compilation_unit.h"
+#include "llvm/runtime_support_llvm_func.h"
+#include "llvm/utils_llvm.h"
+#include "mirror/abstract_method.h"
+#include "runtime.h"
+#include "stack.h"
+#include "thread.h"
+
+#include <llvm/ADT/SmallVector.h>
+#include <llvm/IR/BasicBlock.h>
+#include <llvm/IR/DerivedTypes.h>
+#include <llvm/IR/Function.h>
+#include <llvm/IR/Type.h>
+
+namespace art {
+namespace llvm {
+
+using namespace runtime_support;
+
+JniCompiler::JniCompiler(LlvmCompilationUnit* cunit,
+ const CompilerDriver& driver,
+ const DexCompilationUnit* dex_compilation_unit)
+: cunit_(cunit), driver_(&driver), module_(cunit_->GetModule()),
+ context_(cunit_->GetLLVMContext()), irb_(*cunit_->GetIRBuilder()),
+ dex_compilation_unit_(dex_compilation_unit),
+ func_(NULL), elf_func_idx_(0) {
+
+ // Check: the JNI compiler must only be given native methods
+ CHECK(dex_compilation_unit->IsNative());
+}
+
+
+CompiledMethod* JniCompiler::Compile() {
+ const bool is_static = dex_compilation_unit_->IsStatic();
+ const bool is_synchronized = dex_compilation_unit_->IsSynchronized();
+ const DexFile* dex_file = dex_compilation_unit_->GetDexFile();
+ DexFile::MethodId const& method_id =
+ dex_file->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
+ char const return_shorty = dex_file->GetMethodShorty(method_id)[0];
+ ::llvm::Value* this_object_or_class_object;
+
+ uint32_t method_idx = dex_compilation_unit_->GetDexMethodIndex();
+ std::string func_name(StringPrintf("jni_%s",
+ MangleForJni(PrettyMethod(method_idx, *dex_file)).c_str()));
+ CreateFunction(func_name);
+
+ // Set argument name
+ ::llvm::Function::arg_iterator arg_begin(func_->arg_begin());
+ ::llvm::Function::arg_iterator arg_end(func_->arg_end());
+ ::llvm::Function::arg_iterator arg_iter(arg_begin);
+
+ DCHECK_NE(arg_iter, arg_end);
+ arg_iter->setName("method");
+ ::llvm::Value* method_object_addr = arg_iter++;
+
+ if (!is_static) {
+ // Non-static: the second argument is the "this" object
+ this_object_or_class_object = arg_iter++;
+ } else {
+ // Load class object
+ this_object_or_class_object =
+ irb_.LoadFromObjectOffset(method_object_addr,
+ mirror::AbstractMethod::DeclaringClassOffset().Int32Value(),
+ irb_.getJObjectTy(),
+ kTBAAConstJObject);
+ }
+ // Actual arguments (skip the method and the this/class object)
+ arg_begin = arg_iter;
+
+ // Count the number of Object* arguments
+ uint32_t sirt_size = 1;
+ // "this" object pointer for non-static
+ // "class" object pointer for static
+ for (unsigned i = 0; arg_iter != arg_end; ++i, ++arg_iter) {
+#if !defined(NDEBUG)
+ arg_iter->setName(StringPrintf("a%u", i));
+#endif
+ if (arg_iter->getType() == irb_.getJObjectTy()) {
+ ++sirt_size;
+ }
+ }
+
+ // Shadow stack
+ ::llvm::StructType* shadow_frame_type = irb_.getShadowFrameTy(sirt_size);
+ ::llvm::AllocaInst* shadow_frame_ = irb_.CreateAlloca(shadow_frame_type);
+
+ // Store the dex pc
+ irb_.StoreToObjectOffset(shadow_frame_,
+ ShadowFrame::DexPCOffset(),
+ irb_.getInt32(DexFile::kDexNoIndex),
+ kTBAAShadowFrame);
+
+ // Push the shadow frame
+ ::llvm::Value* shadow_frame_upcast = irb_.CreateConstGEP2_32(shadow_frame_, 0, 0);
+ ::llvm::Value* old_shadow_frame =
+ irb_.Runtime().EmitPushShadowFrame(shadow_frame_upcast, method_object_addr, sirt_size);
+
+ // Get JNIEnv
+ ::llvm::Value* jni_env_object_addr =
+ irb_.Runtime().EmitLoadFromThreadOffset(Thread::JniEnvOffset().Int32Value(),
+ irb_.getJObjectTy(),
+ kTBAARuntimeInfo);
+
+ // Get callee code_addr
+ ::llvm::Value* code_addr =
+ irb_.LoadFromObjectOffset(method_object_addr,
+ mirror::AbstractMethod::NativeMethodOffset().Int32Value(),
+ GetFunctionType(dex_compilation_unit_->GetDexMethodIndex(),
+ is_static, true)->getPointerTo(),
+ kTBAARuntimeInfo);
+
+ // Load actual parameters
+ std::vector< ::llvm::Value*> args;
+
+ // The 1st parameter: JNIEnv*
+ args.push_back(jni_env_object_addr);
+
+ // Variables for GetElementPtr
+ ::llvm::Value* gep_index[] = {
+ irb_.getInt32(0), // No displacement for shadow frame pointer
+ irb_.getInt32(1), // SIRT
+ NULL,
+ };
+
+ size_t sirt_member_index = 0;
+
+ // Store the "this object or class object" to SIRT
+ gep_index[2] = irb_.getInt32(sirt_member_index++);
+ ::llvm::Value* sirt_field_addr = irb_.CreateBitCast(irb_.CreateGEP(shadow_frame_, gep_index),
+ irb_.getJObjectTy()->getPointerTo());
+ irb_.CreateStore(this_object_or_class_object, sirt_field_addr, kTBAAShadowFrame);
+ // Push the "this object or class object" to out args
+ this_object_or_class_object = irb_.CreateBitCast(sirt_field_addr, irb_.getJObjectTy());
+ args.push_back(this_object_or_class_object);
+ // Store arguments to SIRT, and push back to args
+ for (arg_iter = arg_begin; arg_iter != arg_end; ++arg_iter) {
+ if (arg_iter->getType() == irb_.getJObjectTy()) {
+ // Store the reference type arguments to SIRT
+ gep_index[2] = irb_.getInt32(sirt_member_index++);
+ ::llvm::Value* sirt_field_addr = irb_.CreateBitCast(irb_.CreateGEP(shadow_frame_, gep_index),
+ irb_.getJObjectTy()->getPointerTo());
+ irb_.CreateStore(arg_iter, sirt_field_addr, kTBAAShadowFrame);
+ // Note null is placed in the SIRT but the jobject passed to the native code must be null
+ // (not a pointer into the SIRT as with regular references).
+ ::llvm::Value* equal_null = irb_.CreateICmpEQ(arg_iter, irb_.getJNull());
+ ::llvm::Value* arg =
+ irb_.CreateSelect(equal_null,
+ irb_.getJNull(),
+ irb_.CreateBitCast(sirt_field_addr, irb_.getJObjectTy()));
+ args.push_back(arg);
+ } else {
+ args.push_back(arg_iter);
+ }
+ }
+
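+ // JniMethodStart (or its synchronized variant, which also locks the monitor)
+ // transitions the thread out of the runnable state and returns the saved local
+ // reference cookie that the matching JniMethodEnd call below consumes.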
+ ::llvm::Value* saved_local_ref_cookie;
+ { // JniMethodStart
+ RuntimeId func_id = is_synchronized ? JniMethodStartSynchronized
+ : JniMethodStart;
+ ::llvm::SmallVector< ::llvm::Value*, 2> args;
+ if (is_synchronized) {
+ args.push_back(this_object_or_class_object);
+ }
+ args.push_back(irb_.Runtime().EmitGetCurrentThread());
+ saved_local_ref_cookie =
+ irb_.CreateCall(irb_.GetRuntime(func_id), args);
+ }
+
+ // Call!!!
+ ::llvm::Value* retval = irb_.CreateCall(code_addr, args);
+
+ { // JniMethodEnd
+ bool is_return_ref = return_shorty == 'L';
+ RuntimeId func_id =
+ is_return_ref ? (is_synchronized ? JniMethodEndWithReferenceSynchronized
+ : JniMethodEndWithReference)
+ : (is_synchronized ? JniMethodEndSynchronized
+ : JniMethodEnd);
+ ::llvm::SmallVector< ::llvm::Value*, 4> args;
+ if (is_return_ref) {
+ args.push_back(retval);
+ }
+ args.push_back(saved_local_ref_cookie);
+ if (is_synchronized) {
+ args.push_back(this_object_or_class_object);
+ }
+ args.push_back(irb_.Runtime().EmitGetCurrentThread());
+
+ ::llvm::Value* decoded_jobject =
+ irb_.CreateCall(irb_.GetRuntime(func_id), args);
+
+ // Use the decoded jobject as the return value when returning a reference.
+ if (is_return_ref) {
+ retval = decoded_jobject;
+ }
+ }
+
+ // Pop the shadow frame
+ irb_.Runtime().EmitPopShadowFrame(old_shadow_frame);
+
+ // Return!
+ if (return_shorty != 'V') {
+ irb_.CreateRet(retval);
+ } else {
+ irb_.CreateRetVoid();
+ }
+
+ // Verify the generated bitcode
+ VERIFY_LLVM_FUNCTION(*func_);
+
+ cunit_->Materialize();
+
+ return new CompiledMethod(cunit_->GetInstructionSet(),
+ cunit_->GetElfObject(),
+ func_name);
+}
+
+
+void JniCompiler::CreateFunction(const std::string& func_name) {
+ CHECK_NE(0U, func_name.size());
+
+ const bool is_static = dex_compilation_unit_->IsStatic();
+
+ // Get function type
+ ::llvm::FunctionType* func_type =
+ GetFunctionType(dex_compilation_unit_->GetDexMethodIndex(), is_static, false);
+
+ // Create function
+ func_ = ::llvm::Function::Create(func_type, ::llvm::Function::InternalLinkage,
+ func_name, module_);
+
+ // Create basic block
+ ::llvm::BasicBlock* basic_block = ::llvm::BasicBlock::Create(*context_, "B0", func_);
+
+ // Set insert point
+ irb_.SetInsertPoint(basic_block);
+}
+
+
+::llvm::FunctionType* JniCompiler::GetFunctionType(uint32_t method_idx,
+ bool is_static, bool is_native_function) {
+ // Get method signature
+ uint32_t shorty_size;
+ const char* shorty = dex_compilation_unit_->GetShorty(&shorty_size);
+ CHECK_GE(shorty_size, 1u);
+
+ // Get return type
+ ::llvm::Type* ret_type = irb_.getJType(shorty[0]);
+
+ // Get argument type
+ std::vector< ::llvm::Type*> args_type;
+
+ args_type.push_back(irb_.getJObjectTy()); // method object pointer (JNIEnv* when building the native function type)
+
+ if (!is_static || is_native_function) {
+ // "this" object pointer for non-static
+ // "class" object pointer for static native
+ args_type.push_back(irb_.getJType('L'));
+ }
+
+ for (uint32_t i = 1; i < shorty_size; ++i) {
+ args_type.push_back(irb_.getJType(shorty[i]));
+ }
+
+ return ::llvm::FunctionType::get(ret_type, args_type, false);
+}
+
+} // namespace llvm
+} // namespace art
diff --git a/compiler/jni/portable/jni_compiler.h b/compiler/jni/portable/jni_compiler.h
new file mode 100644
index 0000000000..a04277c9e6
--- /dev/null
+++ b/compiler/jni/portable/jni_compiler.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_SRC_COMPILER_JNI_PORTABLE_JNI_COMPILER_H_
+#define ART_SRC_COMPILER_JNI_PORTABLE_JNI_COMPILER_H_
+
+#include <stdint.h>
+
+#include <string>
+
+namespace art {
+ class ClassLinker;
+ class CompiledMethod;
+ class CompilerDriver;
+ class DexFile;
+ class DexCompilationUnit;
+ namespace mirror {
+ class AbstractMethod;
+ class ClassLoader;
+ class DexCache;
+ } // namespace mirror
+} // namespace art
+
+namespace llvm {
+ class AllocaInst;
+ class Function;
+ class FunctionType;
+ class BasicBlock;
+ class LLVMContext;
+ class Module;
+ class Type;
+ class Value;
+} // namespace llvm
+
+namespace art {
+namespace llvm {
+
+class LlvmCompilationUnit;
+class IRBuilder;
+
+class JniCompiler {
+ public:
+ JniCompiler(LlvmCompilationUnit* cunit,
+ const CompilerDriver& driver,
+ const DexCompilationUnit* dex_compilation_unit);
+
+ CompiledMethod* Compile();
+
+ private:
+ void CreateFunction(const std::string& symbol);
+
+ ::llvm::FunctionType* GetFunctionType(uint32_t method_idx,
+ bool is_static, bool is_native_function);
+
+ private:
+ LlvmCompilationUnit* cunit_;
+ const CompilerDriver* const driver_;
+
+ ::llvm::Module* module_;
+ ::llvm::LLVMContext* context_;
+ IRBuilder& irb_;
+
+ const DexCompilationUnit* const dex_compilation_unit_;
+
+ ::llvm::Function* func_;
+ uint16_t elf_func_idx_;
+};
+
+
+} // namespace llvm
+} // namespace art
+
+
+#endif // ART_SRC_COMPILER_JNI_PORTABLE_JNI_COMPILER_H_
diff --git a/compiler/jni/quick/arm/calling_convention_arm.cc b/compiler/jni/quick/arm/calling_convention_arm.cc
new file mode 100644
index 0000000000..e9b09c5bba
--- /dev/null
+++ b/compiler/jni/quick/arm/calling_convention_arm.cc
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "base/logging.h"
+#include "calling_convention_arm.h"
+#include "oat/utils/arm/managed_register_arm.h"
+
+namespace art {
+namespace arm {
+
+// Calling convention
+
+ManagedRegister ArmManagedRuntimeCallingConvention::InterproceduralScratchRegister() {
+ return ArmManagedRegister::FromCoreRegister(IP); // R12
+}
+
+ManagedRegister ArmJniCallingConvention::InterproceduralScratchRegister() {
+ return ArmManagedRegister::FromCoreRegister(IP); // R12
+}
+
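+// Note: 'F' and 'D' results are returned in core registers (R0 and R0_R1),
+// consistent with the soft-float ABI this convention assumes.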
+static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+ if (shorty[0] == 'F') {
+ return ArmManagedRegister::FromCoreRegister(R0);
+ } else if (shorty[0] == 'D') {
+ return ArmManagedRegister::FromRegisterPair(R0_R1);
+ } else if (shorty[0] == 'J') {
+ return ArmManagedRegister::FromRegisterPair(R0_R1);
+ } else if (shorty[0] == 'V') {
+ return ArmManagedRegister::NoRegister();
+ } else {
+ return ArmManagedRegister::FromCoreRegister(R0);
+ }
+}
+
+ManagedRegister ArmManagedRuntimeCallingConvention::ReturnRegister() {
+ return ReturnRegisterForShorty(GetShorty());
+}
+
+ManagedRegister ArmJniCallingConvention::ReturnRegister() {
+ return ReturnRegisterForShorty(GetShorty());
+}
+
+ManagedRegister ArmJniCallingConvention::IntReturnRegister() {
+ return ArmManagedRegister::FromCoreRegister(R0);
+}
+
+// Managed runtime calling convention
+
+ManagedRegister ArmManagedRuntimeCallingConvention::MethodRegister() {
+ return ArmManagedRegister::FromCoreRegister(R0);
+}
+
+bool ArmManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
+ return false; // Everything moved to stack on entry.
+}
+
+bool ArmManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
+ return true;
+}
+
+ManagedRegister ArmManagedRuntimeCallingConvention::CurrentParamRegister() {
+ LOG(FATAL) << "Should not reach here";
+ return ManagedRegister::NoRegister();
+}
+
+FrameOffset ArmManagedRuntimeCallingConvention::CurrentParamStackOffset() {
+ CHECK(IsCurrentParamOnStack());
+ FrameOffset result =
+ FrameOffset(displacement_.Int32Value() + // displacement
+ kPointerSize + // Method*
+ (itr_slots_ * kPointerSize)); // offset into in args
+ return result;
+}
+
+const std::vector<ManagedRegister>& ArmManagedRuntimeCallingConvention::EntrySpills() {
+ // We spill the argument registers on ARM to free them up for scratch use; we then assume
+ // all arguments are on the stack.
+ if (entry_spills_.size() == 0) {
+ size_t num_spills = NumArgs() + NumLongOrDoubleArgs();
+ if (num_spills > 0) {
+ entry_spills_.push_back(ArmManagedRegister::FromCoreRegister(R1));
+ if (num_spills > 1) {
+ entry_spills_.push_back(ArmManagedRegister::FromCoreRegister(R2));
+ if (num_spills > 2) {
+ entry_spills_.push_back(ArmManagedRegister::FromCoreRegister(R3));
+ }
+ }
+ }
+ }
+ return entry_spills_;
+}
+// JNI calling convention
+
+ArmJniCallingConvention::ArmJniCallingConvention(bool is_static, bool is_synchronized,
+ const char* shorty)
+ : JniCallingConvention(is_static, is_synchronized, shorty) {
+ // Compute padding to ensure longs and doubles are not split in AAPCS. Ignore the 'this' jobject
+ // (or the jclass for static methods) and the JNIEnv. We start at the aligned register r2.
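+ // For example (illustrative): for a static (IJ)V method the jint lands in r2,
+ // which would leave the jlong starting at the odd register r3, so four bytes of
+ // padding are recorded and the long begins at an even slot.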
+ size_t padding = 0;
+ for (size_t cur_arg = IsStatic() ? 0 : 1, cur_reg = 2; cur_arg < NumArgs(); cur_arg++) {
+ if (IsParamALongOrDouble(cur_arg)) {
+ if ((cur_reg & 1) != 0) {
+ padding += 4;
+ cur_reg++; // additional bump to ensure alignment
+ }
+ cur_reg++; // additional bump to skip extra long word
+ }
+ cur_reg++; // bump the iterator for every argument
+ }
+ padding_ = padding;
+
+ callee_save_regs_.push_back(ArmManagedRegister::FromCoreRegister(R5));
+ callee_save_regs_.push_back(ArmManagedRegister::FromCoreRegister(R6));
+ callee_save_regs_.push_back(ArmManagedRegister::FromCoreRegister(R7));
+ callee_save_regs_.push_back(ArmManagedRegister::FromCoreRegister(R8));
+ callee_save_regs_.push_back(ArmManagedRegister::FromCoreRegister(R10));
+ callee_save_regs_.push_back(ArmManagedRegister::FromCoreRegister(R11));
+}
+
+uint32_t ArmJniCallingConvention::CoreSpillMask() const {
+ // Compute spill mask to agree with the callee saves initialized in the constructor, plus LR
+ uint32_t result = 0;
+ result = 1 << R5 | 1 << R6 | 1 << R7 | 1 << R8 | 1 << R10 | 1 << R11 | 1 << LR;
+ return result;
+}
+
+ManagedRegister ArmJniCallingConvention::ReturnScratchRegister() const {
+ return ArmManagedRegister::FromCoreRegister(R2);
+}
+
+size_t ArmJniCallingConvention::FrameSize() {
+ // Method*, LR and callee save area size, local reference segment state
+ size_t frame_data_size = (3 + CalleeSaveRegisters().size()) * kPointerSize;
+ // References plus 2 words for SIRT header
+ size_t sirt_size = (ReferenceCount() + 2) * kPointerSize;
+ // Plus return value spill area size
+ return RoundUp(frame_data_size + sirt_size + SizeOfReturnValue(), kStackAlignment);
+}
+
+size_t ArmJniCallingConvention::OutArgSize() {
+ return RoundUp(NumberOfOutgoingStackArgs() * kPointerSize + padding_,
+ kStackAlignment);
+}
+
+// JniCallingConvention ABI follows AAPCS where longs and doubles must occur
+// in even register numbers and stack slots
+void ArmJniCallingConvention::Next() {
+ JniCallingConvention::Next();
+ size_t arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ if ((itr_args_ >= 2) &&
+ (arg_pos < NumArgs()) &&
+ IsParamALongOrDouble(arg_pos)) {
+ // itr_slots_ needs to be an even number, according to AAPCS.
+ if ((itr_slots_ & 0x1u) != 0) {
+ itr_slots_++;
+ }
+ }
+}
+
+bool ArmJniCallingConvention::IsCurrentParamInRegister() {
+ return itr_slots_ < 4;
+}
+
+bool ArmJniCallingConvention::IsCurrentParamOnStack() {
+ return !IsCurrentParamInRegister();
+}
+
+static const Register kJniArgumentRegisters[] = {
+ R0, R1, R2, R3
+};
+ManagedRegister ArmJniCallingConvention::CurrentParamRegister() {
+ CHECK_LT(itr_slots_, 4u);
+ int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ if ((itr_args_ >= 2) && IsParamALongOrDouble(arg_pos)) {
+ CHECK_EQ(itr_slots_, 2u);
+ return ArmManagedRegister::FromRegisterPair(R2_R3);
+ } else {
+ return
+ ArmManagedRegister::FromCoreRegister(kJniArgumentRegisters[itr_slots_]);
+ }
+}
+
+FrameOffset ArmJniCallingConvention::CurrentParamStackOffset() {
+ CHECK_GE(itr_slots_, 4u);
+ size_t offset = displacement_.Int32Value() - OutArgSize() + ((itr_slots_ - 4) * kPointerSize);
+ CHECK_LT(offset, OutArgSize());
+ return FrameOffset(offset);
+}
+
+size_t ArmJniCallingConvention::NumberOfOutgoingStackArgs() {
+ size_t static_args = IsStatic() ? 1 : 0; // count jclass
+ // regular argument parameters and this
+ size_t param_args = NumArgs() + NumLongOrDoubleArgs();
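+ // Illustrative example: a static (IJ)V method gives
+ // 1 (jclass) + 3 (jint plus jlong as two words) + 1 (JNIEnv*) - 4 = 1 stack word.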
+ // add one for JNIEnv*, then subtract the four argument words passed in registers
+ return static_args + param_args + 1 - 4;
+}
+
+} // namespace arm
+} // namespace art
diff --git a/compiler/jni/quick/arm/calling_convention_arm.h b/compiler/jni/quick/arm/calling_convention_arm.h
new file mode 100644
index 0000000000..3787d45c6f
--- /dev/null
+++ b/compiler/jni/quick/arm/calling_convention_arm.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_SRC_OAT_JNI_ARM_CALLING_CONVENTION_ARM_H_
+#define ART_SRC_OAT_JNI_ARM_CALLING_CONVENTION_ARM_H_
+
+#include "jni/quick/calling_convention.h"
+
+namespace art {
+namespace arm {
+
+class ArmManagedRuntimeCallingConvention : public ManagedRuntimeCallingConvention {
+ public:
+ ArmManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ : ManagedRuntimeCallingConvention(is_static, is_synchronized, shorty) {}
+ virtual ~ArmManagedRuntimeCallingConvention() {}
+ // Calling convention
+ virtual ManagedRegister ReturnRegister();
+ virtual ManagedRegister InterproceduralScratchRegister();
+ // Managed runtime calling convention
+ virtual ManagedRegister MethodRegister();
+ virtual bool IsCurrentParamInRegister();
+ virtual bool IsCurrentParamOnStack();
+ virtual ManagedRegister CurrentParamRegister();
+ virtual FrameOffset CurrentParamStackOffset();
+ virtual const std::vector<ManagedRegister>& EntrySpills();
+
+ private:
+ std::vector<ManagedRegister> entry_spills_;
+
+ DISALLOW_COPY_AND_ASSIGN(ArmManagedRuntimeCallingConvention);
+};
+
+class ArmJniCallingConvention : public JniCallingConvention {
+ public:
+ explicit ArmJniCallingConvention(bool is_static, bool is_synchronized, const char* shorty);
+ virtual ~ArmJniCallingConvention() {}
+ // Calling convention
+ virtual ManagedRegister ReturnRegister();
+ virtual ManagedRegister IntReturnRegister();
+ virtual ManagedRegister InterproceduralScratchRegister();
+ // JNI calling convention
+ virtual void Next(); // Override default behavior for AAPCS
+ virtual size_t FrameSize();
+ virtual size_t OutArgSize();
+ virtual const std::vector<ManagedRegister>& CalleeSaveRegisters() const {
+ return callee_save_regs_;
+ }
+ virtual ManagedRegister ReturnScratchRegister() const;
+ virtual uint32_t CoreSpillMask() const;
+ virtual uint32_t FpSpillMask() const {
+ return 0; // Floats aren't spilled in JNI down call
+ }
+ virtual bool IsCurrentParamInRegister();
+ virtual bool IsCurrentParamOnStack();
+ virtual ManagedRegister CurrentParamRegister();
+ virtual FrameOffset CurrentParamStackOffset();
+
+ protected:
+ virtual size_t NumberOfOutgoingStackArgs();
+
+ private:
+ // TODO: these values aren't unique and can be shared amongst instances
+ std::vector<ManagedRegister> callee_save_regs_;
+
+ // Padding to ensure longs and doubles are not split in AAPCS
+ size_t padding_;
+
+ DISALLOW_COPY_AND_ASSIGN(ArmJniCallingConvention);
+};
+
+} // namespace arm
+} // namespace art
+
+#endif // ART_SRC_OAT_JNI_ARM_CALLING_CONVENTION_ARM_H_
diff --git a/compiler/jni/quick/calling_convention.cc b/compiler/jni/quick/calling_convention.cc
new file mode 100644
index 0000000000..5d5eaf2ea9
--- /dev/null
+++ b/compiler/jni/quick/calling_convention.cc
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "calling_convention.h"
+
+#include "base/logging.h"
+#include "jni/quick/arm/calling_convention_arm.h"
+#include "jni/quick/mips/calling_convention_mips.h"
+#include "jni/quick/x86/calling_convention_x86.h"
+#include "utils.h"
+
+namespace art {
+
+// Offset of Method within the frame
+FrameOffset CallingConvention::MethodStackOffset() {
+ return displacement_;
+}
+
+// Managed runtime calling convention
+
+ManagedRuntimeCallingConvention* ManagedRuntimeCallingConvention::Create(
+ bool is_static, bool is_synchronized, const char* shorty, InstructionSet instruction_set) {
+ switch (instruction_set) {
+ case kArm:
+ case kThumb2:
+ return new arm::ArmManagedRuntimeCallingConvention(is_static, is_synchronized, shorty);
+ case kMips:
+ return new mips::MipsManagedRuntimeCallingConvention(is_static, is_synchronized, shorty);
+ case kX86:
+ return new x86::X86ManagedRuntimeCallingConvention(is_static, is_synchronized, shorty);
+ default:
+ LOG(FATAL) << "Unknown InstructionSet: " << instruction_set;
+ return NULL;
+ }
+}
+
+bool ManagedRuntimeCallingConvention::HasNext() {
+ return itr_args_ < NumArgs();
+}
+
+void ManagedRuntimeCallingConvention::Next() {
+ CHECK(HasNext());
+ if (IsCurrentArgExplicit() && // don't query parameter type of implicit args
+ IsParamALongOrDouble(itr_args_)) {
+ itr_longs_and_doubles_++;
+ itr_slots_++;
+ }
+ if (IsCurrentParamAReference()) {
+ itr_refs_++;
+ }
+ itr_args_++;
+ itr_slots_++;
+}
+
+bool ManagedRuntimeCallingConvention::IsCurrentArgExplicit() {
+ // Static methods have no implicit arguments, others implicitly pass this
+ return IsStatic() || (itr_args_ != 0);
+}
+
+bool ManagedRuntimeCallingConvention::IsCurrentArgPossiblyNull() {
+ return IsCurrentArgExplicit(); // any user parameter may be null
+}
+
+size_t ManagedRuntimeCallingConvention::CurrentParamSize() {
+ return ParamSize(itr_args_);
+}
+
+bool ManagedRuntimeCallingConvention::IsCurrentParamAReference() {
+ return IsParamAReference(itr_args_);
+}
+
+// JNI calling convention
+
+JniCallingConvention* JniCallingConvention::Create(bool is_static, bool is_synchronized,
+ const char* shorty,
+ InstructionSet instruction_set) {
+ switch (instruction_set) {
+ case kArm:
+ case kThumb2:
+ return new arm::ArmJniCallingConvention(is_static, is_synchronized, shorty);
+ case kMips:
+ return new mips::MipsJniCallingConvention(is_static, is_synchronized, shorty);
+ case kX86:
+ return new x86::X86JniCallingConvention(is_static, is_synchronized, shorty);
+ default:
+ LOG(FATAL) << "Unknown InstructionSet: " << instruction_set;
+ return NULL;
+ }
+}
+
+size_t JniCallingConvention::ReferenceCount() const {
+ return NumReferenceArgs() + (IsStatic() ? 1 : 0);
+}
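+
+// For illustration: a static native method `void f(Object o)` (shorty "VL")
+// has NumReferenceArgs() == 1 (o) plus one for the jclass, so
+// ReferenceCount() == 2.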
+
+FrameOffset JniCallingConvention::SavedLocalReferenceCookieOffset() const {
+ size_t start_of_sirt = SirtLinkOffset().Int32Value() + kPointerSize;
+ size_t references_size = kPointerSize * ReferenceCount(); // size excluding header
+ return FrameOffset(start_of_sirt + references_size);
+}
+
+FrameOffset JniCallingConvention::ReturnValueSaveLocation() const {
+ // Segment state is 4 bytes long
+ return FrameOffset(SavedLocalReferenceCookieOffset().Int32Value() + 4);
+}
+
+bool JniCallingConvention::HasNext() {
+ if (itr_args_ <= kObjectOrClass) {
+ return true;
+ } else {
+ unsigned int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ return arg_pos < NumArgs();
+ }
+}
+
+void JniCallingConvention::Next() {
+ CHECK(HasNext());
+ if (itr_args_ > kObjectOrClass) {
+ int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ if (IsParamALongOrDouble(arg_pos)) {
+ itr_longs_and_doubles_++;
+ itr_slots_++;
+ }
+ }
+ if (IsCurrentParamAReference()) {
+ itr_refs_++;
+ }
+ itr_args_++;
+ itr_slots_++;
+}
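+
+// For illustration: iterating a static native method `int f(Object o)`
+// (shorty "IL") visits kJniEnv (the JNIEnv*), then kObjectOrClass (the
+// jclass), and finally o at arg_pos 0.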
+
+bool JniCallingConvention::IsCurrentParamAReference() {
+ switch (itr_args_) {
+ case kJniEnv:
+ return false; // JNIEnv*
+ case kObjectOrClass:
+ return true; // jobject or jclass
+ default: {
+ int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ return IsParamAReference(arg_pos);
+ }
+ }
+}
+
+// Return position of SIRT entry holding reference at the current iterator
+// position
+FrameOffset JniCallingConvention::CurrentParamSirtEntryOffset() {
+ CHECK(IsCurrentParamAReference());
+ CHECK_GT(SirtLinkOffset(), SirtNumRefsOffset());
+ // Address of 1st SIRT entry
+ int result = SirtLinkOffset().Int32Value() + kPointerSize;
+ result += itr_refs_ * kPointerSize;
+ CHECK_GT(result, SirtLinkOffset().Int32Value());
+ return FrameOffset(result);
+}
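+
+// For illustration, assuming 32-bit pointers, a zero displacement and the
+// usual two-word header order checked above (number-of-references word, then
+// link word): Method* sits at offset 0, the header at offsets 4 and 8, and
+// the SIRT entry for the itr_refs_-th reference at 12 + 4 * itr_refs_.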
+
+size_t JniCallingConvention::CurrentParamSize() {
+ if (itr_args_ <= kObjectOrClass) {
+ return kPointerSize; // JNIEnv or jobject/jclass
+ } else {
+ int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ return ParamSize(arg_pos);
+ }
+}
+
+size_t JniCallingConvention::NumberOfExtraArgumentsForJni() {
+ // The first argument is the JNIEnv*.
+ // Static methods have an extra argument which is the jclass.
+ return IsStatic() ? 2 : 1;
+}
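+
+// For illustration: for an instance method only the JNIEnv* is extra (the
+// jobject receiver corresponds to the managed 'this'); for a static method
+// both the JNIEnv* and the jclass are extra.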
+
+} // namespace art
diff --git a/compiler/jni/quick/calling_convention.h b/compiler/jni/quick/calling_convention.h
new file mode 100644
index 0000000000..121d1f80ae
--- /dev/null
+++ b/compiler/jni/quick/calling_convention.h
@@ -0,0 +1,289 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_SRC_OAT_JNI_CALLING_CONVENTION_H_
+#define ART_SRC_OAT_JNI_CALLING_CONVENTION_H_
+
+#include <vector>
+#include "oat/utils/managed_register.h"
+#include "stack_indirect_reference_table.h"
+#include "thread.h"
+
+namespace art {
+
+// Top-level abstraction for different calling conventions
+class CallingConvention {
+ public:
+ bool IsReturnAReference() const { return shorty_[0] == 'L'; }
+
+ Primitive::Type GetReturnType() const {
+ return Primitive::GetType(shorty_[0]);
+ }
+
+ size_t SizeOfReturnValue() const {
+ size_t result = Primitive::ComponentSize(Primitive::GetType(shorty_[0]));
+ if (result >= 1 && result < 4) {
+ result = 4;
+ }
+ return result;
+ }
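+
+ // For illustration: a 'B' (byte) return value has component size 1 and a
+ // SizeOfReturnValue() of 4, a 'J' (long) return 8, and a 'V' return 0.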
+
+ // Register that holds result of this method invocation.
+ virtual ManagedRegister ReturnRegister() = 0;
+ // Register reserved for scratch usage during procedure calls.
+ virtual ManagedRegister InterproceduralScratchRegister() = 0;
+
+ // Offset of Method within the frame
+ FrameOffset MethodStackOffset();
+
+ // Iterator interface
+
+ // Place iterator at start of arguments. The displacement is applied to
+ // frame offset methods to account for frames which may be on the stack
+ // below the one being iterated over.
+ void ResetIterator(FrameOffset displacement) {
+ displacement_ = displacement;
+ itr_slots_ = 0;
+ itr_args_ = 0;
+ itr_refs_ = 0;
+ itr_longs_and_doubles_ = 0;
+ }
+
+ virtual ~CallingConvention() {}
+
+ protected:
+ CallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ : displacement_(0), is_static_(is_static), is_synchronized_(is_synchronized),
+ shorty_(shorty) {
+ num_args_ = (is_static ? 0 : 1) + strlen(shorty) - 1;
+ num_ref_args_ = is_static ? 0 : 1; // The implicit this pointer.
+ num_long_or_double_args_ = 0;
+ for (size_t i = 1; i < strlen(shorty); i++) {
+ char ch = shorty_[i];
+ if (ch == 'L') {
+ num_ref_args_++;
+ } else if ((ch == 'D') || (ch == 'J')) {
+ num_long_or_double_args_++;
+ }
+ }
+ }
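+
+ // For illustration: a virtual method `long f(Object o, long j)` has shorty
+ // "JLJ", giving num_args_ = 3 (the implicit this, o and j), num_ref_args_ = 2
+ // (this and o) and num_long_or_double_args_ = 1 (j; the return type in
+ // shorty[0] is never counted).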
+
+ bool IsStatic() const {
+ return is_static_;
+ }
+ bool IsSynchronized() const {
+ return is_synchronized_;
+ }
+ bool IsParamALongOrDouble(unsigned int param) const {
+ DCHECK_LT(param, NumArgs());
+ if (IsStatic()) {
+ param++; // 0th argument must skip return value at start of the shorty
+ } else if (param == 0) {
+ return false; // this argument
+ }
+ char ch = shorty_[param];
+ return (ch == 'J' || ch == 'D');
+ }
+ bool IsParamAReference(unsigned int param) const {
+ DCHECK_LT(param, NumArgs());
+ if (IsStatic()) {
+ param++; // 0th argument must skip return value at start of the shorty
+ } else if (param == 0) {
+ return true; // this argument
+ }
+ return shorty_[param] == 'L';
+ }
+ size_t NumArgs() const {
+ return num_args_;
+ }
+ size_t NumLongOrDoubleArgs() const {
+ return num_long_or_double_args_;
+ }
+ size_t NumReferenceArgs() const {
+ return num_ref_args_;
+ }
+ size_t ParamSize(unsigned int param) const {
+ DCHECK_LT(param, NumArgs());
+ if (IsStatic()) {
+ param++; // 0th argument must skip return value at start of the shorty
+ } else if (param == 0) {
+ return kPointerSize; // this argument
+ }
+ size_t result = Primitive::ComponentSize(Primitive::GetType(shorty_[param]));
+ if (result >= 1 && result < 4) {
+ result = 4;
+ }
+ return result;
+ }
+ const char* GetShorty() const {
+ return shorty_.c_str();
+ }
+ // The slot number of the current argument in the calling convention; each
+ // slot is 32-bit. For an argument wider than 32 bits this holds the number
+ // of its first slot.
+ unsigned int itr_slots_;
+ // The number of references iterated past
+ unsigned int itr_refs_;
+ // The argument number along argument list for current argument
+ unsigned int itr_args_;
+ // Number of longs and doubles seen along argument list
+ unsigned int itr_longs_and_doubles_;
+ // Space for frames below this on the stack
+ FrameOffset displacement_;
+
+ private:
+ const bool is_static_;
+ const bool is_synchronized_;
+ std::string shorty_;
+ size_t num_args_;
+ size_t num_ref_args_;
+ size_t num_long_or_double_args_;
+};
+
+// Abstraction for managed code's calling conventions
+// | { Incoming stack args } |
+// | { Prior Method* } | <-- Prior SP
+// | { Return address } |
+// | { Callee saves } |
+// | { Spills ... } |
+// | { Outgoing stack args } |
+// | { Method* } | <-- SP
+class ManagedRuntimeCallingConvention : public CallingConvention {
+ public:
+ static ManagedRuntimeCallingConvention* Create(bool is_static, bool is_synchronized,
+ const char* shorty,
+ InstructionSet instruction_set);
+
+ // Register that holds the incoming method argument
+ virtual ManagedRegister MethodRegister() = 0;
+
+ // Iterator interface
+ bool HasNext();
+ void Next();
+ bool IsCurrentParamAReference();
+ bool IsCurrentArgExplicit(); // i.e. a non-implicit argument such as "this"
+ bool IsCurrentArgPossiblyNull();
+ size_t CurrentParamSize();
+ virtual bool IsCurrentParamInRegister() = 0;
+ virtual bool IsCurrentParamOnStack() = 0;
+ virtual ManagedRegister CurrentParamRegister() = 0;
+ virtual FrameOffset CurrentParamStackOffset() = 0;
+
+ virtual ~ManagedRuntimeCallingConvention() {}
+
+ // Registers to spill into the caller's out args area on entry.
+ virtual const std::vector<ManagedRegister>& EntrySpills() = 0;
+
+ protected:
+ ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ : CallingConvention(is_static, is_synchronized, shorty) {}
+};
+
+// Abstraction for JNI calling conventions
+// | { Incoming stack args } | <-- Prior SP
+// | { Return address } |
+// | { Callee saves } | ([1])
+// | { Return value spill } | (live on return slow paths)
+// | { Local Ref. Table State } |
+// | { Stack Indirect Ref. Table |
+// | num. refs./link } | (here to prior SP is frame size)
+// | { Method* } | <-- Anchor SP written to thread
+// | { Outgoing stack args } | <-- SP at point of call
+// | Native frame |
+//
+// [1] We must save all callee saves here to enable any exception throws to restore
+// callee saves for frames above this one.
+class JniCallingConvention : public CallingConvention {
+ public:
+ static JniCallingConvention* Create(bool is_static, bool is_synchronized, const char* shorty,
+ InstructionSet instruction_set);
+
+ // Size of frame excluding space for outgoing args (it's assumed Method* is
+ // always at the bottom of a frame, but this doesn't work for outgoing
+ // native args). Includes alignment.
+ virtual size_t FrameSize() = 0;
+ // Size of outgoing arguments, including alignment
+ virtual size_t OutArgSize() = 0;
+ // Number of references in stack indirect reference table
+ size_t ReferenceCount() const;
+ // Location where the segment state of the local indirect reference table is saved
+ FrameOffset SavedLocalReferenceCookieOffset() const;
+ // Location where the return value of a call can be squirreled if another
+ // call is made following the native call
+ FrameOffset ReturnValueSaveLocation() const;
+ // Register that holds result if it is integer.
+ virtual ManagedRegister IntReturnRegister() = 0;
+
+ // Callee save registers to spill prior to native code (which may clobber)
+ virtual const std::vector<ManagedRegister>& CalleeSaveRegisters() const = 0;
+
+ // Spill mask values
+ virtual uint32_t CoreSpillMask() const = 0;
+ virtual uint32_t FpSpillMask() const = 0;
+
+ // An extra scratch register live after the call
+ virtual ManagedRegister ReturnScratchRegister() const = 0;
+
+ // Iterator interface
+ bool HasNext();
+ virtual void Next();
+ bool IsCurrentParamAReference();
+ size_t CurrentParamSize();
+ virtual bool IsCurrentParamInRegister() = 0;
+ virtual bool IsCurrentParamOnStack() = 0;
+ virtual ManagedRegister CurrentParamRegister() = 0;
+ virtual FrameOffset CurrentParamStackOffset() = 0;
+
+ // Iterator interface extension for JNI
+ FrameOffset CurrentParamSirtEntryOffset();
+
+ // Position of SIRT and interior fields
+ FrameOffset SirtOffset() const {
+ return FrameOffset(displacement_.Int32Value() +
+ kPointerSize); // above Method*
+ }
+ FrameOffset SirtNumRefsOffset() const {
+ return FrameOffset(SirtOffset().Int32Value() +
+ StackIndirectReferenceTable::NumberOfReferencesOffset());
+ }
+ FrameOffset SirtLinkOffset() const {
+ return FrameOffset(SirtOffset().Int32Value() +
+ StackIndirectReferenceTable::LinkOffset());
+ }
+
+ virtual ~JniCallingConvention() {}
+
+ protected:
+ // Named iterator positions
+ enum IteratorPos {
+ kJniEnv = 0,
+ kObjectOrClass = 1
+ };
+
+ explicit JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ : CallingConvention(is_static, is_synchronized, shorty) {}
+
+ // Number of stack slots for outgoing arguments, above which the SIRT is
+ // located
+ virtual size_t NumberOfOutgoingStackArgs() = 0;
+
+ size_t NumberOfExtraArgumentsForJni();
+};
+
+} // namespace art
+
+#endif // ART_SRC_OAT_JNI_CALLING_CONVENTION_H_
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
new file mode 100644
index 0000000000..fa227f7fbb
--- /dev/null
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -0,0 +1,489 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <vector>
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "calling_convention.h"
+#include "class_linker.h"
+#include "compiled_method.h"
+#include "dex_file-inl.h"
+#include "driver/compiler_driver.h"
+#include "disassembler.h"
+#include "jni_internal.h"
+#include "oat/runtime/oat_support_entrypoints.h"
+#include "oat/utils/assembler.h"
+#include "oat/utils/managed_register.h"
+#include "oat/utils/arm/managed_register_arm.h"
+#include "oat/utils/mips/managed_register_mips.h"
+#include "oat/utils/x86/managed_register_x86.h"
+#include "thread.h"
+#include "UniquePtr.h"
+
+#define __ jni_asm->
+
+namespace art {
+
+static void CopyParameter(Assembler* jni_asm,
+ ManagedRuntimeCallingConvention* mr_conv,
+ JniCallingConvention* jni_conv,
+ size_t frame_size, size_t out_arg_size);
+static void SetNativeParameter(Assembler* jni_asm,
+ JniCallingConvention* jni_conv,
+ ManagedRegister in_reg);
+
+// Generate the JNI bridge for the given method, general contract:
+// - Arguments are in the managed runtime format, either on stack or in
+// registers, a reference to the method object is supplied as part of this
+// convention.
+//
+CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver& compiler,
+ uint32_t access_flags, uint32_t method_idx,
+ const DexFile& dex_file) {
+ const bool is_native = (access_flags & kAccNative) != 0;
+ CHECK(is_native);
+ const bool is_static = (access_flags & kAccStatic) != 0;
+ const bool is_synchronized = (access_flags & kAccSynchronized) != 0;
+ const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
+ InstructionSet instruction_set = compiler.GetInstructionSet();
+ if (instruction_set == kThumb2) {
+ instruction_set = kArm;
+ }
+ // Calling conventions used to iterate over parameters to method
+ UniquePtr<JniCallingConvention> main_jni_conv(
+ JniCallingConvention::Create(is_static, is_synchronized, shorty, instruction_set));
+ bool reference_return = main_jni_conv->IsReturnAReference();
+
+ UniquePtr<ManagedRuntimeCallingConvention> mr_conv(
+ ManagedRuntimeCallingConvention::Create(is_static, is_synchronized, shorty, instruction_set));
+
+ // Calling conventions to call into JNI method "end" possibly passing a returned reference, the
+ // method and the current thread.
+ size_t jni_end_arg_count = 0;
+ if (reference_return) { jni_end_arg_count++; }
+ if (is_synchronized) { jni_end_arg_count++; }
+ const char* jni_end_shorty = jni_end_arg_count == 0 ? "I"
+ : (jni_end_arg_count == 1 ? "II" : "III");
+ UniquePtr<JniCallingConvention> end_jni_conv(
+ JniCallingConvention::Create(is_static, is_synchronized, jni_end_shorty, instruction_set));
+
+ // Assembler that holds generated instructions
+ UniquePtr<Assembler> jni_asm(Assembler::Create(instruction_set));
+ bool should_disassemble = false;
+
+ // Offsets into data structures
+ // TODO: if cross compiling these offsets are for the host not the target
+ const Offset functions(OFFSETOF_MEMBER(JNIEnvExt, functions));
+ const Offset monitor_enter(OFFSETOF_MEMBER(JNINativeInterface, MonitorEnter));
+ const Offset monitor_exit(OFFSETOF_MEMBER(JNINativeInterface, MonitorExit));
+
+ // 1. Build the frame saving all callee saves
+ const size_t frame_size(main_jni_conv->FrameSize());
+ const std::vector<ManagedRegister>& callee_save_regs = main_jni_conv->CalleeSaveRegisters();
+ __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills());
+
+ // 2. Set up the StackIndirectReferenceTable
+ mr_conv->ResetIterator(FrameOffset(frame_size));
+ main_jni_conv->ResetIterator(FrameOffset(0));
+ __ StoreImmediateToFrame(main_jni_conv->SirtNumRefsOffset(),
+ main_jni_conv->ReferenceCount(),
+ mr_conv->InterproceduralScratchRegister());
+ __ CopyRawPtrFromThread(main_jni_conv->SirtLinkOffset(),
+ Thread::TopSirtOffset(),
+ mr_conv->InterproceduralScratchRegister());
+ __ StoreStackOffsetToThread(Thread::TopSirtOffset(),
+ main_jni_conv->SirtOffset(),
+ mr_conv->InterproceduralScratchRegister());
+
+ // 3. Place incoming reference arguments into SIRT
+ main_jni_conv->Next(); // Skip JNIEnv*
+ // 3.5. Create Class argument for static methods out of passed method
+ if (is_static) {
+ FrameOffset sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
+ // Check sirt offset is within frame
+ CHECK_LT(sirt_offset.Uint32Value(), frame_size);
+ __ LoadRef(main_jni_conv->InterproceduralScratchRegister(),
+ mr_conv->MethodRegister(), mirror::AbstractMethod::DeclaringClassOffset());
+ __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false);
+ __ StoreRef(sirt_offset, main_jni_conv->InterproceduralScratchRegister());
+ main_jni_conv->Next(); // in SIRT so move to next argument
+ }
+ while (mr_conv->HasNext()) {
+ CHECK(main_jni_conv->HasNext());
+ bool ref_param = main_jni_conv->IsCurrentParamAReference();
+ CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
+ // References need to be placed in the SIRT, with their entry value passed on
+ if (ref_param) {
+ // Compute the SIRT entry; note that a null reference is still placed in
+ // the SIRT, but the boxed value passed to native code must be NULL
+ FrameOffset sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
+ // Check SIRT offset is within frame and doesn't run into the saved segment state
+ CHECK_LT(sirt_offset.Uint32Value(), frame_size);
+ CHECK_NE(sirt_offset.Uint32Value(),
+ main_jni_conv->SavedLocalReferenceCookieOffset().Uint32Value());
+ bool input_in_reg = mr_conv->IsCurrentParamInRegister();
+ bool input_on_stack = mr_conv->IsCurrentParamOnStack();
+ CHECK(input_in_reg || input_on_stack);
+
+ if (input_in_reg) {
+ ManagedRegister in_reg = mr_conv->CurrentParamRegister();
+ __ VerifyObject(in_reg, mr_conv->IsCurrentArgPossiblyNull());
+ __ StoreRef(sirt_offset, in_reg);
+ } else if (input_on_stack) {
+ FrameOffset in_off = mr_conv->CurrentParamStackOffset();
+ __ VerifyObject(in_off, mr_conv->IsCurrentArgPossiblyNull());
+ __ CopyRef(sirt_offset, in_off,
+ mr_conv->InterproceduralScratchRegister());
+ }
+ }
+ mr_conv->Next();
+ main_jni_conv->Next();
+ }
+
+ // 4. Write out the end of the quick frames.
+ __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset());
+ __ StoreImmediateToThread(Thread::TopOfManagedStackPcOffset(), 0,
+ mr_conv->InterproceduralScratchRegister());
+
+ // 5. Move frame down to allow space for outgoing args.
+ const size_t main_out_arg_size = main_jni_conv->OutArgSize();
+ const size_t end_out_arg_size = end_jni_conv->OutArgSize();
+ const size_t max_out_arg_size = std::max(main_out_arg_size, end_out_arg_size);
+ __ IncreaseFrameSize(max_out_arg_size);
+
+ // 6. Call into appropriate JniMethodStart passing Thread* so that transition out of Runnable
+ // can occur. The result is the saved JNI local state that is restored by the exit call. We
+ // abuse the JNI calling convention here, which is guaranteed to support passing two pointer
+ // arguments.
+ uintptr_t jni_start = is_synchronized ? ENTRYPOINT_OFFSET(pJniMethodStartSynchronized)
+ : ENTRYPOINT_OFFSET(pJniMethodStart);
+ main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
+ FrameOffset locked_object_sirt_offset(0);
+ if (is_synchronized) {
+ // Pass object for locking.
+ main_jni_conv->Next(); // Skip JNIEnv.
+ locked_object_sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
+ main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
+ if (main_jni_conv->IsCurrentParamOnStack()) {
+ FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
+ __ CreateSirtEntry(out_off, locked_object_sirt_offset,
+ mr_conv->InterproceduralScratchRegister(),
+ false);
+ } else {
+ ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
+ __ CreateSirtEntry(out_reg, locked_object_sirt_offset,
+ ManagedRegister::NoRegister(), false);
+ }
+ main_jni_conv->Next();
+ }
+ if (main_jni_conv->IsCurrentParamInRegister()) {
+ __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
+ __ Call(main_jni_conv->CurrentParamRegister(), Offset(jni_start),
+ main_jni_conv->InterproceduralScratchRegister());
+ } else {
+ __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset(),
+ main_jni_conv->InterproceduralScratchRegister());
+ __ Call(ThreadOffset(jni_start), main_jni_conv->InterproceduralScratchRegister());
+ }
+ if (is_synchronized) { // Check for exceptions from monitor enter.
+ __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), main_out_arg_size);
+ }
+ FrameOffset saved_cookie_offset = main_jni_conv->SavedLocalReferenceCookieOffset();
+ __ Store(saved_cookie_offset, main_jni_conv->IntReturnRegister(), 4);
+
+ // 7. Iterate over arguments placing values from the managed calling convention into
+ // the convention required for a native call (shuffling). For references, pass a
+ // pointer to the SIRT entry after checking whether the reference is NULL (in
+ // which case NULL itself, not a SIRT address, must be passed).
+ // Note: we do this prior to materializing the JNIEnv* and static's jclass to
+ // give as many free registers for the shuffle as possible
+ mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
+ uint32_t args_count = 0;
+ while (mr_conv->HasNext()) {
+ args_count++;
+ mr_conv->Next();
+ }
+
+ // Do a backward pass over arguments, so that the generated code will be "mov
+ // R2, R3; mov R1, R2" instead of "mov R1, R2; mov R2, R3."
+ // TODO: A reverse iterator to improve readability.
+ for (uint32_t i = 0; i < args_count; ++i) {
+ mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
+ main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
+ main_jni_conv->Next(); // Skip JNIEnv*.
+ if (is_static) {
+ main_jni_conv->Next(); // Skip Class for now.
+ }
+ // Skip to the argument we're interested in.
+ for (uint32_t j = 0; j < args_count - i - 1; ++j) {
+ mr_conv->Next();
+ main_jni_conv->Next();
+ }
+ CopyParameter(jni_asm.get(), mr_conv.get(), main_jni_conv.get(), frame_size, main_out_arg_size);
+ }
+ if (is_static) {
+ // Create argument for Class
+ mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
+ main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
+ main_jni_conv->Next(); // Skip JNIEnv*
+ FrameOffset sirt_offset = main_jni_conv->CurrentParamSirtEntryOffset();
+ if (main_jni_conv->IsCurrentParamOnStack()) {
+ FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
+ __ CreateSirtEntry(out_off, sirt_offset,
+ mr_conv->InterproceduralScratchRegister(),
+ false);
+ } else {
+ ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
+ __ CreateSirtEntry(out_reg, sirt_offset,
+ ManagedRegister::NoRegister(), false);
+ }
+ }
+
+ // 8. Create 1st argument, the JNI environment ptr.
+ main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
+ // Register that will hold local indirect reference table
+ if (main_jni_conv->IsCurrentParamInRegister()) {
+ ManagedRegister jni_env = main_jni_conv->CurrentParamRegister();
+ DCHECK(!jni_env.Equals(main_jni_conv->InterproceduralScratchRegister()));
+ __ LoadRawPtrFromThread(jni_env, Thread::JniEnvOffset());
+ } else {
+ FrameOffset jni_env = main_jni_conv->CurrentParamStackOffset();
+ __ CopyRawPtrFromThread(jni_env, Thread::JniEnvOffset(),
+ main_jni_conv->InterproceduralScratchRegister());
+ }
+
+ // 9. Plant call to native code associated with method.
+ __ Call(main_jni_conv->MethodStackOffset(), mirror::AbstractMethod::NativeMethodOffset(),
+ mr_conv->InterproceduralScratchRegister());
+
+ // 10. Fix differences in result widths.
+ if (instruction_set == kX86) {
+ if (main_jni_conv->GetReturnType() == Primitive::kPrimByte ||
+ main_jni_conv->GetReturnType() == Primitive::kPrimShort) {
+ __ SignExtend(main_jni_conv->ReturnRegister(),
+ Primitive::ComponentSize(main_jni_conv->GetReturnType()));
+ } else if (main_jni_conv->GetReturnType() == Primitive::kPrimBoolean ||
+ main_jni_conv->GetReturnType() == Primitive::kPrimChar) {
+ __ ZeroExtend(main_jni_conv->ReturnRegister(),
+ Primitive::ComponentSize(main_jni_conv->GetReturnType()));
+ }
+ }
+
+ // 11. Save return value
+ FrameOffset return_save_location = main_jni_conv->ReturnValueSaveLocation();
+ if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) {
+ if (instruction_set == kMips && main_jni_conv->GetReturnType() == Primitive::kPrimDouble &&
+ return_save_location.Uint32Value() % 8 != 0) {
+ // Ensure doubles are 8-byte aligned for MIPS
+ return_save_location = FrameOffset(return_save_location.Uint32Value() + kPointerSize);
+ }
+ CHECK_LT(return_save_location.Uint32Value(), frame_size + main_out_arg_size);
+ __ Store(return_save_location, main_jni_conv->ReturnRegister(), main_jni_conv->SizeOfReturnValue());
+ }
+
+ // 12. Call into JNI method end possibly passing a returned reference, the method and the current
+ // thread.
+ end_jni_conv->ResetIterator(FrameOffset(end_out_arg_size));
+ uintptr_t jni_end;
+ if (reference_return) {
+ // Pass result.
+ jni_end = is_synchronized ? ENTRYPOINT_OFFSET(pJniMethodEndWithReferenceSynchronized)
+ : ENTRYPOINT_OFFSET(pJniMethodEndWithReference);
+ SetNativeParameter(jni_asm.get(), end_jni_conv.get(), end_jni_conv->ReturnRegister());
+ end_jni_conv->Next();
+ } else {
+ jni_end = is_synchronized ? ENTRYPOINT_OFFSET(pJniMethodEndSynchronized)
+ : ENTRYPOINT_OFFSET(pJniMethodEnd);
+ }
+ // Pass saved local reference state.
+ if (end_jni_conv->IsCurrentParamOnStack()) {
+ FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
+ __ Copy(out_off, saved_cookie_offset, end_jni_conv->InterproceduralScratchRegister(), 4);
+ } else {
+ ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
+ __ Load(out_reg, saved_cookie_offset, 4);
+ }
+ end_jni_conv->Next();
+ if (is_synchronized) {
+ // Pass object for unlocking.
+ if (end_jni_conv->IsCurrentParamOnStack()) {
+ FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
+ __ CreateSirtEntry(out_off, locked_object_sirt_offset,
+ end_jni_conv->InterproceduralScratchRegister(),
+ false);
+ } else {
+ ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
+ __ CreateSirtEntry(out_reg, locked_object_sirt_offset,
+ ManagedRegister::NoRegister(), false);
+ }
+ end_jni_conv->Next();
+ }
+ if (end_jni_conv->IsCurrentParamInRegister()) {
+ __ GetCurrentThread(end_jni_conv->CurrentParamRegister());
+ __ Call(end_jni_conv->CurrentParamRegister(), Offset(jni_end),
+ end_jni_conv->InterproceduralScratchRegister());
+ } else {
+ __ GetCurrentThread(end_jni_conv->CurrentParamStackOffset(),
+ end_jni_conv->InterproceduralScratchRegister());
+ __ Call(ThreadOffset(jni_end), end_jni_conv->InterproceduralScratchRegister());
+ }
+
+ // 13. Reload return value
+ if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) {
+ __ Load(mr_conv->ReturnRegister(), return_save_location, mr_conv->SizeOfReturnValue());
+ }
+
+ // 14. Move frame up now that we're done with the out arg space.
+ __ DecreaseFrameSize(max_out_arg_size);
+
+ // 15. Process pending exceptions from JNI call or monitor exit.
+ __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), 0);
+
+ // 16. Remove activation - no need to restore callee save registers because we didn't clobber
+ // them.
+ __ RemoveFrame(frame_size, std::vector<ManagedRegister>());
+
+ // 17. Finalize code generation
+ __ EmitSlowPaths();
+ size_t cs = __ CodeSize();
+ std::vector<uint8_t> managed_code(cs);
+ MemoryRegion code(&managed_code[0], managed_code.size());
+ __ FinalizeInstructions(code);
+ if (should_disassemble) {
+ UniquePtr<Disassembler> disassembler(Disassembler::Create(instruction_set));
+ disassembler->Dump(LOG(INFO), &managed_code[0], &managed_code[managed_code.size()]);
+ }
+ return new CompiledMethod(instruction_set,
+ managed_code,
+ frame_size,
+ main_jni_conv->CoreSpillMask(),
+ main_jni_conv->FpSpillMask());
+}
+
+// Copy a single parameter from the managed to the JNI calling convention
+static void CopyParameter(Assembler* jni_asm,
+ ManagedRuntimeCallingConvention* mr_conv,
+ JniCallingConvention* jni_conv,
+ size_t frame_size, size_t out_arg_size) {
+ bool input_in_reg = mr_conv->IsCurrentParamInRegister();
+ bool output_in_reg = jni_conv->IsCurrentParamInRegister();
+ FrameOffset sirt_offset(0);
+ bool null_allowed = false;
+ bool ref_param = jni_conv->IsCurrentParamAReference();
+ CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
+ // input may be in register, on stack or both - but not none!
+ CHECK(input_in_reg || mr_conv->IsCurrentParamOnStack());
+ if (output_in_reg) { // output shouldn't straddle registers and stack
+ CHECK(!jni_conv->IsCurrentParamOnStack());
+ } else {
+ CHECK(jni_conv->IsCurrentParamOnStack());
+ }
+ // References need to be placed in the SIRT, with the entry address passed on
+ if (ref_param) {
+ null_allowed = mr_conv->IsCurrentArgPossiblyNull();
+ // Compute SIRT offset. Note null is placed in the SIRT but the jobject
+ // passed to the native code must be null (not a pointer into the SIRT
+ // as with regular references).
+ sirt_offset = jni_conv->CurrentParamSirtEntryOffset();
+ // Check SIRT offset is within frame.
+ CHECK_LT(sirt_offset.Uint32Value(), (frame_size + out_arg_size));
+ }
+ if (input_in_reg && output_in_reg) {
+ ManagedRegister in_reg = mr_conv->CurrentParamRegister();
+ ManagedRegister out_reg = jni_conv->CurrentParamRegister();
+ if (ref_param) {
+ __ CreateSirtEntry(out_reg, sirt_offset, in_reg, null_allowed);
+ } else {
+ if (!mr_conv->IsCurrentParamOnStack()) {
+ // regular non-straddling move
+ __ Move(out_reg, in_reg, mr_conv->CurrentParamSize());
+ } else {
+ UNIMPLEMENTED(FATAL); // we currently don't expect to see this case
+ }
+ }
+ } else if (!input_in_reg && !output_in_reg) {
+ FrameOffset out_off = jni_conv->CurrentParamStackOffset();
+ if (ref_param) {
+ __ CreateSirtEntry(out_off, sirt_offset, mr_conv->InterproceduralScratchRegister(),
+ null_allowed);
+ } else {
+ FrameOffset in_off = mr_conv->CurrentParamStackOffset();
+ size_t param_size = mr_conv->CurrentParamSize();
+ CHECK_EQ(param_size, jni_conv->CurrentParamSize());
+ __ Copy(out_off, in_off, mr_conv->InterproceduralScratchRegister(), param_size);
+ }
+ } else if (!input_in_reg && output_in_reg) {
+ FrameOffset in_off = mr_conv->CurrentParamStackOffset();
+ ManagedRegister out_reg = jni_conv->CurrentParamRegister();
+ // Check that incoming stack arguments are above the current stack frame.
+ CHECK_GT(in_off.Uint32Value(), frame_size);
+ if (ref_param) {
+ __ CreateSirtEntry(out_reg, sirt_offset, ManagedRegister::NoRegister(), null_allowed);
+ } else {
+ size_t param_size = mr_conv->CurrentParamSize();
+ CHECK_EQ(param_size, jni_conv->CurrentParamSize());
+ __ Load(out_reg, in_off, param_size);
+ }
+ } else {
+ CHECK(input_in_reg && !output_in_reg);
+ ManagedRegister in_reg = mr_conv->CurrentParamRegister();
+ FrameOffset out_off = jni_conv->CurrentParamStackOffset();
+ // Check outgoing argument is within frame
+ CHECK_LT(out_off.Uint32Value(), frame_size);
+ if (ref_param) {
+ // TODO: recycle value in in_reg rather than reload from SIRT
+ __ CreateSirtEntry(out_off, sirt_offset, mr_conv->InterproceduralScratchRegister(),
+ null_allowed);
+ } else {
+ size_t param_size = mr_conv->CurrentParamSize();
+ CHECK_EQ(param_size, jni_conv->CurrentParamSize());
+ if (!mr_conv->IsCurrentParamOnStack()) {
+ // regular non-straddling store
+ __ Store(out_off, in_reg, param_size);
+ } else {
+ // store where input straddles registers and stack
+ CHECK_EQ(param_size, 8u);
+ FrameOffset in_off = mr_conv->CurrentParamStackOffset();
+ __ StoreSpanning(out_off, in_reg, in_off, mr_conv->InterproceduralScratchRegister());
+ }
+ }
+ }
+}
+
+static void SetNativeParameter(Assembler* jni_asm,
+ JniCallingConvention* jni_conv,
+ ManagedRegister in_reg) {
+ if (jni_conv->IsCurrentParamOnStack()) {
+ FrameOffset dest = jni_conv->CurrentParamStackOffset();
+ __ StoreRawPtr(dest, in_reg);
+ } else {
+ if (!jni_conv->CurrentParamRegister().Equals(in_reg)) {
+ __ Move(jni_conv->CurrentParamRegister(), in_reg, jni_conv->CurrentParamSize());
+ }
+ }
+}
+
+} // namespace art
+
+extern "C" art::CompiledMethod* ArtQuickJniCompileMethod(art::CompilerDriver& compiler,
+ uint32_t access_flags, uint32_t method_idx,
+ const art::DexFile& dex_file) {
+ return ArtJniCompileMethodInternal(compiler, access_flags, method_idx, dex_file);
+}
diff --git a/compiler/jni/quick/mips/calling_convention_mips.cc b/compiler/jni/quick/mips/calling_convention_mips.cc
new file mode 100644
index 0000000000..053ab44eb4
--- /dev/null
+++ b/compiler/jni/quick/mips/calling_convention_mips.cc
@@ -0,0 +1,215 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "calling_convention_mips.h"
+
+#include "base/logging.h"
+#include "oat/utils/mips/managed_register_mips.h"
+
+namespace art {
+namespace mips {
+
+// Calling convention
+ManagedRegister MipsManagedRuntimeCallingConvention::InterproceduralScratchRegister() {
+ return MipsManagedRegister::FromCoreRegister(T9);
+}
+
+ManagedRegister MipsJniCallingConvention::InterproceduralScratchRegister() {
+ return MipsManagedRegister::FromCoreRegister(T9);
+}
+
+static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+ if (shorty[0] == 'F') {
+ return MipsManagedRegister::FromFRegister(F0);
+ } else if (shorty[0] == 'D') {
+ return MipsManagedRegister::FromDRegister(D0);
+ } else if (shorty[0] == 'J') {
+ return MipsManagedRegister::FromRegisterPair(V0_V1);
+ } else if (shorty[0] == 'V') {
+ return MipsManagedRegister::NoRegister();
+ } else {
+ return MipsManagedRegister::FromCoreRegister(V0);
+ }
+}
+
+ManagedRegister MipsManagedRuntimeCallingConvention::ReturnRegister() {
+ return ReturnRegisterForShorty(GetShorty());
+}
+
+ManagedRegister MipsJniCallingConvention::ReturnRegister() {
+ return ReturnRegisterForShorty(GetShorty());
+}
+
+ManagedRegister MipsJniCallingConvention::IntReturnRegister() {
+ return MipsManagedRegister::FromCoreRegister(V0);
+}
+
+// Managed runtime calling convention
+
+ManagedRegister MipsManagedRuntimeCallingConvention::MethodRegister() {
+ return MipsManagedRegister::FromCoreRegister(A0);
+}
+
+bool MipsManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
+ return false; // Everything moved to stack on entry.
+}
+
+bool MipsManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
+ return true;
+}
+
+ManagedRegister MipsManagedRuntimeCallingConvention::CurrentParamRegister() {
+ LOG(FATAL) << "Should not reach here";
+ return ManagedRegister::NoRegister();
+}
+
+FrameOffset MipsManagedRuntimeCallingConvention::CurrentParamStackOffset() {
+ CHECK(IsCurrentParamOnStack());
+ FrameOffset result =
+ FrameOffset(displacement_.Int32Value() + // displacement
+ kPointerSize + // Method*
+ (itr_slots_ * kPointerSize)); // offset into in args
+ return result;
+}
+
+const std::vector<ManagedRegister>& MipsManagedRuntimeCallingConvention::EntrySpills() {
+ // We spill the argument registers on MIPS to free them up for scratch use; we then assume
+ // all arguments are on the stack.
+ if (entry_spills_.size() == 0) {
+ size_t num_spills = NumArgs() + NumLongOrDoubleArgs();
+ if (num_spills > 0) {
+ entry_spills_.push_back(MipsManagedRegister::FromCoreRegister(A1));
+ if (num_spills > 1) {
+ entry_spills_.push_back(MipsManagedRegister::FromCoreRegister(A2));
+ if (num_spills > 2) {
+ entry_spills_.push_back(MipsManagedRegister::FromCoreRegister(A3));
+ }
+ }
+ }
+ }
+ return entry_spills_;
+}
+
+// JNI calling convention
+
+MipsJniCallingConvention::MipsJniCallingConvention(bool is_static, bool is_synchronized,
+ const char* shorty)
+ : JniCallingConvention(is_static, is_synchronized, shorty) {
+ // Compute padding to ensure longs and doubles are not split, as the o32 ABI (like AAPCS
+ // on ARM) requires them to start at even register numbers. Ignore the 'this' jobject
+ // or jclass for static methods and the JNIEnv. We start at the aligned register A2.
+ size_t padding = 0;
+ for (size_t cur_arg = IsStatic() ? 0 : 1, cur_reg = 2; cur_arg < NumArgs(); cur_arg++) {
+ if (IsParamALongOrDouble(cur_arg)) {
+ if ((cur_reg & 1) != 0) {
+ padding += 4;
+ cur_reg++; // additional bump to ensure alignment
+ }
+ cur_reg++; // additional bump to skip extra long word
+ }
+ cur_reg++; // bump the iterator for every argument
+ }
+ padding_ = padding;
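+
+ // For illustration: a static native method `double f(int i, double d)`
+ // (shorty "DID") starts i at A2 (cur_reg 2); d would then start at the odd
+ // register A3, so 4 bytes of padding bump it to an even slot, giving
+ // padding_ == 4.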
+
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T0));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T1));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T2));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T3));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T4));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T5));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T6));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T7));
+ callee_save_regs_.push_back(MipsManagedRegister::FromCoreRegister(T8));
+}
+
+uint32_t MipsJniCallingConvention::CoreSpillMask() const {
+ // Compute spill mask to agree with callee saves initialized in the constructor, plus RA
+ uint32_t result = 0;
+ result = 1 << T0 | 1 << T1 | 1 << T2 | 1 << T3 | 1 << T4 | 1 << T5 | 1 << T6 |
+ 1 << T7 | 1 << T8 | 1 << RA;
+ return result;
+}
+
+ManagedRegister MipsJniCallingConvention::ReturnScratchRegister() const {
+ return MipsManagedRegister::FromCoreRegister(AT);
+}
+
+size_t MipsJniCallingConvention::FrameSize() {
+ // Method*, LR and callee save area size, local reference segment state
+ size_t frame_data_size = (3 + CalleeSaveRegisters().size()) * kPointerSize;
+ // References plus 2 words for SIRT header
+ size_t sirt_size = (ReferenceCount() + 2) * kPointerSize;
+ // Plus return value spill area size
+ return RoundUp(frame_data_size + sirt_size + SizeOfReturnValue(), kStackAlignment);
+}
+
+size_t MipsJniCallingConvention::OutArgSize() {
+ return RoundUp(NumberOfOutgoingStackArgs() * kPointerSize + padding_,
+ kStackAlignment);
+}
+
+// The JniCallingConvention ABI follows the MIPS o32 convention, which, like AAPCS,
+// requires longs and doubles to occur in even register numbers and stack slots
+void MipsJniCallingConvention::Next() {
+ JniCallingConvention::Next();
+ size_t arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ if ((itr_args_ >= 2) &&
+ (arg_pos < NumArgs()) &&
+ IsParamALongOrDouble(arg_pos)) {
+ // itr_slots_ needs to be an even number, per the o32 64-bit alignment rule.
+ if ((itr_slots_ & 0x1u) != 0) {
+ itr_slots_++;
+ }
+ }
+}
+
+bool MipsJniCallingConvention::IsCurrentParamInRegister() {
+ return itr_slots_ < 4;
+}
+
+bool MipsJniCallingConvention::IsCurrentParamOnStack() {
+ return !IsCurrentParamInRegister();
+}
+
+static const Register kJniArgumentRegisters[] = {
+ A0, A1, A2, A3
+};
+ManagedRegister MipsJniCallingConvention::CurrentParamRegister() {
+ CHECK_LT(itr_slots_, 4u);
+ int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+ if ((itr_args_ >= 2) && IsParamALongOrDouble(arg_pos)) {
+ CHECK_EQ(itr_slots_, 2u);
+ return MipsManagedRegister::FromRegisterPair(A2_A3);
+ } else {
+ return MipsManagedRegister::FromCoreRegister(kJniArgumentRegisters[itr_slots_]);
+ }
+}
+
+FrameOffset MipsJniCallingConvention::CurrentParamStackOffset() {
+ CHECK_GE(itr_slots_, 4u);
+ size_t offset = displacement_.Int32Value() - OutArgSize() + (itr_slots_ * kPointerSize);
+ CHECK_LT(offset, OutArgSize());
+ return FrameOffset(offset);
+}
+
+size_t MipsJniCallingConvention::NumberOfOutgoingStackArgs() {
+ size_t static_args = IsStatic() ? 1 : 0; // count jclass
+ // regular argument parameters and this
+ size_t param_args = NumArgs() + NumLongOrDoubleArgs();
+ // count JNIEnv*
+ return static_args + param_args + 1;
+}
+
+} // namespace mips
+} // namespace art
diff --git a/compiler/jni/quick/mips/calling_convention_mips.h b/compiler/jni/quick/mips/calling_convention_mips.h
new file mode 100644
index 0000000000..90681362bc
--- /dev/null
+++ b/compiler/jni/quick/mips/calling_convention_mips.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_SRC_OAT_JNI_MIPS_CALLING_CONVENTION_MIPS_H_
+#define ART_SRC_OAT_JNI_MIPS_CALLING_CONVENTION_MIPS_H_
+
+#include "jni/quick/calling_convention.h"
+
+namespace art {
+namespace mips {
+class MipsManagedRuntimeCallingConvention : public ManagedRuntimeCallingConvention {
+ public:
+ MipsManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+ : ManagedRuntimeCallingConvention(is_static, is_synchronized, shorty) {}
+ virtual ~MipsManagedRuntimeCallingConvention() {}
+ // Calling convention
+ virtual ManagedRegister ReturnRegister();
+ virtual ManagedRegister InterproceduralScratchRegister();
+ // Managed runtime calling convention
+ virtual ManagedRegister MethodRegister();
+ virtual bool IsCurrentParamInRegister();
+ virtual bool IsCurrentParamOnStack();
+ virtual ManagedRegister CurrentParamRegister();
+ virtual FrameOffset CurrentParamStackOffset();
+ virtual const std::vector<ManagedRegister>& EntrySpills();
+
+ private:
+ std::vector<ManagedRegister> entry_spills_;
+
+ DISALLOW_COPY_AND_ASSIGN(MipsManagedRuntimeCallingConvention);
+};
+
+class MipsJniCallingConvention : public JniCallingConvention {
+ public:
+ explicit MipsJniCallingConvention(bool is_static, bool is_synchronized, const char* shorty);
+ virtual ~MipsJniCallingConvention() {}
+ // Calling convention
+ virtual ManagedRegister ReturnRegister();
+ virtual ManagedRegister IntReturnRegister();
+ virtual ManagedRegister InterproceduralScratchRegister();
+ // JNI calling convention
+ virtual void Next(); // Override default behavior for o32 64-bit alignment
+ virtual size_t FrameSize();
+ virtual size_t OutArgSize();
+ virtual const std::vector<ManagedRegister>& CalleeSaveRegisters() const {
+ return callee_save_regs_;
+ }
+ virtual ManagedRegister ReturnScratchRegister() const;
+ virtual uint32_t CoreSpillMask() const;
+ virtual uint32_t FpSpillMask() const {
+ return 0; // Floats aren't spilled in JNI down call
+ }
+ virtual bool IsCurrentParamInRegister();
+ virtual bool IsCurrentParamOnStack();
+ virtual ManagedRegister CurrentParamRegister();
+ virtual FrameOffset CurrentParamStackOffset();
+
+ protected:
+ virtual size_t NumberOfOutgoingStackArgs();
+
+ private:
+ // TODO: these values aren't unique and can be shared amongst instances
+ std::vector<ManagedRegister> callee_save_regs_;
+
+ // Padding to ensure longs and doubles are not split in AAPCS
+ size_t padding_;
+
+ DISALLOW_COPY_AND_ASSIGN(MipsJniCallingConvention);
+};
+} // namespace mips
+} // namespace art
+
+#endif // ART_SRC_OAT_JNI_MIPS_CALLING_CONVENTION_MIPS_H_
diff --git a/compiler/jni/quick/x86/calling_convention_x86.cc b/compiler/jni/quick/x86/calling_convention_x86.cc
new file mode 100644
index 0000000000..b671bd190c
--- /dev/null
+++ b/compiler/jni/quick/x86/calling_convention_x86.cc
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "calling_convention_x86.h"
+
+#include "base/logging.h"
+#include "oat/utils/x86/managed_register_x86.h"
+#include "utils.h"
+
+namespace art {
+namespace x86 {
+
+// Calling convention
+
+ManagedRegister X86ManagedRuntimeCallingConvention::InterproceduralScratchRegister() {
+ return X86ManagedRegister::FromCpuRegister(ECX);
+}
+
+ManagedRegister X86JniCallingConvention::InterproceduralScratchRegister() {
+ return X86ManagedRegister::FromCpuRegister(ECX);
+}
+
+ManagedRegister X86JniCallingConvention::ReturnScratchRegister() const {
+ return ManagedRegister::NoRegister(); // No free regs, so assembler uses push/pop
+}
+
+static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni) {
+ if (shorty[0] == 'F' || shorty[0] == 'D') {
+ if (jni) {
+ return X86ManagedRegister::FromX87Register(ST0);
+ } else {
+ return X86ManagedRegister::FromXmmRegister(XMM0);
+ }
+ } else if (shorty[0] == 'J') {
+ return X86ManagedRegister::FromRegisterPair(EAX_EDX);
+ } else if (shorty[0] == 'V') {
+ return ManagedRegister::NoRegister();
+ } else {
+ return X86ManagedRegister::FromCpuRegister(EAX);
+ }
+}
+
+ManagedRegister X86ManagedRuntimeCallingConvention::ReturnRegister() {
+ return ReturnRegisterForShorty(GetShorty(), false);
+}
+
+ManagedRegister X86JniCallingConvention::ReturnRegister() {
+ return ReturnRegisterForShorty(GetShorty(), true);
+}
+
+ManagedRegister X86JniCallingConvention::IntReturnRegister() {
+ return X86ManagedRegister::FromCpuRegister(EAX);
+}
+
+// Managed runtime calling convention
+
+ManagedRegister X86ManagedRuntimeCallingConvention::MethodRegister() {
+ return X86ManagedRegister::FromCpuRegister(EAX);
+}
+
+bool X86ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
+ return false; // Everything is passed on the stack
+}
+
+bool X86ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
+ return true; // Everything is passed on the stack
+}
+
+ManagedRegister X86ManagedRuntimeCallingConvention::CurrentParamRegister() {
+ LOG(FATAL) << "Should not reach here";
+ return ManagedRegister::NoRegister();
+}
+
+FrameOffset X86ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
+ return FrameOffset(displacement_.Int32Value() + // displacement
+ kPointerSize + // Method*
+ (itr_slots_ * kPointerSize)); // offset into in args
+}
+
+const std::vector<ManagedRegister>& X86ManagedRuntimeCallingConvention::EntrySpills() {
+ // We spill the argument registers on X86 to free them up for scratch use; we then assume
+ // all arguments are on the stack.
+ if (entry_spills_.size() == 0) {
+ size_t num_spills = NumArgs() + NumLongOrDoubleArgs();
+ if (num_spills > 0) {
+ entry_spills_.push_back(X86ManagedRegister::FromCpuRegister(ECX));
+ if (num_spills > 1) {
+ entry_spills_.push_back(X86ManagedRegister::FromCpuRegister(EDX));
+ if (num_spills > 2) {
+ entry_spills_.push_back(X86ManagedRegister::FromCpuRegister(EBX));
+ }
+ }
+ }
+ }
+ return entry_spills_;
+}
+
+// JNI calling convention
+
+X86JniCallingConvention::X86JniCallingConvention(bool is_static, bool is_synchronized,
+ const char* shorty)
+ : JniCallingConvention(is_static, is_synchronized, shorty) {
+ callee_save_regs_.push_back(X86ManagedRegister::FromCpuRegister(EBP));
+ callee_save_regs_.push_back(X86ManagedRegister::FromCpuRegister(ESI));
+ callee_save_regs_.push_back(X86ManagedRegister::FromCpuRegister(EDI));
+}
+
+uint32_t X86JniCallingConvention::CoreSpillMask() const {
+ // EBP, ESI and EDI, plus the bit past the last CPU register marking the return address slot.
+ return 1 << EBP | 1 << ESI | 1 << EDI | 1 << kNumberOfCpuRegisters;
+}
+
+size_t X86JniCallingConvention::FrameSize() {
+ // Method*, return address and callee save area size, local reference segment state
+ size_t frame_data_size = (3 + CalleeSaveRegisters().size()) * kPointerSize;
+ // References plus 2 words for SIRT header
+ size_t sirt_size = (ReferenceCount() + 2) * kPointerSize;
+ // Plus return value spill area size
+ return RoundUp(frame_data_size + sirt_size + SizeOfReturnValue(), kStackAlignment);
+}
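+
+// For illustration, assuming 16-byte stack alignment: a static native method
+// `void f()` has frame_data_size 24 (Method*, return address, segment state
+// and 3 callee saves), a 12-byte SIRT (the jclass reference plus the 2-word
+// header) and no return value spill, so FrameSize() rounds 36 up to 48.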
+
+size_t X86JniCallingConvention::OutArgSize() {
+ return RoundUp(NumberOfOutgoingStackArgs() * kPointerSize, kStackAlignment);
+}
+
+bool X86JniCallingConvention::IsCurrentParamInRegister() {
+ return false; // Everything is passed on the stack.
+}
+
+bool X86JniCallingConvention::IsCurrentParamOnStack() {
+ return true; // Everything is passed on the stack.
+}
+
+ManagedRegister X86JniCallingConvention::CurrentParamRegister() {
+ LOG(FATAL) << "Should not reach here";
+ return ManagedRegister::NoRegister();
+}
+
+FrameOffset X86JniCallingConvention::CurrentParamStackOffset() {
+ return FrameOffset(displacement_.Int32Value() - OutArgSize() + (itr_slots_ * kPointerSize));
+}
+
+size_t X86JniCallingConvention::NumberOfOutgoingStackArgs() {
+ size_t static_args = IsStatic() ? 1 : 0; // count jclass
+ // regular argument parameters and this
+ size_t param_args = NumArgs() + NumLongOrDoubleArgs();
+ // count JNIEnv* and return pc (pushed after Method*)
+ size_t total_args = static_args + param_args + 2;
+ return total_args;
+}
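+
+// For illustration: a static native method `int f(long l)` (shorty "IJ") has
+// param_args 2 (l occupies two slots), static_args 1 (the jclass) and the 2
+// extra slots, so 5 outgoing stack argument slots in total.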
+
+} // namespace x86
+} // namespace art
diff --git a/compiler/jni/quick/x86/calling_convention_x86.h b/compiler/jni/quick/x86/calling_convention_x86.h
new file mode 100644
index 0000000000..ea8a26e7d5
--- /dev/null
+++ b/compiler/jni/quick/x86/calling_convention_x86.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_SRC_OAT_JNI_X86_CALLING_CONVENTION_X86_H_
+#define ART_SRC_OAT_JNI_X86_CALLING_CONVENTION_X86_H_
+
+#include "jni/quick/calling_convention.h"
+
+namespace art {
+namespace x86 {
+
+class X86ManagedRuntimeCallingConvention : public ManagedRuntimeCallingConvention {
+ public:
+ explicit X86ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized,
+ const char* shorty)
+ : ManagedRuntimeCallingConvention(is_static, is_synchronized, shorty) {}
+ virtual ~X86ManagedRuntimeCallingConvention() {}
+ // Calling convention
+ virtual ManagedRegister ReturnRegister();
+ virtual ManagedRegister InterproceduralScratchRegister();
+ // Managed runtime calling convention
+ virtual ManagedRegister MethodRegister();
+ virtual bool IsCurrentParamInRegister();
+ virtual bool IsCurrentParamOnStack();
+ virtual ManagedRegister CurrentParamRegister();
+ virtual FrameOffset CurrentParamStackOffset();
+ virtual const std::vector<ManagedRegister>& EntrySpills();
+ private:
+ std::vector<ManagedRegister> entry_spills_;
+ DISALLOW_COPY_AND_ASSIGN(X86ManagedRuntimeCallingConvention);
+};
+
+class X86JniCallingConvention : public JniCallingConvention {
+ public:
+ explicit X86JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty);
+ virtual ~X86JniCallingConvention() {}
+ // Calling convention
+ virtual ManagedRegister ReturnRegister();
+ virtual ManagedRegister IntReturnRegister();
+ virtual ManagedRegister InterproceduralScratchRegister();
+ // JNI calling convention
+ virtual size_t FrameSize();
+ virtual size_t OutArgSize();
+ virtual const std::vector<ManagedRegister>& CalleeSaveRegisters() const {
+ return callee_save_regs_;
+ }
+ virtual ManagedRegister ReturnScratchRegister() const;
+ virtual uint32_t CoreSpillMask() const;
+ virtual uint32_t FpSpillMask() const {
+ return 0;
+ }
+ virtual bool IsCurrentParamInRegister();
+ virtual bool IsCurrentParamOnStack();
+ virtual ManagedRegister CurrentParamRegister();
+ virtual FrameOffset CurrentParamStackOffset();
+
+ protected:
+ virtual size_t NumberOfOutgoingStackArgs();
+
+ private:
+ // TODO: these values aren't unique and can be shared amongst instances
+ std::vector<ManagedRegister> callee_save_regs_;
+
+ DISALLOW_COPY_AND_ASSIGN(X86JniCallingConvention);
+};
+
+} // namespace x86
+} // namespace art
+
+#endif // ART_SRC_OAT_JNI_X86_CALLING_CONVENTION_X86_H_