author    Andreas Gampe <agampe@google.com>    2015-01-15 23:24:00 -0800
committer Andreas Gampe <agampe@google.com>    2015-01-28 15:32:40 -0800
commit    878d58cbaf6b17a9e3dcab790754527f3ebc69e5 (patch)
tree      1c1af4ef938ad06a783da51e2c6276d6b0628da6 /compiler/optimizing/code_generator_arm64.h
parent    b80c3154d3b6359d8ad4ce50d3a6a68224400cdd (diff)
ART: Arm64 optimizing compiler intrinsics
Implement most intrinsics for the optimizing compiler for Arm64.

Change-Id: Idb459be09f0524cb9aeab7a5c7fccb1c6b65a707
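For flavor, one of the intrinsics this commit enables is java.lang.Integer.reverse(int), which arm64 can implement with a single RBIT instruction instead of a call into the runtime. A minimal sketch, assuming VIXL's a64 MacroAssembler; the function name and include path below are illustrative, not names from this patch (later sketches assume the same include):

  #include "vixl/a64/macro-assembler-a64.h"  // include path assumed; it varies across vixl versions

  // Illustrative helper: lower Integer.reverse(int) to a single instruction.
  void EmitIntegerReverse(vixl::MacroAssembler* masm,
                          const vixl::Register& out,
                          const vixl::Register& in) {
    masm->Rbit(out, in);  // RBIT reverses the bit order of 'in' into 'out'
  }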
Diffstat (limited to 'compiler/optimizing/code_generator_arm64.h')
-rw-r--r--  compiler/optimizing/code_generator_arm64.h  |  24
1 file changed, 22 insertions(+), 2 deletions(-)
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 96013e55c6..100dafe724 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -31,7 +31,10 @@ namespace art {
namespace arm64 {
class CodeGeneratorARM64;
-class SlowPathCodeARM64;
+
+// TODO: Tune the use of Load-Acquire, Store-Release vs Data Memory Barriers.
+// For now we prefer the use of load-acquire, store-release over explicit memory barriers.
+static constexpr bool kUseAcquireRelease = true;
// Use a local definition to prevent copying mistakes.
static constexpr size_t kArm64WordSize = kArm64PointerSize;
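The kUseAcquireRelease flag added above chooses between two ways of getting the same ordering guarantee. As a plain-C++ illustration of the semantics (standard <atomic>, not ART code): on armv8, the release store and acquire load below can compile to single STLR/LDAR instructions, whereas the barrier-based alternative needs plain accesses bracketed by explicit DMB instructions.

  #include <atomic>

  std::atomic<int> guard{0};
  int payload = 0;

  void Publish(int value) {
    payload = value;
    guard.store(1, std::memory_order_release);  // may compile to STLR on arm64
  }

  int Consume() {
    while (guard.load(std::memory_order_acquire) == 0) {  // may compile to LDAR
    }
    return payload;  // acquire/release pairing guarantees the published value is visible
  }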
@@ -45,7 +48,8 @@ static const vixl::FPRegister kParameterFPRegisters[] = {
};
static constexpr size_t kParameterFPRegistersLength = arraysize(kParameterFPRegisters);
-const vixl::Register tr = vixl::x18; // Thread Register
+const vixl::Register tr = vixl::x18; // Thread Register
+static const vixl::Register kArtMethodRegister = vixl::w0; // Method register on invoke.
const vixl::CPURegList vixl_reserved_core_registers(vixl::ip0, vixl::ip1);
const vixl::CPURegList vixl_reserved_fp_registers(vixl::d31);
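To make the role of these aliases concrete, here is a hypothetical use of the thread register; the field offset is invented for illustration and is not a real art::Thread offset:

  // Illustrative only: read a field of the current Thread through tr (x18).
  void LoadThreadFieldSketch(vixl::MacroAssembler* masm, const vixl::Register& out) {
    const vixl::Register tr = vixl::x18;                   // Thread Register, as defined above
    static constexpr int kFieldOffsetForIllustration = 0;  // invented offset
    masm->Ldr(out, vixl::MemOperand(tr, kFieldOffsetForIllustration));
  }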
@@ -56,6 +60,20 @@ const vixl::CPURegList quick_callee_saved_registers(vixl::CPURegister::kRegister
Location ARM64ReturnLocation(Primitive::Type return_type);
+class SlowPathCodeARM64 : public SlowPathCode {
+ public:
+ SlowPathCodeARM64() : entry_label_(), exit_label_() {}
+
+ vixl::Label* GetEntryLabel() { return &entry_label_; }
+ vixl::Label* GetExitLabel() { return &exit_label_; }
+
+ private:
+ vixl::Label entry_label_;
+ vixl::Label exit_label_;
+
+ DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
+};
+
class InvokeDexCallingConvention : public CallingConvention<vixl::Register, vixl::FPRegister> {
public:
InvokeDexCallingConvention()
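The two labels in the SlowPathCodeARM64 class added above support the usual slow-path pattern: the fast path branches out of line on the uncommon case, and execution rejoins at the exit label. A simplified sketch, with the helper name and the CBZ trigger invented for illustration:

  void SlowPathUseSketch(vixl::MacroAssembler* masm,
                         SlowPathCodeARM64* slow_path,
                         const vixl::Register& obj) {
    masm->Cbz(obj, slow_path->GetEntryLabel());  // uncommon case: branch to out-of-line code
    // ... fast path continues here ...
    masm->Bind(slow_path->GetExitLabel());       // out-of-line code branches back here
  }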
@@ -274,6 +292,8 @@ class CodeGeneratorARM64 : public CodeGenerator {
return false;
}
+ void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, vixl::Register temp);
+
private:
// Labels for each block that will be compiled.
vixl::Label* block_labels_;
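GenerateStaticOrDirectCall, declared in the last hunk, emits the actual call sequence. Only as a hedged sketch (the load chain that resolves the callee and the entry-point offset below are illustrative, not this patch's implementation): the callee ArtMethod* ends up in a temp register, its quick-code entry point is loaded, and the call goes through a branch-and-link.

  void StaticOrDirectCallSketch(vixl::MacroAssembler* masm,
                                const vixl::Register& temp /* X register */) {
    static constexpr int kEntryPointOffsetForIllustration = 0;  // invented offset
    // ... preceding code materializes the resolved ArtMethod* into 'temp' ...
    masm->Ldr(vixl::lr, vixl::MemOperand(temp, kEntryPointOffsetForIllustration));
    masm->Blr(vixl::lr);  // branch-and-link: lr receives the return address
  }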