summary refs log tree commit diff stats
path: root/compiler/optimizing/code_generator_arm64.h
diff options
context:
space:
mode:
author: Alexandre Rames <alexandre.rames@arm.com> 2014-11-18 10:55:16 +0000
committer: Nicolas Geoffray <ngeoffray@google.com> 2014-11-18 15:36:36 +0000
commit67555f7e9a05a9d436e034f67ae683bbf02d072d (patch)
tree9a01b7c69032b08b3c55c18076f68c1e397d8a35 /compiler/optimizing/code_generator_arm64.h
parentbf75c5cf32a47eecadcc5e4a324237c1f1d09cde (diff)
downloadandroid_art-67555f7e9a05a9d436e034f67ae683bbf02d072d.tar.gz
android_art-67555f7e9a05a9d436e034f67ae683bbf02d072d.tar.bz2
android_art-67555f7e9a05a9d436e034f67ae683bbf02d072d.zip
Opt compiler: Add support for more IRs on arm64.
Change-Id: I4b6425135d1af74912a206411288081d2516f8bf
Diffstat (limited to 'compiler/optimizing/code_generator_arm64.h')
-rw-r--r--  compiler/optimizing/code_generator_arm64.h  36
1 file changed, 23 insertions, 13 deletions
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 54e87f4d9c..6b71b94532 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -29,6 +29,7 @@ namespace art {
namespace arm64 {
class CodeGeneratorARM64;
+class SlowPathCodeARM64;
static constexpr size_t kArm64WordSize = 8;
static const vixl::Register kParameterCoreRegisters[] = {
@@ -103,9 +104,11 @@ class InstructionCodeGeneratorARM64 : public HGraphVisitor {
void LoadCurrentMethod(XRegister reg);
Arm64Assembler* GetAssembler() const { return assembler_; }
+ vixl::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->vixl_masm_; }
private:
- void HandleAddSub(HBinaryOperation* instr);
+ void GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path, vixl::Register class_reg);
+ void HandleBinaryOp(HBinaryOperation* instr);
Arm64Assembler* const assembler_;
CodeGeneratorARM64* const codegen_;
@@ -124,7 +127,7 @@ class LocationsBuilderARM64 : public HGraphVisitor {
#undef DECLARE_VISIT_INSTRUCTION
private:
- void HandleAddSub(HBinaryOperation* instr);
+ void HandleBinaryOp(HBinaryOperation* instr);
void HandleInvoke(HInvoke* instr);
CodeGeneratorARM64* const codegen_;
@@ -162,9 +165,10 @@ class CodeGeneratorARM64 : public CodeGenerator {
return kArm64WordSize;
}
- uintptr_t GetAddressOf(HBasicBlock* block ATTRIBUTE_UNUSED) const OVERRIDE {
- UNIMPLEMENTED(INFO) << "TODO: GetAddressOf";
- return 0u;
+ uintptr_t GetAddressOf(HBasicBlock* block) const OVERRIDE {
+ vixl::Label* block_entry_label = GetLabelOf(block);
+ DCHECK(block_entry_label->IsBound());
+ return block_entry_label->location();
}
size_t FrameEntrySpillSize() const OVERRIDE;
@@ -172,6 +176,7 @@ class CodeGeneratorARM64 : public CodeGenerator {
HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }
HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }
Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
+ vixl::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->vixl_masm_; }
// Emit a write barrier.
void MarkGCCard(vixl::Register object, vixl::Register value);
@@ -185,18 +190,18 @@ class CodeGeneratorARM64 : public CodeGenerator {
Location GetStackLocation(HLoadLocal* load) const OVERRIDE;
- size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE {
+ size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
UNUSED(stack_index);
UNUSED(reg_id);
- UNIMPLEMENTED(INFO) << "TODO: SaveCoreRegister";
- return 0;
+ LOG(INFO) << "CodeGeneratorARM64::SaveCoreRegister()";
+ return kArm64WordSize;
}
- size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE {
+ size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
UNUSED(stack_index);
UNUSED(reg_id);
- UNIMPLEMENTED(INFO) << "TODO: RestoreCoreRegister";
- return 0;
+ LOG(INFO) << "CodeGeneratorARM64::RestoreCoreRegister()";
+ return kArm64WordSize;
}
// The number of registers that can be allocated. The register allocator may
@@ -226,9 +231,14 @@ class CodeGeneratorARM64 : public CodeGenerator {
}
// Code generation helpers.
+ void MoveConstant(vixl::CPURegister destination, HConstant* constant);
void MoveHelper(Location destination, Location source, Primitive::Type type);
- void Load(Primitive::Type type, vixl::Register dst, const vixl::MemOperand& src);
- void Store(Primitive::Type type, vixl::Register rt, const vixl::MemOperand& dst);
+ void Load(Primitive::Type type, vixl::CPURegister dst, const vixl::MemOperand& src);
+ void Store(Primitive::Type type, vixl::CPURegister rt, const vixl::MemOperand& dst);
+ void LoadCurrentMethod(vixl::Register current_method);
+
+ // Generate code to invoke a runtime entry point.
+ void InvokeRuntime(int32_t offset, HInstruction* instruction, uint32_t dex_pc);
ParallelMoveResolver* GetMoveResolver() OVERRIDE {
UNIMPLEMENTED(INFO) << "TODO: MoveResolver";