author     Mingyao Yang <mingyao@google.com>    2015-04-01 14:03:04 -0700
committer  Mingyao Yang <mingyao@google.com>    2015-04-01 14:05:13 -0700
commit     d43b3ac88cd46b8815890188c9c2b9a3f1564648 (patch)
tree       6c599c3f40d57e92786bd7f41c0541d9eaa2643b /compiler/optimizing/code_generator_x86_64.cc
parent     a109632b240f3c9355ca95500f6f48e4478e3c51 (diff)
Revert "Revert "Deoptimization-based bce.""
This reverts commit 0ba627337274ccfb8c9cb9bf23fffb1e1b9d1430.

Change-Id: I1ca10d15bbb49897a0cf541ab160431ec180a006
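For context, deoptimization-based bounds check elimination (bce) lets the compiler drop per-access array bounds checks behind a single hoisted guard and fall back ("deoptimize") to fully checked execution when the guard fails. In this change the guard is an HDeoptimize instruction: the x86-64 code generator lowers it with GenerateTestAndBranch into a branch to a DeoptimizationSlowPathX86_64, which calls the pDeoptimize quick entrypoint and records the dex pc. A minimal sketch of the idea in plain C++ follows; it is illustrative only, not ART code, and SumPrefix is a made-up example name.

    #include <cstddef>
    #include <vector>

    // Fast path behind one hoisted guard; fully checked path otherwise.
    int SumPrefix(const std::vector<int>& a, std::size_t n) {
      if (n <= a.size()) {
        // Guard held: every a[i] below is provably in bounds, so the
        // per-iteration bounds checks can be eliminated.
        int sum = 0;
        for (std::size_t i = 0; i < n; ++i) {
          sum += a[i];
        }
        return sum;
      }
      // Guard failed: the "deoptimized" path keeps a check on every access.
      int sum = 0;
      for (std::size_t i = 0; i < n; ++i) {
        sum += a.at(i);  // throws std::out_of_range on the first bad index
      }
      return sum;
    }

In ART the slow path does not duplicate the loop in compiled code; it hands control to the runtime's deoptimization entrypoint so execution resumes in the interpreter at the recorded dex pc, where accesses are checked again.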
Diffstat (limited to 'compiler/optimizing/code_generator_x86_64.cc')
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc  101
1 file changed, 77 insertions(+), 24 deletions(-)
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index ef60280016..0c4c0b3e5d 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -314,6 +314,27 @@ class TypeCheckSlowPathX86_64 : public SlowPathCodeX86_64 {
DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
+class DeoptimizationSlowPathX86_64 : public SlowPathCodeX86_64 {
+ public:
+ explicit DeoptimizationSlowPathX86_64(HInstruction* instruction)
+ : instruction_(instruction) {}
+
+ void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+ __ Bind(GetEntryLabel());
+ SaveLiveRegisters(codegen, instruction_->GetLocations());
+ __ gs()->call(
+ Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pDeoptimize), true));
+ DCHECK(instruction_->IsDeoptimize());
+ HDeoptimize* deoptimize = instruction_->AsDeoptimize();
+ uint32_t dex_pc = deoptimize->GetDexPc();
+ codegen->RecordPcInfo(instruction_, dex_pc, this);
+ }
+
+ private:
+ HInstruction* const instruction_;
+ DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
+};
+
#undef __
#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->
@@ -734,24 +755,17 @@ void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
UNUSED(exit);
}
-void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
- LocationSummary* locations =
- new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
- HInstruction* cond = if_instr->InputAt(0);
- if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
- locations->SetInAt(0, Location::Any());
- }
-}
-
-void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
- HInstruction* cond = if_instr->InputAt(0);
+void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
+ Label* true_target,
+ Label* false_target,
+ Label* always_true_target) {
+ HInstruction* cond = instruction->InputAt(0);
if (cond->IsIntConstant()) {
// Constant condition, statically compared against 1.
int32_t cond_value = cond->AsIntConstant()->GetValue();
if (cond_value == 1) {
- if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
- if_instr->IfTrueSuccessor())) {
- __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ if (always_true_target != nullptr) {
+ __ jmp(always_true_target);
}
return;
} else {
@@ -764,21 +778,20 @@ void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
// evaluated just before the if, we don't need to evaluate it
// again.
bool eflags_set = cond->IsCondition()
- && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
+ && cond->AsCondition()->IsBeforeWhenDisregardMoves(instruction);
if (materialized) {
if (!eflags_set) {
// Materialized condition, compare against 0.
- Location lhs = if_instr->GetLocations()->InAt(0);
+ Location lhs = instruction->GetLocations()->InAt(0);
if (lhs.IsRegister()) {
__ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
} else {
__ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()),
Immediate(0));
}
- __ j(kNotEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ __ j(kNotEqual, true_target);
} else {
- __ j(X86_64Condition(cond->AsCondition()->GetCondition()),
- codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ __ j(X86_64Condition(cond->AsCondition()->GetCondition()), true_target);
}
} else {
Location lhs = cond->GetLocations()->InAt(0);
@@ -796,16 +809,56 @@ void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
__ cmpl(lhs.AsRegister<CpuRegister>(),
Address(CpuRegister(RSP), rhs.GetStackIndex()));
}
- __ j(X86_64Condition(cond->AsCondition()->GetCondition()),
- codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ __ j(X86_64Condition(cond->AsCondition()->GetCondition()), true_target);
}
}
- if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
- if_instr->IfFalseSuccessor())) {
- __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
+ if (false_target != nullptr) {
+ __ jmp(false_target);
+ }
+}
+
+void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
+ LocationSummary* locations =
+ new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
+ HInstruction* cond = if_instr->InputAt(0);
+ if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
+ locations->SetInAt(0, Location::Any());
+ }
+}
+
+void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
+ Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
+ Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
+ Label* always_true_target = true_target;
+ if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
+ if_instr->IfTrueSuccessor())) {
+ always_true_target = nullptr;
+ }
+ if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
+ if_instr->IfFalseSuccessor())) {
+ false_target = nullptr;
+ }
+ GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
+}
+
+void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
+ LocationSummary* locations = new (GetGraph()->GetArena())
+ LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
+ HInstruction* cond = deoptimize->InputAt(0);
+ DCHECK(cond->IsCondition());
+ if (cond->AsCondition()->NeedsMaterialization()) {
+ locations->SetInAt(0, Location::Any());
}
}
+void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
+ SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena())
+ DeoptimizationSlowPathX86_64(deoptimize);
+ codegen_->AddSlowPath(slow_path);
+ Label* slow_path_entry = slow_path->GetEntryLabel();
+ GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
+}
+
void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
local->SetLocations(nullptr);
}