path: root/compiler/optimizing/code_generator_x86.cc
author    Mingyao Yang <mingyao@google.com>  2015-03-07 06:37:59 -0800
committer Mingyao Yang <mingyao@google.com>  2015-03-23 16:39:37 -0700
commit    e295e6ec5beaea31be5d7d3c996cd8cfa2053129 (patch)
tree      4d8a657d23d511743ce35bee596544d7f652efdb /compiler/optimizing/code_generator_x86.cc
parent    d24ba2c44c76a2b2dd13aafe8f7981c15be31a98 (diff)
Deoptimization-based bce.
A mechanism is introduced by which a runtime method can be called from code compiled with the optimizing compiler to deoptimize into the interpreter. This can be used to establish invariants in the managed code. If an invariant does not hold at runtime, we deoptimize and continue execution in the interpreter. This allows us to optimize the managed code as if the invariant had been proven at compile time, while exceptions are still thrown according to the semantics demanded by the spec.

The invariant and optimization included in this patch are based on the length of an array. Given a set of array accesses with constant indices {c1, ..., cn}, we can optimize away all bounds checks iff 0 <= min(ci) and max(ci) < array-length. The first condition can be proven statically; the second is established with a deoptimization-based invariant. This replaces n bounds checks with one invariant check (plus slow-path code).

Change-Id: I8c6e34b56c85d25b91074832d13dba1db0a81569
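As a rough sketch (not code from this patch), the invariant above can be expressed as follows. The function name and parameters are hypothetical; the actual pass works on HBoundsCheck instructions in the compiler IR and emits an HDeoptimize guard rather than operating on a plain list of indices.

```cpp
#include <algorithm>
#include <cstdint>
#include <vector>

// Hypothetical sketch of the invariant described in the commit message:
// for array accesses with constant indices {c1, ..., cn}, all per-access
// bounds checks can be removed iff 0 <= min(ci) and max(ci) < array_length.
// The first condition is proven statically; the second becomes the single
// runtime check that deoptimizes to the interpreter when it fails.
bool CanEliminateBoundsChecks(const std::vector<int32_t>& constant_indices,
                              int32_t array_length) {
  if (constant_indices.empty()) {
    return false;
  }
  int32_t min_index = *std::min_element(constant_indices.begin(),
                                        constant_indices.end());
  int32_t max_index = *std::max_element(constant_indices.begin(),
                                        constant_indices.end());
  return min_index >= 0 && max_index < array_length;
}
```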
Diffstat (limited to 'compiler/optimizing/code_generator_x86.cc')
-rw-r--r--  compiler/optimizing/code_generator_x86.cc | 101
1 file changed, 77 insertions(+), 24 deletions(-)
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 02b9b3294c..44bbccce0f 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -324,6 +324,27 @@ class TypeCheckSlowPathX86 : public SlowPathCodeX86 {
DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};
+class DeoptimizationSlowPathX86 : public SlowPathCodeX86 {
+ public:
+ explicit DeoptimizationSlowPathX86(HInstruction* instruction)
+ : instruction_(instruction) {}
+
+ void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+ __ Bind(GetEntryLabel());
+ SaveLiveRegisters(codegen, instruction_->GetLocations());
+ __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pDeoptimize)));
+ // No need to restore live registers.
+ DCHECK(instruction_->IsDeoptimize());
+ HDeoptimize* deoptimize = instruction_->AsDeoptimize();
+ uint32_t dex_pc = deoptimize->GetDexPc();
+ codegen->RecordPcInfo(instruction_, dex_pc, this);
+ }
+
+ private:
+ HInstruction* const instruction_;
+ DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
+};
+
#undef __
#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
@@ -814,24 +835,17 @@ void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
UNUSED(exit);
}
-void LocationsBuilderX86::VisitIf(HIf* if_instr) {
- LocationSummary* locations =
- new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
- HInstruction* cond = if_instr->InputAt(0);
- if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
- locations->SetInAt(0, Location::Any());
- }
-}
-
-void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
- HInstruction* cond = if_instr->InputAt(0);
+void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
+ Label* true_target,
+ Label* false_target,
+ Label* always_true_target) {
+ HInstruction* cond = instruction->InputAt(0);
if (cond->IsIntConstant()) {
// Constant condition, statically compared against 1.
int32_t cond_value = cond->AsIntConstant()->GetValue();
if (cond_value == 1) {
- if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
- if_instr->IfTrueSuccessor())) {
- __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ if (always_true_target != nullptr) {
+ __ jmp(always_true_target);
}
return;
} else {
@@ -844,20 +858,19 @@ void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
// evaluated just before the if, we don't need to evaluate it
// again.
bool eflags_set = cond->IsCondition()
- && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
+ && cond->AsCondition()->IsBeforeWhenDisregardMoves(instruction);
if (materialized) {
if (!eflags_set) {
// Materialized condition, compare against 0.
- Location lhs = if_instr->GetLocations()->InAt(0);
+ Location lhs = instruction->GetLocations()->InAt(0);
if (lhs.IsRegister()) {
__ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
} else {
__ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
}
- __ j(kNotEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ __ j(kNotEqual, true_target);
} else {
- __ j(X86Condition(cond->AsCondition()->GetCondition()),
- codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
}
} else {
Location lhs = cond->GetLocations()->InAt(0);
@@ -876,16 +889,56 @@ void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
} else {
__ cmpl(lhs.AsRegister<Register>(), Address(ESP, rhs.GetStackIndex()));
}
- __ j(X86Condition(cond->AsCondition()->GetCondition()),
- codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
+ __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
}
}
- if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
- if_instr->IfFalseSuccessor())) {
- __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
+ if (false_target != nullptr) {
+ __ jmp(false_target);
}
}
+void LocationsBuilderX86::VisitIf(HIf* if_instr) {
+ LocationSummary* locations =
+ new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
+ HInstruction* cond = if_instr->InputAt(0);
+ if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
+ locations->SetInAt(0, Location::Any());
+ }
+}
+
+void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
+ Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
+ Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
+ Label* always_true_target = true_target;
+ if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
+ if_instr->IfTrueSuccessor())) {
+ always_true_target = nullptr;
+ }
+ if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
+ if_instr->IfFalseSuccessor())) {
+ false_target = nullptr;
+ }
+ GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
+}
+
+void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
+ LocationSummary* locations = new (GetGraph()->GetArena())
+ LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
+ HInstruction* cond = deoptimize->InputAt(0);
+ DCHECK(cond->IsCondition());
+ if (cond->AsCondition()->NeedsMaterialization()) {
+ locations->SetInAt(0, Location::Any());
+ }
+}
+
+void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
+ SlowPathCodeX86* slow_path = new (GetGraph()->GetArena())
+ DeoptimizationSlowPathX86(deoptimize);
+ codegen_->AddSlowPath(slow_path);
+ Label* slow_path_entry = slow_path->GetEntryLabel();
+ GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
+}
+
void LocationsBuilderX86::VisitLocal(HLocal* local) {
local->SetLocations(nullptr);
}