summary refs log tree commit diff stats
path: root/compiler/optimizing
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--compiler/optimizing/code_generator_arm.cc12
-rw-r--r--compiler/optimizing/code_generator_arm64.cc13
-rw-r--r--compiler/optimizing/code_generator_x86.cc8
-rw-r--r--compiler/optimizing/code_generator_x86_64.cc7
4 files changed, 18 insertions, 22 deletions
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 1ca1cee275..07c84bcc01 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -37,8 +37,6 @@ static DRegister FromLowSToD(SRegister reg) {
return static_cast<DRegister>(reg / 2);
}
-static constexpr bool kExplicitStackOverflowCheck = false;
-
static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2; // LR, R6, R7
static constexpr int kCurrentMethodStackOffset = 0;
@@ -514,17 +512,17 @@ void CodeGeneratorARM::GenerateFrameEntry() {
bool skip_overflow_check =
IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
if (!skip_overflow_check) {
- if (kExplicitStackOverflowCheck) {
+ if (GetCompilerOptions().GetImplicitStackOverflowChecks()) {
+ __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
+ __ LoadFromOffset(kLoadWord, IP, IP, 0);
+ RecordPcInfo(nullptr, 0);
+ } else {
SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
AddSlowPath(slow_path);
__ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
__ cmp(SP, ShifterOperand(IP));
__ b(slow_path->GetEntryLabel(), CC);
- } else {
- __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
- __ LoadFromOffset(kLoadWord, IP, IP, 0);
- RecordPcInfo(nullptr, 0);
}
}
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index c1bce2a077..306845beb8 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -43,7 +43,6 @@ namespace arm64 {
// TODO: Tune the use of Load-Acquire, Store-Release vs Data Memory Barriers.
// For now we prefer the use of load-acquire, store-release over explicit memory barriers.
static constexpr bool kUseAcquireRelease = true;
-static constexpr bool kExplicitStackOverflowCheck = false;
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;
@@ -443,7 +442,7 @@ class StackOverflowCheckSlowPathARM64 : public SlowPathCodeARM64 {
CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
__ Bind(GetEntryLabel());
arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowStackOverflow), nullptr, 0);
- CheckEntrypointTypes<kQuickThrowStackOverflow, void, void>();
+ CheckEntrypointTypes<kQuickThrowStackOverflow, void, void*>();
}
private:
@@ -606,17 +605,17 @@ void CodeGeneratorARM64::GenerateFrameEntry() {
if (do_overflow_check) {
UseScratchRegisterScope temps(GetVIXLAssembler());
Register temp = temps.AcquireX();
- if (kExplicitStackOverflowCheck) {
+ if (GetCompilerOptions().GetImplicitStackOverflowChecks()) {
+ __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
+ __ Ldr(wzr, MemOperand(temp, 0));
+ RecordPcInfo(nullptr, 0);
+ } else {
SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM64();
AddSlowPath(slow_path);
__ Ldr(temp, MemOperand(tr, Thread::StackEndOffset<kArm64WordSize>().Int32Value()));
__ Cmp(sp, temp);
__ B(lo, slow_path->GetEntryLabel());
- } else {
- __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
- __ Ldr(wzr, MemOperand(temp, 0));
- RecordPcInfo(nullptr, 0);
}
}
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 9052b8fdec..5b09fc190b 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -31,8 +31,6 @@ namespace art {
namespace x86 {
-static constexpr bool kExplicitStackOverflowCheck = false;
-
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
static constexpr int kCurrentMethodStackOffset = 0;
@@ -470,7 +468,9 @@ void CodeGeneratorX86::GenerateFrameEntry() {
bool skip_overflow_check =
IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
- if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
+ bool implicitStackOverflowChecks = GetCompilerOptions().GetImplicitStackOverflowChecks();
+
+ if (!skip_overflow_check && implicitStackOverflowChecks) {
__ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
RecordPcInfo(nullptr, 0);
}
@@ -478,7 +478,7 @@ void CodeGeneratorX86::GenerateFrameEntry() {
// The return PC has already been pushed on the stack.
__ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
- if (!skip_overflow_check && kExplicitStackOverflowCheck) {
+ if (!skip_overflow_check && !implicitStackOverflowChecks) {
SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
AddSlowPath(slow_path);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 723573afa0..196e0cf666 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -34,8 +34,6 @@ namespace art {
namespace x86_64 {
-static constexpr bool kExplicitStackOverflowCheck = false;
-
// Some x86_64 instructions require a register to be available as temp.
static constexpr Register TMP = R11;
@@ -487,8 +485,9 @@ void CodeGeneratorX86_64::GenerateFrameEntry() {
bool skip_overflow_check = IsLeafMethod()
&& !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
+ bool implicitStackOverflowChecks = GetCompilerOptions().GetImplicitStackOverflowChecks();
- if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
+ if (!skip_overflow_check && implicitStackOverflowChecks) {
__ testq(CpuRegister(RAX), Address(
CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
RecordPcInfo(nullptr, 0);
@@ -498,7 +497,7 @@ void CodeGeneratorX86_64::GenerateFrameEntry() {
__ subq(CpuRegister(RSP),
Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
- if (!skip_overflow_check && kExplicitStackOverflowCheck) {
+ if (!skip_overflow_check && !implicitStackOverflowChecks) {
SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86_64();
AddSlowPath(slow_path);