summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--compiler/jni/quick/jni_compiler.cc4
-rw-r--r--compiler/utils/arm64/assembler_arm64.cc4
-rw-r--r--compiler/utils/arm64/assembler_arm64.h9
-rw-r--r--compiler/utils/arm64/constants_arm64.h3
4 files changed, 6 insertions, 14 deletions
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 78a228be47..f6795ea28c 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -428,10 +428,6 @@ CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver,
// 17. Finalize code generation
__ EmitSlowPaths();
size_t cs = __ CodeSize();
- if (instruction_set == kArm64) {
- // Test that we do not exceed the buffer size.
- CHECK(cs < arm64::kBufferSizeArm64);
- }
std::vector<uint8_t> managed_code(cs);
MemoryRegion code(&managed_code[0], managed_code.size());
__ FinalizeInstructions(code);
diff --git a/compiler/utils/arm64/assembler_arm64.cc b/compiler/utils/arm64/assembler_arm64.cc
index c82b4f0f50..25e02c35da 100644
--- a/compiler/utils/arm64/assembler_arm64.cc
+++ b/compiler/utils/arm64/assembler_arm64.cc
@@ -42,12 +42,12 @@ void Arm64Assembler::EmitSlowPaths() {
}
size_t Arm64Assembler::CodeSize() const {
- return ___ SizeOfCodeGenerated();
+ return vixl_masm_->BufferCapacity() - vixl_masm_->RemainingBufferSpace();
}
void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
// Copy the instructions from the buffer.
- MemoryRegion from(reinterpret_cast<void*>(vixl_buf_), CodeSize());
+ MemoryRegion from(vixl_masm_->GetStartAddress<void*>(), CodeSize());
region.CopyFrom(0, from);
}
diff --git a/compiler/utils/arm64/assembler_arm64.h b/compiler/utils/arm64/assembler_arm64.h
index bf89d24bd5..373fd34314 100644
--- a/compiler/utils/arm64/assembler_arm64.h
+++ b/compiler/utils/arm64/assembler_arm64.h
@@ -59,12 +59,12 @@ class Arm64Exception;
class Arm64Assembler FINAL : public Assembler {
public:
- Arm64Assembler() : vixl_buf_(new byte[kBufferSizeArm64]),
- vixl_masm_(new vixl::MacroAssembler(vixl_buf_, kBufferSizeArm64)) {}
+ // We indicate the size of the initial code generation buffer to the VIXL
+                     //  assembler. From there it will automatically manage the buffer.
+ Arm64Assembler() : vixl_masm_(new vixl::MacroAssembler(kArm64BaseBufferSize)) {}
virtual ~Arm64Assembler() {
delete vixl_masm_;
- delete[] vixl_buf_;
}
// Emit slow paths queued during assembly.
@@ -213,9 +213,6 @@ class Arm64Assembler FINAL : public Assembler {
void AddConstant(Register rd, int32_t value, vixl::Condition cond = vixl::al);
void AddConstant(Register rd, Register rn, int32_t value, vixl::Condition cond = vixl::al);
- // Vixl buffer.
- byte* vixl_buf_;
-
// Vixl assembler.
vixl::MacroAssembler* vixl_masm_;
diff --git a/compiler/utils/arm64/constants_arm64.h b/compiler/utils/arm64/constants_arm64.h
index 0cbbb1eeff..ffb54d322f 100644
--- a/compiler/utils/arm64/constants_arm64.h
+++ b/compiler/utils/arm64/constants_arm64.h
@@ -31,8 +31,7 @@ namespace arm64 {
constexpr unsigned int kJniRefSpillRegsSize = 11;
-// Vixl buffer size.
-constexpr size_t kBufferSizeArm64 = 4096*2;
+constexpr size_t kArm64BaseBufferSize = 4096;
} // namespace arm64
} // namespace art