author     Mathieu Chartier <mathieuc@google.com>  2015-04-22 13:56:20 -0700
committer  Mathieu Chartier <mathieuc@google.com>  2015-06-02 09:21:27 -0700
commit     3d21bdf8894e780d349c481e5c9e29fe1556051c (patch)
tree       61a5231f36c0dabd73457fec81df103462a05aff /compiler/image_writer.h
parent     71f0a8a123fa27bdc857a98afebbaf0ed09dac15 (diff)
Move mirror::ArtMethod to native
Optimizing + quick tests are passing, devices boot. TODO: Test and fix bugs in mips64.

Saves 16 bytes per ArtMethod in most cases, a 7.5MB reduction in system PSS. Some of the savings come from removal of the virtual-methods and direct-methods object arrays.

Bug: 19264997
(cherry picked from commit e401d146407d61eeb99f8d6176b2ac13c4df1e33)
Change-Id: I622469a0cfa0e7082a2119f3d6a9491eb61e3f3d

Fix some ArtMethod related bugs

Added root visiting for runtime methods; not currently required since the GcRoots in these methods are null. Added missing GetInterfaceMethodIfProxy in GetMethodLine; fixes --trace run-tests 005, 044. Fixed an optimizing compiler bug where we used a normal stack location instead of a double on ARM64; this fixes the debuggable tests. TODO: Fix JDWP tests.

Bug: 19264997
Change-Id: I7c55f69c61d1b45351fd0dc7185ffe5efad82bd3

ART: Fix casts for 64-bit pointers on 32-bit compiler.

Bug: 19264997
Change-Id: Ief45cdd4bae5a43fc8bfdfa7cf744e2c57529457

Fix JDWP tests after ArtMethod change

Fixes Throwable::GetStackDepth for exception event detection after the internal stack trace representation change. Adds the missing ArtMethod::GetInterfaceMethodIfProxy call in the case of a proxy method.

Bug: 19264997
Change-Id: I363e293796848c3ec491c963813f62d868da44d2

Fix accidental IMT and root marking regression

Was always using the conflict trampoline. Also includes a fix for a regression in GC time caused by extra roots; most of the regression was IMT. Fixed a bug in DumpGcPerformanceInfo where we would get SIGABRT due to a detached thread. EvaluateAndApplyChanges: from ~2500 to ~1980. GC time: 8.2s to 7.2s, due to 1s less of MarkConcurrentRoots.

Bug: 19264997
Change-Id: I4333e80a8268c2ed1284f87f25b9f113d4f2c7e0

Fix bogus image test assert

Previously we were comparing the size of the non-moving space to the size of the image file. Now we properly compare the size of the image space against the size of the image file.

Bug: 19264997
Change-Id: I7359f1f73ae3df60c5147245935a24431c04808a

[MIPS64] Fix art_quick_invoke_stub argument offsets.

The ArtMethod reference's size got bigger, so we need to move the other args and leave enough space for the ArtMethod* and the 'this' pointer. This fixes mips64 boot.

Bug: 19264997
Change-Id: I47198d5f39a4caab30b3b77479d5eedaad5006ab
Diffstat (limited to 'compiler/image_writer.h')
-rw-r--r--  compiler/image_writer.h | 105
1 file changed, 65 insertions(+), 40 deletions(-)
diff --git a/compiler/image_writer.h b/compiler/image_writer.h
index 592173239..a35d6ad9c 100644
--- a/compiler/image_writer.h
+++ b/compiler/image_writer.h
@@ -30,12 +30,13 @@
#include "base/macros.h"
#include "driver/compiler_driver.h"
#include "gc/space/space.h"
+#include "lock_word.h"
#include "mem_map.h"
#include "oat_file.h"
#include "mirror/dex_cache.h"
#include "os.h"
#include "safe_map.h"
-#include "gc/space/space.h"
+#include "utils.h"
namespace art {
@@ -53,18 +54,12 @@ class ImageWriter FINAL {
quick_to_interpreter_bridge_offset_(0), compile_pic_(compile_pic),
target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
bin_slot_sizes_(), bin_slot_previous_sizes_(), bin_slot_count_(),
- string_data_array_(nullptr) {
+ dirty_methods_(0u), clean_methods_(0u) {
CHECK_NE(image_begin, 0U);
+ std::fill(image_methods_, image_methods_ + arraysize(image_methods_), nullptr);
}
~ImageWriter() {
- // For interned strings a large array is allocated to hold all the character data and avoid
- // overhead. However, no GC is run anymore at this point. As the array is likely large, it
- // will be allocated in the large object space, where valgrind can track every single
- // allocation. Not explicitly freeing that array will be recognized as a leak.
- if (RUNNING_ON_VALGRIND != 0) {
- FreeStringDataArray();
- }
}
bool PrepareImageAddressSpace();
@@ -73,14 +68,14 @@ class ImageWriter FINAL {
return image_roots_address_ != 0u;
}
- mirror::Object* GetImageAddress(mirror::Object* object) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- if (object == nullptr) {
- return nullptr;
- }
- return reinterpret_cast<mirror::Object*>(image_begin_ + GetImageOffset(object));
+ template <typename T>
+ T* GetImageAddress(T* object) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return object == nullptr ? nullptr :
+ reinterpret_cast<T*>(image_begin_ + GetImageOffset(object));
}
+ ArtMethod* GetImageMethodAddress(ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
mirror::HeapReference<mirror::Object>* GetDexCacheArrayElementImageAddress(
const DexFile* dex_file, uint32_t offset) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
auto it = dex_cache_array_starts_.find(dex_file);
@@ -90,11 +85,12 @@ class ImageWriter FINAL {
}
uint8_t* GetOatFileBegin() const {
- return image_begin_ + RoundUp(image_end_ + bin_slot_sizes_[kBinArtField], kPageSize);
+ return image_begin_ + RoundUp(
+ image_end_ + bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] +
+ bin_slot_sizes_[kBinArtMethodClean], kPageSize);
}
- bool Write(const std::string& image_filename,
- const std::string& oat_filename,
+ bool Write(const std::string& image_filename, const std::string& oat_filename,
const std::string& oat_location)
LOCKS_EXCLUDED(Locks::mutator_lock_);
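
For reference, the updated GetOatFileBegin() arithmetic above reads as: oat data starts after the mirror objects plus the native ArtField and ArtMethod bins, rounded up to a page boundary. A minimal standalone sketch of that computation; every size below is a made-up example value, not taken from this patch:

#include <cstdint>
#include <cstdio>

// Round 'x' up to the next multiple of 'alignment' (a power of two), mirroring art::RoundUp.
constexpr uintptr_t RoundUp(uintptr_t x, uintptr_t alignment) {
  return (x + alignment - 1) & ~(alignment - 1);
}

int main() {
  constexpr uintptr_t kPageSize = 4096;
  uintptr_t image_begin = 0x70000000;       // example image base address
  uintptr_t image_end = 0x123456;           // example size of the mirror-object portion
  uintptr_t art_field_bin = 0x8000;         // example kBinArtField bytes
  uintptr_t art_method_clean_bin = 0x6000;  // example kBinArtMethodClean bytes
  uintptr_t art_method_dirty_bin = 0x2000;  // example kBinArtMethodDirty bytes

  // Oat data begins after image objects + native ArtFields + native ArtMethods,
  // rounded up to a page boundary.
  uintptr_t oat_file_begin = image_begin +
      RoundUp(image_end + art_field_bin + art_method_clean_bin + art_method_dirty_bin, kPageSize);
  std::printf("oat file begins at 0x%zx\n", static_cast<size_t>(oat_file_begin));
  return 0;
}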
@@ -124,11 +120,15 @@ class ImageWriter FINAL {
kBinClassInitializedFinalStatics, // Class initializers have been run, no non-final statics
kBinClassInitialized, // Class initializers have been run
kBinClassVerified, // Class verified, but initializers haven't been run
- kBinArtMethodNative, // Art method that is actually native
- kBinArtMethodNotInitialized, // Art method with a declaring class that wasn't initialized
// Add more bins here if we add more segregation code.
- // Non mirror fields must be below. ArtFields should be always clean.
+ // Non mirror fields must be below.
+ // ArtFields should be always clean.
kBinArtField,
+ // If the class is initialized, then the ArtMethods are probably clean.
+ kBinArtMethodClean,
+ // ArtMethods may be dirty if the class has native methods or a declaring class that isn't
+ // initialized.
+ kBinArtMethodDirty,
kBinSize,
// Number of bins which are for mirror objects.
kBinMirrorCount = kBinArtField,
@@ -138,9 +138,12 @@ class ImageWriter FINAL {
static constexpr size_t kBinBits = MinimumBitsToStore<uint32_t>(kBinMirrorCount - 1);
// uint32 = typeof(lockword_)
- static constexpr size_t kBinShift = BitSizeOf<uint32_t>() - kBinBits;
+ // Subtract read barrier bits since we want these to remain 0, or else it may result in DCHECK
+ // failures due to invalid read barrier bits during object field reads.
+ static const size_t kBinShift = BitSizeOf<uint32_t>() - kBinBits -
+ LockWord::kReadBarrierStateSize;
// 111000.....0
- static constexpr size_t kBinMask = ((static_cast<size_t>(1) << kBinBits) - 1) << kBinShift;
+ static const size_t kBinMask = ((static_cast<size_t>(1) << kBinBits) - 1) << kBinShift;
// We use the lock word to store the bin # and bin index of the object in the image.
//
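
The comments above pack the bin number into the upper bits of the object's 32-bit lock word while leaving the read-barrier bits zero. A self-contained sketch of that packing, using assumed widths (3 bin bits, 2 read-barrier bits) rather than the real MinimumBitsToStore/LockWord values:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Illustrative constants; the real values come from MinimumBitsToStore and LockWord.
constexpr size_t kReadBarrierStateSize = 2;  // assumed width of the read-barrier bits
constexpr size_t kBinBits = 3;               // assumed: enough to store kBinMirrorCount - 1
constexpr size_t kBinShift = 32 - kBinBits - kReadBarrierStateSize;
constexpr uint32_t kBinMask = ((1u << kBinBits) - 1) << kBinShift;

// Pack a bin number and an intra-bin index into a temporary lock word value.
// The top kReadBarrierStateSize bits stay zero, matching the comment about
// avoiding DCHECK failures from invalid read-barrier bits.
constexpr uint32_t PackBinSlot(uint32_t bin, uint32_t index) {
  return (bin << kBinShift) | index;
}

constexpr uint32_t BinOf(uint32_t lockword) { return (lockword & kBinMask) >> kBinShift; }
constexpr uint32_t IndexOf(uint32_t lockword) { return lockword & ~kBinMask; }

int main() {
  uint32_t slot = PackBinSlot(/*bin=*/5u, /*index=*/0x1234u);
  assert(BinOf(slot) == 5u);
  assert(IndexOf(slot) == 0x1234u);
  assert((slot >> (32 - kReadBarrierStateSize)) == 0u);  // read-barrier bits remain 0
  return 0;
}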
@@ -172,6 +175,8 @@ class ImageWriter FINAL {
bool IsImageOffsetAssigned(mirror::Object* object) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
size_t GetImageOffset(mirror::Object* object) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void UpdateImageOffset(mirror::Object* obj, uintptr_t offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void PrepareDexCacheArraySlots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void AssignImageBinSlot(mirror::Object* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -181,6 +186,8 @@ class ImageWriter FINAL {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
BinSlot GetImageBinSlot(mirror::Object* object) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void AddMethodPointerArray(mirror::PointerArray* arr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
static void* GetImageAddressCallback(void* writer, mirror::Object* obj)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return reinterpret_cast<ImageWriter*>(writer)->GetImageAddress(obj);
@@ -197,10 +204,12 @@ class ImageWriter FINAL {
// With Quick, code is within the OatFile, as there are all in one
// .o ELF object.
DCHECK_LT(offset, oat_file_->Size());
- if (offset == 0u) {
- return nullptr;
- }
- return oat_data_begin_ + offset;
+ DCHECK(oat_data_begin_ != nullptr);
+ return offset == 0u ? nullptr : oat_data_begin_ + offset;
+ }
+
+ static bool IsArtMethodBin(Bin bin) {
+ return bin == kBinArtMethodClean || bin == kBinArtMethodDirty;
}
// Returns true if the class was in the original requested image classes list.
@@ -257,21 +266,20 @@ class ImageWriter FINAL {
static void CopyAndFixupObjectsCallback(mirror::Object* obj, void* arg)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void CopyAndFixupObject(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool CopyAndFixupIfDexCacheFieldArray(mirror::Object* dst, mirror::Object* obj,
- mirror::Class* klass)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void FixupMethod(mirror::ArtMethod* orig, mirror::ArtMethod* copy)
+ void CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void FixupClass(mirror::Class* orig, mirror::Class* copy)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void FixupObject(mirror::Object* orig, mirror::Object* copy)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr, mirror::Class* klass,
+ Bin array_type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Get quick code for non-resolution/imt_conflict/abstract method.
- const uint8_t* GetQuickCode(mirror::ArtMethod* method, bool* quick_is_interpreted)
+ const uint8_t* GetQuickCode(ArtMethod* method, bool* quick_is_interpreted)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- const uint8_t* GetQuickEntryPoint(mirror::ArtMethod* method)
+ const uint8_t* GetQuickEntryPoint(ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Patches references in OatFile to expect runtime addresses.
@@ -280,8 +288,11 @@ class ImageWriter FINAL {
// Calculate the sum total of the bin slot sizes in [0, up_to). Defaults to all bins.
size_t GetBinSizeSum(Bin up_to = kBinSize) const;
- // Release the string_data_array_.
- void FreeStringDataArray();
+ // Return true if a method is likely to be dirtied at runtime.
+ bool WillMethodBeDirty(ArtMethod* m) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ // Assign the offset for an ArtMethod.
+ void AssignMethodOffset(ArtMethod* method, Bin bin) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
const CompilerDriver& compiler_driver_;
@@ -308,9 +319,14 @@ class ImageWriter FINAL {
struct DexCacheArrayLocation {
size_t offset_;
size_t length_;
+ Bin bin_type_;
};
SafeMap<mirror::Object*, DexCacheArrayLocation> dex_cache_array_indexes_;
+ // Pointer arrays that need to be updated. Since these are only some int and long arrays, we need
+ // to keep track. These include vtable arrays, iftable arrays, and dex caches.
+ std::unordered_map<mirror::PointerArray*, Bin> pointer_arrays_;
+
// The start offsets of the dex cache arrays.
SafeMap<const DexFile*, size_t> dex_cache_array_starts_;
@@ -344,12 +360,21 @@ class ImageWriter FINAL {
size_t bin_slot_previous_sizes_[kBinSize]; // Number of bytes in previous bins.
size_t bin_slot_count_[kBinSize]; // Number of objects in a bin
- // ArtField relocating map, ArtFields are allocated as array of structs but we want to have one
- // entry per art field for convenience.
- // ArtFields are placed right after the end of the image objects (aka sum of bin_slot_sizes_).
- std::unordered_map<ArtField*, uintptr_t> art_field_reloc_;
+ // ArtField, ArtMethod relocating map. These are allocated as array of structs but we want to
+ // have one entry per art field for convenience. ArtFields are placed right after the end of the
+ // image objects (aka sum of bin_slot_sizes_). ArtMethods are placed right after the ArtFields.
+ struct NativeObjectReloc {
+ uintptr_t offset;
+ Bin bin_type;
+ };
+ std::unordered_map<void*, NativeObjectReloc> native_object_reloc_;
+
+ // Runtime ArtMethods which aren't reachable from any Class but need to be copied into the image.
+ ArtMethod* image_methods_[ImageHeader::kImageMethodsCount];
- void* string_data_array_; // The backing for the interned strings.
+ // Counters for measurements, used for logging only.
+ uint64_t dirty_methods_;
+ uint64_t clean_methods_;
friend class FixupVisitor;
friend class FixupClassVisitor;
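
The NativeObjectReloc comment above places native ArtFields right after the mirror objects and ArtMethods right after the ArtFields, with native_object_reloc_ recording each object's offset and bin. A rough sketch of how such a map could translate a runtime native pointer into an image address; NativeToImageAddress and its parameters are hypothetical, and the real logic lives in image_writer.cc, which is not part of this diff:

#include <cstdint>
#include <unordered_map>

// Mirrors the bins relevant to native objects in the header above.
enum Bin { kBinArtField, kBinArtMethodClean, kBinArtMethodDirty, kBinSize };

// Same shape as ImageWriter::NativeObjectReloc.
struct NativeObjectReloc {
  uintptr_t offset;  // offset of the copied object within the image
  Bin bin_type;
};

// Hypothetical helper: map a runtime native pointer (ArtField* / ArtMethod*)
// to its address inside the image, given the image base and the reloc map.
template <typename T>
T* NativeToImageAddress(T* obj, uint8_t* image_begin,
                        const std::unordered_map<void*, NativeObjectReloc>& relocs) {
  if (obj == nullptr) {
    return nullptr;
  }
  auto it = relocs.find(obj);
  // The real writer would treat a missing entry as a hard error (CHECK); here we just bail.
  if (it == relocs.end()) {
    return nullptr;
  }
  return reinterpret_cast<T*>(image_begin + it->second.offset);
}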