diff options
author | Mathieu Chartier <mathieuc@google.com> | 2015-04-22 13:56:20 -0700 |
---|---|---|
committer | Mathieu Chartier <mathieuc@google.com> | 2015-06-02 09:21:27 -0700 |
commit | 3d21bdf8894e780d349c481e5c9e29fe1556051c (patch) | |
tree | 61a5231f36c0dabd73457fec81df103462a05aff /runtime/base | |
parent | 71f0a8a123fa27bdc857a98afebbaf0ed09dac15 (diff) | |
download | android_art-3d21bdf8894e780d349c481e5c9e29fe1556051c.tar.gz android_art-3d21bdf8894e780d349c481e5c9e29fe1556051c.tar.bz2 android_art-3d21bdf8894e780d349c481e5c9e29fe1556051c.zip |
Move mirror::ArtMethod to native
Optimizing + quick tests are passing, devices boot.
TODO: Test and fix bugs in mips64.
Saves 16 bytes per most ArtMethod, 7.5MB reduction in system PSS.
Some of the savings come from the removal of the virtual-methods and
direct-methods object arrays.
Bug: 19264997
(cherry picked from commit e401d146407d61eeb99f8d6176b2ac13c4df1e33)
Change-Id: I622469a0cfa0e7082a2119f3d6a9491eb61e3f3d
Fix some ArtMethod related bugs
Added root visiting for runtime methods, not currently required
since the GcRoots in these methods are null.
Added missing GetInterfaceMethodIfProxy in GetMethodLine, fixes
--trace run-tests 005, 044.
Fixed optimizing compiler bug where we used a normal stack location
instead of double on ARM64, this fixes the debuggable tests.
TODO: Fix JDWP tests.
Bug: 19264997
Change-Id: I7c55f69c61d1b45351fd0dc7185ffe5efad82bd3
ART: Fix casts for 64-bit pointers on 32-bit compiler.
Bug: 19264997
Change-Id: Ief45cdd4bae5a43fc8bfdfa7cf744e2c57529457
Fix JDWP tests after ArtMethod change
Fixes Throwable::GetStackDepth for exception event detection after
internal stack trace representation change.
Adds missing ArtMethod::GetInterfaceMethodIfProxy call in case of
proxy method.
Bug: 19264997
Change-Id: I363e293796848c3ec491c963813f62d868da44d2
Fix accidental IMT and root marking regression
Was always using the conflict trampoline. Also included fix for
regression in GC time caused by extra roots. Most of the regression
was IMT.
Fixed bug in DumpGcPerformanceInfo where we would get SIGABRT due to
detached thread.
EvaluateAndApplyChanges:
From ~2500 -> ~1980
GC time: 8.2s -> 7.2s due to 1s less of MarkConcurrentRoots
Bug: 19264997
Change-Id: I4333e80a8268c2ed1284f87f25b9f113d4f2c7e0
Fix bogus image test assert
Previously we were comparing the size of the non moving space to the
size of the image file.
Now we properly compare the size of the image space against the size
of the image file.
Bug: 19264997
Change-Id: I7359f1f73ae3df60c5147245935a24431c04808a
[MIPS64] Fix art_quick_invoke_stub argument offsets.
The size of an ArtMethod reference got bigger, so we need to move the other
args and leave enough space for the ArtMethod* and the 'this' pointer.
This fixes mips64 boot.
Bug: 19264997
Change-Id: I47198d5f39a4caab30b3b77479d5eedaad5006ab
Diffstat (limited to 'runtime/base')
-rw-r--r-- | runtime/base/arena_allocator.cc | 12 | ||||
-rw-r--r-- | runtime/base/arena_allocator.h | 38 | ||||
-rw-r--r-- | runtime/base/iteration_range.h | 11 | ||||
-rw-r--r-- | runtime/base/macros.h | 1 | ||||
-rw-r--r-- | runtime/base/scoped_arena_containers.h | 6 |
5 files changed, 65 insertions, 3 deletions
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc index b53fa84f5c..8f2d94b564 100644 --- a/runtime/base/arena_allocator.cc +++ b/runtime/base/arena_allocator.cc @@ -302,6 +302,18 @@ void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) { end_ = new_arena->End(); } +bool ArenaAllocator::Contains(const void* ptr) const { + if (ptr >= begin_ && ptr < end_) { + return true; + } + for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) { + if (cur_arena->Contains(ptr)) { + return true; + } + } + return false; +} + MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena, ssize_t lost_bytes_adjustment) : name_(name), diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h index 2e617b500a..d9723b57de 100644 --- a/runtime/base/arena_allocator.h +++ b/runtime/base/arena_allocator.h @@ -142,6 +142,11 @@ class Arena { return bytes_allocated_; } + // Return true if ptr is contained in the arena. + bool Contains(const void* ptr) const { + return memory_ <= ptr && ptr < memory_ + bytes_allocated_; + } + protected: size_t bytes_allocated_; uint8_t* memory_; @@ -219,19 +224,52 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats return ret; } + // Realloc never frees the input pointer, it is the caller's job to do this if necessary. + void* Realloc(void* ptr, size_t ptr_size, size_t new_size, + ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE { + DCHECK_GE(new_size, ptr_size); + DCHECK_EQ(ptr == nullptr, ptr_size == 0u); + auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size; + // If we haven't allocated anything else, we can safely extend. + if (end == ptr_) { + const size_t size_delta = new_size - ptr_size; + // Check remain space. 
+ const size_t remain = end_ - ptr_; + if (remain >= size_delta) { + ptr_ += size_delta; + ArenaAllocatorStats::RecordAlloc(size_delta, kind); + return ptr; + } + } + auto* new_ptr = Alloc(new_size, kind); + memcpy(new_ptr, ptr, ptr_size); + // TODO: Call free on ptr if linear alloc supports free. + return new_ptr; + } + template <typename T> T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) { return static_cast<T*>(Alloc(length * sizeof(T), kind)); } void* AllocValgrind(size_t bytes, ArenaAllocKind kind); + void ObtainNewArenaForAllocation(size_t allocation_size); + size_t BytesAllocated() const; + MemStats GetMemStats() const; + // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes. // TODO: Change BytesAllocated to this behavior? size_t BytesUsed() const; + ArenaPool* GetArenaPool() const { + return pool_; + } + + bool Contains(const void* ptr) const; + private: static constexpr size_t kAlignment = 8; diff --git a/runtime/base/iteration_range.h b/runtime/base/iteration_range.h index 5a46376237..6a0ef1f585 100644 --- a/runtime/base/iteration_range.h +++ b/runtime/base/iteration_range.h @@ -17,6 +17,8 @@ #ifndef ART_RUNTIME_BASE_ITERATION_RANGE_H_ #define ART_RUNTIME_BASE_ITERATION_RANGE_H_ +#include <iterator> + namespace art { // Helper class that acts as a container for range-based loops, given an iteration @@ -38,10 +40,15 @@ class IterationRange { iterator cend() const { return last_; } private: - iterator first_; - iterator last_; + const iterator first_; + const iterator last_; }; +template <typename Iter> +static inline IterationRange<Iter> MakeIterationRange(const Iter& begin_it, const Iter& end_it) { + return IterationRange<Iter>(begin_it, end_it); +} + } // namespace art #endif // ART_RUNTIME_BASE_ITERATION_RANGE_H_ diff --git a/runtime/base/macros.h b/runtime/base/macros.h index c00ae78be8..5c596471c2 100644 --- a/runtime/base/macros.h +++ b/runtime/base/macros.h @@ -50,7 +50,6 @@ friend class 
test_set_name##_##individual_test##_Test #define ART_FRIEND_TYPED_TEST(test_set_name, individual_test)\ template<typename T> ART_FRIEND_TEST(test_set_name, individual_test) - // DISALLOW_COPY_AND_ASSIGN disallows the copy and operator= functions. It goes in the private: // declarations in a class. #if !defined(DISALLOW_COPY_AND_ASSIGN) diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h index df79085626..82db60e4e4 100644 --- a/runtime/base/scoped_arena_containers.h +++ b/runtime/base/scoped_arena_containers.h @@ -20,6 +20,7 @@ #include <deque> #include <queue> #include <set> +#include <unordered_map> #include <vector> #include "arena_containers.h" // For ArenaAllocatorAdapterKind. @@ -55,6 +56,11 @@ template <typename K, typename V, typename Comparator = std::less<K>> using ScopedArenaSafeMap = SafeMap<K, V, Comparator, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>; +template <typename K, typename V, class Hash = std::hash<K>, class KeyEqual = std::equal_to<K>> +using ScopedArenaUnorderedMap = + std::unordered_map<K, V, Hash, KeyEqual, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>; + + // Implementation details below. template <> |