summary refs log tree commit diff stats
path: root/runtime/base
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2015-03-27 14:35:38 -0700
committerMathieu Chartier <mathieuc@google.com>2015-04-10 12:57:27 -0700
commitc785344b87221f5e4e6473e5b762e4e61fe65dcf (patch)
treecd32ad2c2604596a18926f04d4c313dab255ecfd /runtime/base
parenta29d93b380c9aeb8270e281aefbdd0c77a430d43 (diff)
downloadart-c785344b87221f5e4e6473e5b762e4e61fe65dcf.tar.gz
art-c785344b87221f5e4e6473e5b762e4e61fe65dcf.tar.bz2
art-c785344b87221f5e4e6473e5b762e4e61fe65dcf.zip
Move ArtField to native
Add linear alloc. Moved ArtField to be native object. Changed image writer
to put ArtFields after the mirror section.

Savings: 2MB on low ram devices, 4MB on normal devices.

Total PSS measurements before (normal N5, 95s after shell start):
Image size: 7729152 bytes
23112 kB: .NonMoving
23212 kB: .NonMoving
22868 kB: .NonMoving
23072 kB: .NonMoving
22836 kB: .NonMoving
19618 kB: .Zygote
19850 kB: .Zygote
19623 kB: .Zygote
19924 kB: .Zygote
19612 kB: .Zygote
Avg: 42745.4 kB

After:
Image size: 7462912 bytes
17440 kB: .NonMoving
16776 kB: .NonMoving
16804 kB: .NonMoving
17812 kB: .NonMoving
16820 kB: .NonMoving
18788 kB: .Zygote
18856 kB: .Zygote
19064 kB: .Zygote
18841 kB: .Zygote
18629 kB: .Zygote
3499 kB: .LinearAlloc
3408 kB: .LinearAlloc
3424 kB: .LinearAlloc
3600 kB: .LinearAlloc
3436 kB: .LinearAlloc
Avg: 39439.4 kB

No reflection performance changes.

Bug: 19264997
Bug: 17643507

Change-Id: I10c73a37913332080aeb978c7c94713bdfe4fe1c
Diffstat (limited to 'runtime/base')
-rw-r--r--runtime/base/arena_allocator.cc29
-rw-r--r--runtime/base/arena_allocator.h8
-rw-r--r--runtime/base/mutex.h1
3 files changed, 29 insertions, 9 deletions
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index dd29404cd9..59d38ad593 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -132,11 +132,10 @@ MallocArena::~MallocArena() {
free(reinterpret_cast<void*>(memory_));
}
-MemMapArena::MemMapArena(size_t size) {
+MemMapArena::MemMapArena(size_t size, bool low_4gb) {
std::string error_msg;
- map_.reset(
- MemMap::MapAnonymous("dalvik-LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, false,
- false, &error_msg));
+ map_.reset(MemMap::MapAnonymous(
+ "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
CHECK(map_.get() != nullptr) << error_msg;
memory_ = map_->Begin();
size_ = map_->Size();
@@ -156,8 +155,12 @@ void Arena::Reset() {
}
}
-ArenaPool::ArenaPool(bool use_malloc)
- : use_malloc_(use_malloc), lock_("Arena pool lock"), free_arenas_(nullptr) {
+ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
+ : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
+ low_4gb_(low_4gb) {
+ if (low_4gb) {
+ CHECK(!use_malloc) << "low4gb must use map implementation";
+ }
if (!use_malloc) {
MemMap::Init();
}
@@ -182,7 +185,8 @@ Arena* ArenaPool::AllocArena(size_t size) {
}
}
if (ret == nullptr) {
- ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) : new MemMapArena(size);
+ ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
+ new MemMapArena(size, low_4gb_);
}
ret->Reset();
return ret;
@@ -229,6 +233,17 @@ size_t ArenaAllocator::BytesAllocated() const {
return ArenaAllocatorStats::BytesAllocated();
}
+size_t ArenaAllocator::BytesUsed() const {
+ size_t total = ptr_ - begin_;
+ if (arena_head_ != nullptr) {
+ for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
+ cur_arena = cur_arena->next_) {
+ total += cur_arena->GetBytesAllocated();
+ }
+ }
+ return total;
+}
+
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
: pool_(pool),
begin_(nullptr),
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index cc7b856e84..3a86b61e1a 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -165,7 +165,7 @@ class MallocArena FINAL : public Arena {
class MemMapArena FINAL : public Arena {
public:
- explicit MemMapArena(size_t size = Arena::kDefaultSize);
+ explicit MemMapArena(size_t size, bool low_4gb);
virtual ~MemMapArena() { }
void Release() OVERRIDE;
@@ -175,7 +175,7 @@ class MemMapArena FINAL : public Arena {
class ArenaPool {
public:
- explicit ArenaPool(bool use_malloc = true);
+ explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
~ArenaPool();
Arena* AllocArena(size_t size) LOCKS_EXCLUDED(lock_);
void FreeArenaChain(Arena* first) LOCKS_EXCLUDED(lock_);
@@ -188,6 +188,7 @@ class ArenaPool {
const bool use_malloc_;
mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Arena* free_arenas_ GUARDED_BY(lock_);
+ const bool low_4gb_;
DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
@@ -227,6 +228,9 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
void ObtainNewArenaForAllocation(size_t allocation_size);
size_t BytesAllocated() const;
MemStats GetMemStats() const;
+ // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
+ // TODO: Change BytesAllocated to this behavior?
+ size_t BytesUsed() const;
private:
static constexpr size_t kAlignment = 8;
diff --git a/runtime/base/mutex.h b/runtime/base/mutex.h
index af008347cd..6e4b96c2e6 100644
--- a/runtime/base/mutex.h
+++ b/runtime/base/mutex.h
@@ -73,6 +73,7 @@ enum LockLevel {
kRosAllocBulkFreeLock,
kAllocSpaceLock,
kBumpPointerSpaceBlockLock,
+ kArenaPoolLock,
kDexFileMethodInlinerLock,
kDexFileToMethodInlinerMapLock,
kMarkSweepMarkStackLock,