author	Hiroshi Yamauchi <yamauchi@google.com>	2014-02-11 17:02:22 -0800
committer	Hiroshi Yamauchi <yamauchi@google.com>	2014-02-13 13:43:43 -0800
commit	f5b0e20b5b31f5f5465784adcf2a204dcd69c7fd (patch)
tree	9e1b211414c1dcb34f19a7b509e72aee2153a3cf /runtime/gc/heap-inl.h
parent	68bb649b128cd8760732524bd7ba58b49780d9d3 (diff)
Thread-local allocation stack.
With this change, Ritz MemAllocTest gets ~14% faster on N4.

Bug: 9986565
Change-Id: I2fb7d6f7c5daa63dd4fc73ba739e6ae4ed820617
Diffstat (limited to 'runtime/gc/heap-inl.h')
-rw-r--r--	runtime/gc/heap-inl.h	35
1 file changed, 30 insertions(+), 5 deletions(-)
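In short: before this patch, every allocation pushed its object onto a single shared allocation stack with an atomic operation, so all allocating threads contended on one cursor. After it, each thread pushes into a private 128-reference segment with a plain pointer bump, and only touches the shared stack once per segment, when it carves out a new one with AtomicBumpBack. A runnable sketch of the scheme follows the diff.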
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index 5e1136b805..9c91b0e1a1 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -82,11 +82,7 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
DCHECK(!Runtime::Current()->HasStatsEnabled());
}
if (AllocatorHasAllocationStack(allocator)) {
- // This is safe to do since the GC will never free objects which are neither in the allocation
- // stack nor the live bitmap.
- while (!allocation_stack_->AtomicPushBack(obj)) {
- CollectGarbageInternal(collector::kGcTypeSticky, kGcCauseForAlloc, false);
- }
+ PushOnAllocationStack(self, obj);
}
if (kInstrumented) {
if (Dbg::IsAllocTrackingEnabled()) {
@@ -111,6 +107,35 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
return obj;
}
+// The size of a thread-local allocation stack in the number of references.
+static constexpr size_t kThreadLocalAllocationStackSize = 128;
+
+inline void Heap::PushOnAllocationStack(Thread* self, mirror::Object* obj) {
+ if (kUseThreadLocalAllocationStack) {
+ bool success = self->PushOnThreadLocalAllocationStack(obj);
+ if (UNLIKELY(!success)) {
+ // Slow path. Allocate a new thread-local allocation stack.
+ mirror::Object** start_address;
+ mirror::Object** end_address;
+ while (!allocation_stack_->AtomicBumpBack(kThreadLocalAllocationStackSize,
+ &start_address, &end_address)) {
+ CollectGarbageInternal(collector::kGcTypeSticky, kGcCauseForAlloc, false);
+ }
+ self->SetThreadLocalAllocationStack(start_address, end_address);
+ // Retry on the new thread-local allocation stack.
+ success = self->PushOnThreadLocalAllocationStack(obj);
+ // Must succeed.
+ CHECK(success);
+ }
+ } else {
+ // This is safe to do since the GC will never free objects which are neither in the allocation
+ // stack nor the live bitmap.
+ while (!allocation_stack_->AtomicPushBack(obj)) {
+ CollectGarbageInternal(collector::kGcTypeSticky, kGcCauseForAlloc, false);
+ }
+ }
+}
+
template <bool kInstrumented, typename PreFenceVisitor>
inline mirror::Object* Heap::AllocLargeObject(Thread* self, mirror::Class* klass,
size_t byte_count,
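
For readers who want to see the mechanism end to end, below is a minimal, self-contained C++ sketch of the two-level scheme this patch introduces. The bodies of AtomicStack, AtomicBumpBack, and the Thread fast path are illustrative assumptions modeled on the identifiers in the diff, not ART's real implementations, which live elsewhere in the runtime; notably, the real slow path runs a sticky GC and retries instead of failing.

// Sketch only: names mirror the diff, bodies are assumptions.
#include <atomic>
#include <cstddef>
#include <cstdio>

struct Object {};  // stand-in for mirror::Object

// Shared allocation stack: one atomic cursor over a fixed slot array.
class AtomicStack {
 public:
  explicit AtomicStack(size_t capacity)
      : capacity_(capacity), slots_(new Object*[capacity]), back_(0) {}
  ~AtomicStack() { delete[] slots_; }

  // Reserve `count` contiguous slots with one CAS loop; on success the
  // caller owns [*start, *end) and can fill it without further atomics.
  bool AtomicBumpBack(size_t count, Object*** start, Object*** end) {
    size_t index = back_.load(std::memory_order_relaxed);
    do {
      if (index + count > capacity_) {
        return false;  // full: the real code triggers a sticky GC and retries
      }
    } while (!back_.compare_exchange_weak(index, index + count,
                                          std::memory_order_relaxed));
    *start = slots_ + index;
    *end = slots_ + index + count;
    return true;
  }

 private:
  const size_t capacity_;
  Object** slots_;
  std::atomic<size_t> back_;
};

// Hypothetical per-thread bookkeeping; ART keeps the equivalent fields on
// its Thread object.
struct Thread {
  Object** tl_pos = nullptr;  // next free slot in the current segment
  Object** tl_end = nullptr;  // one past the last slot

  // Fast path: a plain pointer bump, no atomic traffic at all.
  bool PushOnThreadLocalAllocationStack(Object* obj) {
    if (tl_pos == tl_end) {
      return false;  // segment exhausted (or never assigned)
    }
    *tl_pos++ = obj;
    return true;
  }

  void SetThreadLocalAllocationStack(Object** start, Object** end) {
    tl_pos = start;
    tl_end = end;
  }
};

static constexpr size_t kThreadLocalAllocationStackSize = 128;

// Mirrors Heap::PushOnAllocationStack from the diff, with the GC-and-retry
// loop reduced to a single attempt for the sketch.
bool PushOnAllocationStack(AtomicStack& stack, Thread& self, Object* obj) {
  if (self.PushOnThreadLocalAllocationStack(obj)) {
    return true;  // common case: one predictable branch and a store
  }
  Object** start;
  Object** end;
  if (!stack.AtomicBumpBack(kThreadLocalAllocationStackSize, &start, &end)) {
    return false;  // the real code calls CollectGarbageInternal and retries
  }
  self.SetThreadLocalAllocationStack(start, end);
  return self.PushOnThreadLocalAllocationStack(obj);  // must succeed now
}

int main() {
  AtomicStack stack(1024);
  Thread t;
  Object o;
  for (int i = 0; i < 300; ++i) {
    if (!PushOnAllocationStack(stack, t, &o)) {
      return 1;
    }
  }
  std::printf("pushed 300 objects across three 128-slot segments\n");
  return 0;
}

The payoff is that the hot path costs a compare and a store instead of a contended atomic per allocation; the shared cursor is touched only once per 128 pushes. That is a plausible source of the ~14% MemAllocTest improvement the commit message cites, though the commit itself does not break the win down.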