author      Mathieu Chartier <mathieuc@google.com>   2013-11-20 12:33:14 -0800
committer   Mathieu Chartier <mathieuc@google.com>   2013-11-20 13:09:20 -0800
commit      1febddf359ae500ef1bb01ab4883b076fcb56440 (patch)
tree        abcb59ae807d3b8efae95ee0a2e34d6b66e52275 /runtime/gc/heap-inl.h
parent      e768dfd50aa893b2956a59f59f3ed4033aed8cef (diff)
Set array length before fence in allocation code path.
Could not delete SetLength since it is required by space_test.

Bug: 11747779
Change-Id: Icf1ead216b6ff1b519240ab0d0ca30d68429d5b6
Diffstat (limited to 'runtime/gc/heap-inl.h')
-rw-r--r--  runtime/gc/heap-inl.h | 21
1 file changed, 11 insertions(+), 10 deletions(-)
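
The change below threads a PreFenceVisitor through AllocObjectWithAllocator so that type-specific initialization (here, setting an array's length) runs after the class pointer is installed but before the fence that publishes the object, replacing the old "TODO: Set array length here" placeholder. The following is a minimal, self-contained sketch of that pattern, not the real ART code: Object, Array, SetLengthVisitor, and AllocWithVisitor are simplified, hypothetical stand-ins for the mirror:: types and the Heap allocation path.

// Illustrative sketch only: toy stand-ins for the ART types, showing the
// shape of a pre-fence visitor that initializes the array length before the
// release fence that makes the object visible to other threads.
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct Object {
  void* klass = nullptr;   // stand-in for the class pointer set via SetClass()
};

struct Array : Object {
  int32_t length = 0;      // stand-in for the array length field
  void SetLength(int32_t len) { length = len; }
};

// Hypothetical visitor: invoked on the freshly allocated object after the
// class pointer is set but before the publishing fence.
class SetLengthVisitor {
 public:
  explicit SetLengthVisitor(int32_t length) : length_(length) {}
  void operator()(Object* obj) const {
    static_cast<Array*>(obj)->SetLength(length_);
  }
 private:
  const int32_t length_;
};

// Hypothetical allocator following the shape of AllocObjectWithAllocator:
// allocate, set the class, run the visitor, then issue a release fence
// before the object can be handed out.
template <typename PreFenceVisitor>
Object* AllocWithVisitor(void* klass, std::size_t byte_count,
                         const PreFenceVisitor& pre_fence_visitor) {
  (void)byte_count;                // unused in this toy sketch
  Object* obj = new Array();       // toy allocation in place of TryToAllocate
  obj->klass = klass;
  pre_fence_visitor(obj);          // e.g. sets the array length
  std::atomic_thread_fence(std::memory_order_release);
  return obj;
}

int main() {
  int fake_class = 0;
  SetLengthVisitor visitor(16);
  Object* obj = AllocWithVisitor(&fake_class, sizeof(Array), visitor);
  std::printf("length = %d\n", static_cast<Array*>(obj)->length);
  delete static_cast<Array*>(obj);
  return 0;
}

The point of ordering things this way is that every field a reader may inspect after publication (here the length) is written before the release fence, so a thread that observes the published pointer also observes a fully initialized object.
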
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index fcc07a0224..9b28555528 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -32,10 +32,11 @@
namespace art {
namespace gc {
-template <const bool kInstrumented>
-inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Class* c,
- size_t byte_count, AllocatorType allocator) {
- DebugCheckPreconditionsForAllocObject(c, byte_count);
+template <bool kInstrumented, typename PreFenceVisitor>
+inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Class* klass,
+ size_t byte_count, AllocatorType allocator,
+ const PreFenceVisitor& pre_fence_visitor) {
+ DebugCheckPreconditionsForAllocObject(klass, byte_count);
// Since allocation can cause a GC which will need to SuspendAll, make sure all allocations are
// done in the runnable state where suspension is expected.
DCHECK_EQ(self->GetState(), kRunnable);
@@ -43,7 +44,7 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
mirror::Object* obj;
size_t bytes_allocated;
AllocationTimer alloc_timer(this, &obj);
- if (UNLIKELY(ShouldAllocLargeObject(c, byte_count))) {
+ if (UNLIKELY(ShouldAllocLargeObject(klass, byte_count))) {
obj = TryToAllocate<kInstrumented>(self, kAllocatorTypeLOS, byte_count, false,
&bytes_allocated);
allocator = kAllocatorTypeLOS;
@@ -52,16 +53,16 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
}
if (UNLIKELY(obj == nullptr)) {
- SirtRef<mirror::Class> sirt_c(self, c);
+ SirtRef<mirror::Class> sirt_c(self, klass);
obj = AllocateInternalWithGc(self, allocator, byte_count, &bytes_allocated);
if (obj == nullptr) {
return nullptr;
} else {
- c = sirt_c.get();
+ klass = sirt_c.get();
}
}
- obj->SetClass(c);
- // TODO: Set array length here.
+ obj->SetClass(klass);
+ pre_fence_visitor(obj);
DCHECK_GT(bytes_allocated, 0u);
const size_t new_num_bytes_allocated =
static_cast<size_t>(num_bytes_allocated_.fetch_add(bytes_allocated)) + bytes_allocated;
@@ -87,7 +88,7 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
}
if (kInstrumented) {
if (Dbg::IsAllocTrackingEnabled()) {
- Dbg::RecordAllocation(c, bytes_allocated);
+ Dbg::RecordAllocation(klass, bytes_allocated);
}
} else {
DCHECK(!Dbg::IsAllocTrackingEnabled());