author     Mathieu Chartier <mathieuc@google.com>  2014-05-07 15:43:14 -0700
committer  Mathieu Chartier <mathieuc@google.com>  2014-05-13 14:45:54 -0700
commit     eb8167a4f4d27fce0530f6724ab8032610cd146b (patch)
tree       bcfeaf13ad78f2dd68466bbd0e20c71944f7e854 /runtime/gc/heap-inl.h
parent     6fb66a2bc4e1c0b7931101153e58714991237af7 (diff)
Add Handle/HandleScope and delete SirtRef.
Deleted SirtRef and replaced it with Handle. Handles are value types which wrap around StackReference*. Renamed StackIndirectReferenceTable to HandleScope. Added a scoped handle wrapper which wraps around an Object** and restores it in its destructor. Renamed Handle::get -> Get.

Bug: 8473721
Change-Id: Idbfebd4f35af629f0f43931b7c5184b334822c7a
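To illustrate the pattern this change introduces, here is a minimal, self-contained sketch of the wrapper idea: a value type that captures an Object** on construction and writes the tracked reference back through it in its destructor, so an object moved by the GC while the wrapper is live is restored into the caller's slot. This is illustrative only; the real StackHandleScope and HandleWrapper types live in handle_scope-inl.h and differ in detail.

    #include <cassert>

    struct Object { int data; };

    // Sketch only: stands in for ART's HandleWrapper<mirror::Object>.
    class HandleWrapperSketch {
     public:
      explicit HandleWrapperSketch(Object** obj) : obj_(obj), ref_(*obj) {}
      // Destructor restores the caller's slot in case the object moved.
      ~HandleWrapperSketch() { *obj_ = ref_; }
      // Address a (hypothetical) moving collector would update while the
      // wrapper is live.
      Object** AddressOfRef() { return &ref_; }

     private:
      Object** obj_;  // caller's slot, restored on destruction
      Object* ref_;   // tracked reference
    };

    int main() {
      Object old_location{1}, new_location{1};
      Object* obj = &old_location;
      {
        HandleWrapperSketch wrapper(&obj);
        *wrapper.AddressOfRef() = &new_location;  // simulate a moving GC
      }  // destructor writes the new location back into obj
      assert(obj == &new_location);
      return 0;
    }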
Diffstat (limited to 'runtime/gc/heap-inl.h')
-rw-r--r--  runtime/gc/heap-inl.h  20
1 file changed, 8 insertions(+), 12 deletions(-)
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index a06f272b55..7cee5a094c 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -27,7 +27,7 @@
#include "gc/space/large_object_space.h"
#include "gc/space/rosalloc_space-inl.h"
#include "runtime.h"
-#include "sirt_ref-inl.h"
+#include "handle_scope-inl.h"
#include "thread.h"
#include "thread-inl.h"
#include "verify_object-inl.h"
@@ -144,10 +144,10 @@ inline void Heap::PushOnAllocationStack(Thread* self, mirror::Object** obj) {
mirror::Object** end_address;
while (!allocation_stack_->AtomicBumpBack(kThreadLocalAllocationStackSize,
&start_address, &end_address)) {
- // Disable verify object in SirtRef as obj isn't on the alloc stack yet.
- SirtRefNoVerify<mirror::Object> ref(self, *obj);
+ // TODO: Add handle VerifyObject.
+ StackHandleScope<1> hs(self);
+ HandleWrapper<mirror::Object> wrapper(hs.NewHandleWrapper(obj));
CollectGarbageInternal(collector::kGcTypeSticky, kGcCauseForAlloc, false);
- *obj = ref.get();
}
self->SetThreadLocalAllocationStack(start_address, end_address);
// Retry on the new thread-local allocation stack.
@@ -159,10 +159,10 @@ inline void Heap::PushOnAllocationStack(Thread* self, mirror::Object** obj) {
// This is safe to do since the GC will never free objects which are neither in the allocation
// stack or the live bitmap.
while (!allocation_stack_->AtomicPushBack(*obj)) {
- // Disable verify object in SirtRef as obj isn't on the alloc stack yet.
- SirtRefNoVerify<mirror::Object> ref(self, *obj);
+ // TODO: Add handle VerifyObject.
+ StackHandleScope<1> hs(self);
+ HandleWrapper<mirror::Object> wrapper(hs.NewHandleWrapper(obj));
CollectGarbageInternal(collector::kGcTypeSticky, kGcCauseForAlloc, false);
- *obj = ref.get();
}
}
}
@@ -300,11 +300,7 @@ inline bool Heap::IsOutOfMemoryOnAllocation(AllocatorType allocator_type, size_t
inline void Heap::CheckConcurrentGC(Thread* self, size_t new_num_bytes_allocated,
mirror::Object** obj) {
if (UNLIKELY(new_num_bytes_allocated >= concurrent_start_bytes_)) {
- // The SirtRef is necessary since the calls in RequestConcurrentGC are a safepoint.
- SirtRef<mirror::Object> ref(self, *obj);
- RequestConcurrentGC(self);
- // Restore obj in case it moved.
- *obj = ref.get();
+ RequestConcurrentGCAndSaveObject(self, obj);
}
}
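
The body of the new RequestConcurrentGCAndSaveObject helper is not part of this diff. A plausible reconstruction under the same HandleWrapper pattern used above might look like the following (hypothetical sketch, not taken from heap.cc):

    inline void Heap::RequestConcurrentGCAndSaveObject(Thread* self, mirror::Object** obj) {
      // Keep *obj valid across the safepoint inside RequestConcurrentGC;
      // the wrapper writes the possibly-moved reference back on destruction.
      StackHandleScope<1> hs(self);
      HandleWrapper<mirror::Object> wrapper(hs.NewHandleWrapper(obj));
      RequestConcurrentGC(self);
    }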