author    Ben Murdoch <benm@google.com>  2011-05-05 13:52:32 +0100
committer Ben Murdoch <benm@google.com>  2011-05-10 15:41:47 +0100
commit    b0fe1620dcb4135ac3ab2d66ff93072373911299 (patch)
tree      3487cdc7e01ec56a6f84ea20f4bae596a0b73986 /src/heap.cc
parent    df5bff59602802b769e994b0dc1d8869a27fa40c (diff)
Update V8 to r6101 as required by WebKit r74534
Change-Id: I7f84af8dd732f11898fd644b2c2b1538914cb78d
Diffstat (limited to 'src/heap.cc')
-rw-r--r--  src/heap.cc | 161
1 file changed, 148 insertions(+), 13 deletions(-)
diff --git a/src/heap.cc b/src/heap.cc
index 16415ad3..1e999916 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -38,10 +38,12 @@
#include "mark-compact.h"
#include "natives.h"
#include "objects-visiting.h"
+#include "runtime-profiler.h"
#include "scanner-base.h"
#include "scopeinfo.h"
#include "snapshot.h"
#include "v8threads.h"
+#include "vm-state-inl.h"
#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#include "regexp-macro-assembler.h"
#include "arm/regexp-macro-assembler-arm.h"
@@ -839,6 +841,8 @@ void Heap::MarkCompactPrologue(bool is_compacting) {
ContextSlotCache::Clear();
DescriptorLookupCache::Clear();
+ RuntimeProfiler::MarkCompactPrologue(is_compacting);
+
CompilationCache::MarkCompactPrologue();
CompletelyClearInstanceofCache();
@@ -1049,6 +1053,14 @@ void Heap::Scavenge() {
// Scavenge objects reachable from the global contexts list directly.
scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
+ // Scavenge objects reachable from the runtime-profiler sampler
+ // window directly.
+ Object** sampler_window_address = RuntimeProfiler::SamplerWindowAddress();
+ int sampler_window_size = RuntimeProfiler::SamplerWindowSize();
+ scavenge_visitor.VisitPointers(
+ sampler_window_address,
+ sampler_window_address + sampler_window_size);
+
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
UpdateNewSpaceReferencesInExternalStringTable(
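For context on the new call: VisitPointers takes a half-open [start, end) range of object slots, which is why Scavenge() passes sampler_window_address and sampler_window_address + sampler_window_size. A minimal self-contained sketch of that convention, using simplified stand-in types rather than the real V8 visitor classes:

    #include <cstdio>

    // Hypothetical stand-in for an ObjectVisitor walking a half-open
    // [start, end) range of object slots, as the sampler window is
    // visited in Scavenge() above.
    struct SlotVisitor {
      void VisitPointers(int** start, int** end) {
        for (int** p = start; p < end; p++) {
          if (*p != NULL) std::printf("visited slot holding %d\n", **p);
        }
      }
    };

    int main() {
      int a = 1, b = 2;
      int* sampler_window[3] = { &a, NULL, &b };
      SlotVisitor visitor;
      // The end pointer is start + size, matching the call above.
      visitor.VisitPointers(sampler_window, sampler_window + 3);
      return 0;
    }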
@@ -1116,6 +1128,40 @@ void Heap::UpdateNewSpaceReferencesInExternalStringTable(
}
+static Object* ProcessFunctionWeakReferences(Object* function,
+ WeakObjectRetainer* retainer) {
+ Object* head = Heap::undefined_value();
+ JSFunction* tail = NULL;
+ Object* candidate = function;
+ while (!candidate->IsUndefined()) {
+ // Check whether to keep the candidate in the list.
+ JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
+ Object* retain = retainer->RetainAs(candidate);
+ if (retain != NULL) {
+ if (head->IsUndefined()) {
+ // First element in the list.
+ head = candidate_function;
+ } else {
+ // Subsequent elements in the list.
+ ASSERT(tail != NULL);
+ tail->set_next_function_link(candidate_function);
+ }
+ // Retained function is new tail.
+ tail = candidate_function;
+ }
+ // Move to next element in the list.
+ candidate = candidate_function->next_function_link();
+ }
+
+ // Terminate the list if there are one or more elements.
+ if (tail != NULL) {
+ tail->set_next_function_link(Heap::undefined_value());
+ }
+
+ return head;
+}
+
+
void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
Object* head = undefined_value();
Context* tail = NULL;
@@ -1137,6 +1183,15 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
}
// Retained context is new tail.
tail = candidate_context;
+
+ // Process the weak list of optimized functions for the context.
+ Object* function_list_head =
+ ProcessFunctionWeakReferences(
+ candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
+ retainer);
+ candidate_context->set_unchecked(Context::OPTIMIZED_FUNCTIONS_LIST,
+ function_list_head,
+ UPDATE_WRITE_BARRIER);
}
// Move to next element in the list.
candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
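These two functions perform the same unlinking pass at two levels: ProcessWeakReferences walks the weak list of global contexts, and for each retained context, ProcessFunctionWeakReferences walks that context's weak list of optimized functions. A minimal self-contained sketch of the pattern, with simplified stand-in types in place of the real V8 classes (undefined_value() is modeled as NULL):

    #include <cstdio>

    // Hypothetical stand-ins for JSFunction and WeakObjectRetainer.
    struct Node {
      int id;
      bool alive;   // what a real retainer would decide during GC
      Node* next;
    };

    // Returns the node itself if it should be kept, NULL otherwise,
    // mirroring WeakObjectRetainer::RetainAs.
    static Node* RetainAs(Node* candidate) {
      return candidate->alive ? candidate : NULL;
    }

    // Keeps retained nodes, links past dropped ones, and re-terminates
    // the tail, like ProcessFunctionWeakReferences above.
    static Node* ProcessWeakList(Node* list) {
      Node* head = NULL;
      Node* tail = NULL;
      for (Node* candidate = list; candidate != NULL;
           candidate = candidate->next) {
        if (Node* retained = RetainAs(candidate)) {
          if (head == NULL) {
            head = retained;        // first survivor is the new head
          } else {
            tail->next = retained;  // unlink any dropped nodes between
          }
          tail = retained;
        }
      }
      if (tail != NULL) tail->next = NULL;  // terminate the list
      return head;
    }

    int main() {
      Node c = { 3, true, NULL };
      Node b = { 2, false, &c };   // dead: will be unlinked
      Node a = { 1, true, &b };
      for (Node* n = ProcessWeakList(&a); n != NULL; n = n->next)
        std::printf("%d ", n->id);  // prints: 1 3
      std::printf("\n");
      return 0;
    }

Because dead elements are skipped while relinking and the surviving tail is re-terminated, the list holds no stale pointers after a GC; the next_function_link field initialized in the InitializeFunction hunk below is what threads functions onto this list.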
@@ -1651,6 +1706,11 @@ bool Heap::CreateInitialMaps() {
}
set_byte_array_map(Map::cast(obj));
+ { MaybeObject* maybe_obj = AllocateByteArray(0, TENURED);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_empty_byte_array(ByteArray::cast(obj));
+
{ MaybeObject* maybe_obj =
AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
if (!maybe_obj->ToObject(&obj)) return false;
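The new empty-byte-array allocation follows the idiom used throughout heap.cc: every allocator returns a MaybeObject*, and the caller unpacks it with ToObject(), bailing out on allocation failure. A simplified stand-in for the idiom (hypothetical names, not the real V8 implementation):

    #include <cstdio>

    // Hypothetical stand-in for MaybeObject: either a value or a
    // failure marker, unpacked with ToObject().
    struct MaybeObj {
      void* value;
      bool failed;
      bool ToObject(void** out) {
        if (failed) return false;  // caller must bail out
        *out = value;
        return true;
      }
    };

    // Pretend allocator: fails for oversized requests.
    static MaybeObj AllocateByteArrayStub(int length) {
      MaybeObj result = { NULL, length > 1024 };
      if (!result.failed)
        result.value = new char[length + 1];  // never freed; fine for a sketch
      return result;
    }

    static bool CreateArrays() {
      void* obj;
      { MaybeObj maybe_obj = AllocateByteArrayStub(0);
        if (!maybe_obj.ToObject(&obj)) return false;  // propagate failure
      }
      std::printf("empty byte array allocated\n");
      return true;
    }

    int main() { return CreateArrays() ? 0 : 1; }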
@@ -2245,9 +2305,11 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_debug_info(undefined_value());
share->set_inferred_name(empty_string());
share->set_compiler_hints(0);
+ share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
share->set_initial_map(undefined_value());
share->set_this_property_assignments_count(0);
share->set_this_property_assignments(undefined_value());
+ share->set_opt_count(0);
share->set_num_literals(0);
share->set_end_position(0);
share->set_function_token_position(0);
@@ -2666,6 +2728,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
code->set_instruction_size(desc.instr_size);
code->set_relocation_info(ByteArray::cast(reloc_info));
code->set_flags(flags);
+ code->set_deoptimization_data(empty_fixed_array());
// Allow self references to created code object by patching the handle to
// point to the newly allocated Code object.
if (!self_reference.is_null()) {
@@ -2794,6 +2857,7 @@ MaybeObject* Heap::InitializeFunction(JSFunction* function,
function->set_prototype_or_initial_map(prototype);
function->set_context(undefined_value());
function->set_literals(empty_fixed_array());
+ function->set_next_function_link(undefined_value());
return function;
}
@@ -4419,7 +4483,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
MemoryAllocator::Size() + MemoryAllocator::Available();
*stats->os_error = OS::GetLastError();
if (take_snapshot) {
- HeapIterator iterator(HeapIterator::kPreciseFiltering);
+ HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
for (HeapObject* obj = iterator.next();
obj != NULL;
obj = iterator.next()) {
@@ -4853,13 +4917,20 @@ ObjectIterator* SpaceIterator::CreateIterator() {
}
-class FreeListNodesFilter {
+class HeapObjectsFilter {
+ public:
+ virtual ~HeapObjectsFilter() {}
+ virtual bool SkipObject(HeapObject* object) = 0;
+};
+
+
+class FreeListNodesFilter : public HeapObjectsFilter {
public:
FreeListNodesFilter() {
MarkFreeListNodes();
}
- inline bool IsFreeListNode(HeapObject* object) {
+ bool SkipObject(HeapObject* object) {
if (object->IsMarked()) {
object->ClearMark();
return true;
@@ -4891,6 +4962,65 @@ class FreeListNodesFilter {
};
+class UnreachableObjectsFilter : public HeapObjectsFilter {
+ public:
+ UnreachableObjectsFilter() {
+ MarkUnreachableObjects();
+ }
+
+ bool SkipObject(HeapObject* object) {
+ if (object->IsMarked()) {
+ object->ClearMark();
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ private:
+ class UnmarkingVisitor : public ObjectVisitor {
+ public:
+ UnmarkingVisitor() : list_(10) {}
+
+ void VisitPointers(Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) {
+ if (!(*p)->IsHeapObject()) continue;
+ HeapObject* obj = HeapObject::cast(*p);
+ if (obj->IsMarked()) {
+ obj->ClearMark();
+ list_.Add(obj);
+ }
+ }
+ }
+
+ bool can_process() { return !list_.is_empty(); }
+
+ void ProcessNext() {
+ HeapObject* obj = list_.RemoveLast();
+ obj->Iterate(this);
+ }
+
+ private:
+ List<HeapObject*> list_;
+ };
+
+ void MarkUnreachableObjects() {
+ HeapIterator iterator;
+ for (HeapObject* obj = iterator.next();
+ obj != NULL;
+ obj = iterator.next()) {
+ obj->SetMark();
+ }
+ UnmarkingVisitor visitor;
+ Heap::IterateRoots(&visitor, VISIT_ONLY_STRONG);
+ while (visitor.can_process())
+ visitor.ProcessNext();
+ }
+
+ AssertNoAllocation no_alloc;
+};
+
+
HeapIterator::HeapIterator()
: filtering_(HeapIterator::kNoFiltering),
filter_(NULL) {
@@ -4898,7 +5028,7 @@ HeapIterator::HeapIterator()
}
-HeapIterator::HeapIterator(HeapIterator::FreeListNodesFiltering filtering)
+HeapIterator::HeapIterator(HeapIterator::HeapObjectsFiltering filtering)
: filtering_(filtering),
filter_(NULL) {
Init();
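This hunk turns the single concrete filter into a small abstract interface so HeapIterator can be parameterized over a filtering strategy, and new strategies can be added without touching the iterator itself. A minimal self-contained sketch of the shape, with simplified stand-in types:

    #include <cstdio>

    // Simplified stand-in for HeapObject.
    struct Obj { bool free_list_node; bool reachable; };

    // The abstract filter: each strategy decides which objects to skip,
    // mirroring HeapObjectsFilter::SkipObject above.
    class ObjectsFilter {
     public:
      virtual ~ObjectsFilter() {}
      virtual bool SkipObject(Obj* object) = 0;
    };

    class FreeListFilter : public ObjectsFilter {
     public:
      virtual bool SkipObject(Obj* object) { return object->free_list_node; }
    };

    class UnreachableFilter : public ObjectsFilter {
     public:
      virtual bool SkipObject(Obj* object) { return !object->reachable; }
    };

    // The iterator consults the filter through the base pointer, just
    // as HeapIterator::next() calls filter_->SkipObject(obj).
    static void Iterate(Obj* objects, int count, ObjectsFilter* filter) {
      for (int i = 0; i < count; i++) {
        if (filter != NULL && filter->SkipObject(&objects[i])) continue;
        std::printf("object %d survives filtering\n", i);
      }
    }

    int main() {
      Obj heap[3] = { { false, true }, { true, true }, { false, false } };
      UnreachableFilter filter;
      Iterate(heap, 3, &filter);  // skips heap[2], the unreachable one
      return 0;
    }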
@@ -4912,12 +5042,17 @@ HeapIterator::~HeapIterator() {
void HeapIterator::Init() {
// Start the iteration.
- if (filtering_ == kPreciseFiltering) {
- filter_ = new FreeListNodesFilter;
- space_iterator_ =
- new SpaceIterator(MarkCompactCollector::SizeOfMarkedObject);
- } else {
- space_iterator_ = new SpaceIterator;
+ space_iterator_ = filtering_ == kNoFiltering ? new SpaceIterator :
+ new SpaceIterator(MarkCompactCollector::SizeOfMarkedObject);
+ switch (filtering_) {
+ case kFilterFreeListNodes:
+ filter_ = new FreeListNodesFilter;
+ break;
+ case kFilterUnreachable:
+ filter_ = new UnreachableObjectsFilter;
+ break;
+ default:
+ break;
}
object_iterator_ = space_iterator_->next();
}
@@ -4925,9 +5060,9 @@ void HeapIterator::Init() {
void HeapIterator::Shutdown() {
#ifdef DEBUG
- // Assert that in precise mode we have iterated through all
+ // Assert that in filtering mode we have iterated through all
// objects. Otherwise, heap will be left in an inconsistent state.
- if (filtering_ == kPreciseFiltering) {
+ if (filtering_ != kNoFiltering) {
ASSERT(object_iterator_ == NULL);
}
#endif
@@ -4944,7 +5079,7 @@ HeapObject* HeapIterator::next() {
if (filter_ == NULL) return NextObject();
HeapObject* obj = NextObject();
- while (obj != NULL && filter_->IsFreeListNode(obj)) obj = NextObject();
+ while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();
return obj;
}
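Taken together, callers now pick a filtering strategy when constructing the iterator, as the RecordStats hunk above does with kFilterFreeListNodes. A hypothetical debug helper in the same style (assumes the surrounding V8 declarations from this patch; not part of the commit):

    // Count objects that survive unreachable-object filtering.
    static int CountReachableObjects() {
      HeapIterator iterator(HeapIterator::kFilterUnreachable);
      int count = 0;
      for (HeapObject* obj = iterator.next();
           obj != NULL;
           obj = iterator.next()) {
        count++;  // only objects the filter did not skip are returned
      }
      return count;
    }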