author     Ben Murdoch <benm@google.com>                                 2012-05-21 14:31:26 -0700
committer  Android Git Automerger <android-git-automerger@android.com>  2012-05-21 14:31:26 -0700
commit     067f904298c3c63313c6b5c8659c7daa5264714f (patch)
tree       8292205315f0445a476d55e85ef077f494ff7efd
parent     a52989282d6fb5ddd8b3a0aa23eff80ac25db913 (diff)
parent     7523901fd9f17e9755b1e4e37201edade0dc24f6 (diff)
am 7523901f: am 5710ceac: Merge V8 at 3.9.24.28
* commit '7523901fd9f17e9755b1e4e37201edade0dc24f6': Merge V8 at 3.9.24.28
-rw-r--r--  V8_MERGE_REVISION                          4
-rw-r--r--  src/arm/full-codegen-arm.cc               98
-rw-r--r--  src/arm/stub-cache-arm.cc                 15
-rw-r--r--  src/array.js                              10
-rw-r--r--  src/builtins.cc                            9
-rw-r--r--  src/hydrogen.cc                            8
-rw-r--r--  src/ia32/full-codegen-ia32.cc             93
-rw-r--r--  src/ia32/stub-cache-ia32.cc               22
-rw-r--r--  src/mips/full-codegen-mips.cc             98
-rw-r--r--  src/objects-printer.cc                    16
-rw-r--r--  src/objects.cc                             9
-rw-r--r--  src/runtime.cc                            30
-rw-r--r--  src/runtime.h                              4
-rw-r--r--  src/version.cc                             2
-rw-r--r--  src/x64/full-codegen-x64.cc               96
-rw-r--r--  src/x64/stub-cache-x64.cc                 15
-rw-r--r--  test/cctest/test-api.cc                   24
-rw-r--r--  test/mjsunit/regress/regress-117409.js    52
-rw-r--r--  test/mjsunit/regress/regress-128018.js    35
19 files changed, 183 insertions(+), 457 deletions(-)
diff --git a/V8_MERGE_REVISION b/V8_MERGE_REVISION
index 35f9ea13..58dc4946 100644
--- a/V8_MERGE_REVISION
+++ b/V8_MERGE_REVISION
@@ -1,2 +1,2 @@
-V8 3.9.24.25
-http://v8.googlecode.com/svn/branches/3.9@11584
+V8 3.9.24.28
+http://v8.googlecode.com/svn/branches/3.9@11611
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 831467ac..69b12ce5 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -3362,104 +3362,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 3);
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
- Label done;
- Label slow_case;
- Register object = r0;
- Register index1 = r1;
- Register index2 = r2;
- Register elements = r3;
- Register scratch1 = r4;
- Register scratch2 = r5;
-
- __ ldr(object, MemOperand(sp, 2 * kPointerSize));
- // Fetch the map and check if array is in fast case.
- // Check that object doesn't require security checks and
- // has no indexed interceptor.
- __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
- __ b(ne, &slow_case);
- // Map is now in scratch1.
-
- __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
- __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
- __ b(ne, &slow_case);
-
- // Check the object's elements are in fast case and writable.
- __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
- __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
- __ cmp(scratch1, ip);
- __ b(ne, &slow_case);
-
- // Check that both indices are smis.
- __ ldr(index1, MemOperand(sp, 1 * kPointerSize));
- __ ldr(index2, MemOperand(sp, 0));
- __ JumpIfNotBothSmi(index1, index2, &slow_case);
-
- // Check that both indices are valid.
- __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
- __ cmp(scratch1, index1);
- __ cmp(scratch1, index2, hi);
- __ b(ls, &slow_case);
-
- // Bring the address of the elements into index1 and index2.
- __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(index1,
- scratch1,
- Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ add(index2,
- scratch1,
- Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
-
- // Swap elements.
- __ ldr(scratch1, MemOperand(index1, 0));
- __ ldr(scratch2, MemOperand(index2, 0));
- __ str(scratch1, MemOperand(index2, 0));
- __ str(scratch2, MemOperand(index1, 0));
-
- Label no_remembered_set;
- __ CheckPageFlag(elements,
- scratch1,
- 1 << MemoryChunk::SCAN_ON_SCAVENGE,
- ne,
- &no_remembered_set);
- // Possible optimization: do a check that both values are Smis
- // (or them and test against Smi mask.)
-
- // We are swapping two objects in an array and the incremental marker never
- // pauses in the middle of scanning a single object. Therefore the
- // incremental marker is not disturbed, so we don't need to call the
- // RecordWrite stub that notifies the incremental marker.
- __ RememberedSetHelper(elements,
- index1,
- scratch2,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
- __ RememberedSetHelper(elements,
- index2,
- scratch2,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
-
- __ bind(&no_remembered_set);
- // We are done. Drop elements from the stack, and return undefined.
- __ Drop(3);
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
- __ jmp(&done);
-
- __ bind(&slow_case);
- __ CallRuntime(Runtime::kSwapElements, 3);
-
- __ bind(&done);
- context()->Plug(r0);
-}
-
-
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 98a3cdcb..d514b607 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -1266,12 +1266,19 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
name, miss);
ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
+ // Preserve the receiver register explicitly whenever it is different from
+ // the holder and it is needed should the interceptor return without any
+ // result. The CALLBACKS case needs the receiver to be passed into C++ code,
+ // the FIELD case might cause a miss during the prototype check.
+ bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
+ bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
+ (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
+
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
{
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- // CALLBACKS case needs a receiver to be passed into C++ callback.
+ if (must_preserve_receiver_reg) {
__ Push(receiver, holder_reg, name_reg);
} else {
__ Push(holder_reg, name_reg);
@@ -1296,14 +1303,14 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
__ bind(&interceptor_failed);
__ pop(name_reg);
__ pop(holder_reg);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ if (must_preserve_receiver_reg) {
__ pop(receiver);
}
// Leave the internal frame.
}
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into |holder| register.
- if (*interceptor_holder != lookup->holder()) {
+ if (must_perfrom_prototype_check) {
holder_reg = CheckPrototypes(interceptor_holder,
holder_reg,
Handle<JSObject>(lookup->holder()),
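
The receiver-preservation predicate introduced above is added verbatim to the ia32 and x64 stub compilers further down. A minimal standalone C++ sketch of the decision the new hunks encode; the types and names here are simplified stand-ins (in V8 the inputs come from LookupResult, the interceptor holder handle and the register allocator), so this is an illustration of the condition, not the stub compiler's API:

#include <iostream>

enum PropertyType { FIELD, CALLBACKS };

bool MustPreserveReceiverReg(bool receiver_is_holder_reg,
                             PropertyType lookup_type,
                             bool interceptor_holder_differs_from_lookup_holder) {
  // A prototype check is needed after an interceptor miss whenever the
  // interceptor's holder is not the object that finally holds the property.
  bool must_perform_prototype_check =
      interceptor_holder_differs_from_lookup_holder;
  // The receiver register must survive the interceptor call if it is distinct
  // from the holder register and is still needed afterwards: CALLBACKS passes
  // it into the C++ callback, FIELD may miss during the prototype check.
  return !receiver_is_holder_reg &&
         (lookup_type == CALLBACKS || must_perform_prototype_check);
}

int main() {
  // The FIELD-behind-a-prototype case (exercised by Regress125988 below) now
  // preserves the receiver; before this change only CALLBACKS did.
  std::cout << std::boolalpha
            << MustPreserveReceiverReg(false, FIELD, true) << "\n"       // true
            << MustPreserveReceiverReg(false, CALLBACKS, false) << "\n"  // true
            << MustPreserveReceiverReg(false, FIELD, false) << "\n";     // false
  return 0;
}
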
diff --git a/src/array.js b/src/array.js
index daa75d57..00a4fee5 100644
--- a/src/array.js
+++ b/src/array.js
@@ -823,7 +823,8 @@ function ArraySort(comparefn) {
var element = a[i];
var order = %_CallFunction(receiver, element, pivot, comparefn);
if (order < 0) {
- %_SwapElements(a, i, low_end);
+ a[i] = a[low_end];
+ a[low_end] = element;
low_end++;
} else if (order > 0) {
do {
@@ -832,9 +833,12 @@ function ArraySort(comparefn) {
var top_elem = a[high_start];
order = %_CallFunction(receiver, top_elem, pivot, comparefn);
} while (order > 0);
- %_SwapElements(a, i, high_start);
+ a[i] = a[high_start];
+ a[high_start] = element;
if (order < 0) {
- %_SwapElements(a, i, low_end);
+ element = a[i];
+ a[i] = a[low_end];
+ a[low_end] = element;
low_end++;
}
}
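
With the %_SwapElements intrinsic gone (see the runtime.cc and full-codegen removals below), the partition step above spells the exchange out with plain element reads and writes. A minimal C++ sketch of that step, assuming a dense double array and an ordinary less-than comparison in place of the user-supplied comparator:

#include <iostream>
#include <vector>

// Sketch of the "order < 0" branch of the partition loop: `element` is loaded
// once before the comparison, and the swap with a[low_end] is written out as
// two stores instead of a %_SwapElements call.
void PartitionStep(std::vector<double>& a, int i, int& low_end, double pivot) {
  double element = a[i];
  if (element < pivot) {
    a[i] = a[low_end];
    a[low_end] = element;
    ++low_end;
  }
}

int main() {
  std::vector<double> a = {3.0, 1.0, 4.0, 1.5, 0.5};
  int low_end = 0;
  for (int i = 0; i < static_cast<int>(a.size()); ++i) {
    PartitionStep(a, i, low_end, 2.0);
  }
  for (double x : a) std::cout << x << ' ';  // values below the pivot come first
  std::cout << '\n';
  return 0;
}
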
diff --git a/src/builtins.cc b/src/builtins.cc
index 0f493e6e..01e88f55 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -412,12 +412,17 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
HeapObject* elms = array->elements();
Map* map = elms->map();
if (map == heap->fixed_array_map()) {
- if (args == NULL || !array->HasFastSmiOnlyElements()) {
+ if (args == NULL || array->HasFastElements()) return elms;
+ if (array->HasFastDoubleElements()) {
+ ASSERT(elms == heap->empty_fixed_array());
+ MaybeObject* maybe_transition =
+ array->TransitionElementsKind(FAST_ELEMENTS);
+ if (maybe_transition->IsFailure()) return maybe_transition;
return elms;
}
} else if (map == heap->fixed_cow_array_map()) {
MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
- if (args == NULL || !array->HasFastSmiOnlyElements() ||
+ if (args == NULL || array->HasFastElements() ||
maybe_writable_result->IsFailure()) {
return maybe_writable_result;
}
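
A minimal, deliberately simplified sketch of the branch added above to EnsureJSArrayWithWritableFastElements, using hypothetical stand-in types: an array whose kind is still FAST_DOUBLE_ELEMENTS but whose backing store already carries the plain fixed-array map can only be holding the shared empty fixed array, so its kind is transitioned to FAST_ELEMENTS before the store is treated as writable fast elements. The args == NULL shortcut and the smi-only fall-through of the real builtin are omitted here.

#include <cassert>

enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS };

struct ArrayState {
  ElementsKind kind;
  bool elements_is_empty_fixed_array;
};

// Returns true when the existing backing store may be used directly,
// transitioning the kind first when necessary; returns false for the
// smi-only case, which the real builtin handles further down.
bool EnsureWritableFastElements(ArrayState* array) {
  if (array->kind == FAST_ELEMENTS) return true;
  if (array->kind == FAST_DOUBLE_ELEMENTS) {
    // With a fixed-array map on the store, this can only be the empty array.
    assert(array->elements_is_empty_fixed_array);
    array->kind = FAST_ELEMENTS;  // models TransitionElementsKind(FAST_ELEMENTS)
    return true;
  }
  return false;
}

int main() {
  // The regress-117409 shape: a truncated double array being pushed into.
  ArrayState a = {FAST_DOUBLE_ELEMENTS, true};
  bool ok = EnsureWritableFastElements(&a);
  assert(ok && a.kind == FAST_ELEMENTS);
  return 0;
}
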
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 921aba10..fd7560a3 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -7465,14 +7465,6 @@ void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
}
-// Fast swapping of elements. Takes three expressions, the object and two
-// indices. This should only be used if the indices are known to be
-// non-negative and within bounds of the elements array at the call site.
-void HGraphBuilder::GenerateSwapElements(CallRuntime* call) {
- return Bailout("inlined runtime function: SwapElements");
-}
-
-
// Fast call for custom callbacks.
void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
// 1 ~ The function to call is not itself an argument to the call.
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 62a2c2af..cf16c5b6 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -3305,99 +3305,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 3);
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
- Label done;
- Label slow_case;
- Register object = eax;
- Register index_1 = ebx;
- Register index_2 = ecx;
- Register elements = edi;
- Register temp = edx;
- __ mov(object, Operand(esp, 2 * kPointerSize));
- // Fetch the map and check if array is in fast case.
- // Check that object doesn't require security checks and
- // has no indexed interceptor.
- __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
- __ j(not_equal, &slow_case);
- __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
- KeyedLoadIC::kSlowCaseBitFieldMask);
- __ j(not_zero, &slow_case);
-
- // Check the object's elements are in fast case and writable.
- __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
- __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
- Immediate(isolate()->factory()->fixed_array_map()));
- __ j(not_equal, &slow_case);
-
- // Check that both indices are smis.
- __ mov(index_1, Operand(esp, 1 * kPointerSize));
- __ mov(index_2, Operand(esp, 0));
- __ mov(temp, index_1);
- __ or_(temp, index_2);
- __ JumpIfNotSmi(temp, &slow_case);
-
- // Check that both indices are valid.
- __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
- __ cmp(temp, index_1);
- __ j(below_equal, &slow_case);
- __ cmp(temp, index_2);
- __ j(below_equal, &slow_case);
-
- // Bring addresses into index1 and index2.
- __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
- __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
-
- // Swap elements. Use object and temp as scratch registers.
- __ mov(object, Operand(index_1, 0));
- __ mov(temp, Operand(index_2, 0));
- __ mov(Operand(index_2, 0), object);
- __ mov(Operand(index_1, 0), temp);
-
- Label no_remembered_set;
- __ CheckPageFlag(elements,
- temp,
- 1 << MemoryChunk::SCAN_ON_SCAVENGE,
- not_zero,
- &no_remembered_set,
- Label::kNear);
- // Possible optimization: do a check that both values are Smis
- // (or them and test against Smi mask.)
-
- // We are swapping two objects in an array and the incremental marker never
- // pauses in the middle of scanning a single object. Therefore the
- // incremental marker is not disturbed, so we don't need to call the
- // RecordWrite stub that notifies the incremental marker.
- __ RememberedSetHelper(elements,
- index_1,
- temp,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
- __ RememberedSetHelper(elements,
- index_2,
- temp,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
-
- __ bind(&no_remembered_set);
-
- // We are done. Drop elements from the stack, and return undefined.
- __ add(esp, Immediate(3 * kPointerSize));
- __ mov(eax, isolate()->factory()->undefined_value());
- __ jmp(&done);
-
- __ bind(&slow_case);
- __ CallRuntime(Runtime::kSwapElements, 3);
-
- __ bind(&done);
- context()->Plug(eax);
-}
-
-
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 5a5afdc7..eb86b2f5 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1124,13 +1124,20 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
name, miss);
ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
+ // Preserve the receiver register explicitly whenever it is different from
+ // the holder and it is needed should the interceptor return without any
+ // result. The CALLBACKS case needs the receiver to be passed into C++ code,
+ // the FIELD case might cause a miss during the prototype check.
+ bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
+ bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
+ (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
+
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
{
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- // CALLBACKS case needs a receiver to be passed into C++ callback.
+ if (must_preserve_receiver_reg) {
__ push(receiver);
}
__ push(holder_reg);
@@ -1153,10 +1160,17 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
frame_scope.GenerateLeaveFrame();
__ ret(0);
+ // Clobber registers when generating debug-code to provoke errors.
__ bind(&interceptor_failed);
+ if (FLAG_debug_code) {
+ __ mov(receiver, Immediate(BitCast<int32_t>(kZapValue)));
+ __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
+ __ mov(name_reg, Immediate(BitCast<int32_t>(kZapValue)));
+ }
+
__ pop(name_reg);
__ pop(holder_reg);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ if (must_preserve_receiver_reg) {
__ pop(receiver);
}
@@ -1165,7 +1179,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into holder_reg.
- if (*interceptor_holder != lookup->holder()) {
+ if (must_perfrom_prototype_check) {
holder_reg = CheckPrototypes(interceptor_holder,
holder_reg,
Handle<JSObject>(lookup->holder()),
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 778d140b..4b58fc8c 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -3396,104 +3396,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 3);
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
- Label done;
- Label slow_case;
- Register object = a0;
- Register index1 = a1;
- Register index2 = a2;
- Register elements = a3;
- Register scratch1 = t0;
- Register scratch2 = t1;
-
- __ lw(object, MemOperand(sp, 2 * kPointerSize));
- // Fetch the map and check if array is in fast case.
- // Check that object doesn't require security checks and
- // has no indexed interceptor.
- __ GetObjectType(object, scratch1, scratch2);
- __ Branch(&slow_case, ne, scratch2, Operand(JS_ARRAY_TYPE));
- // Map is now in scratch1.
-
- __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
- __ And(scratch2, scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
- __ Branch(&slow_case, ne, scratch2, Operand(zero_reg));
-
- // Check the object's elements are in fast case and writable.
- __ lw(elements, FieldMemOperand(object, JSObject::kElementsOffset));
- __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ LoadRoot(scratch2, Heap::kFixedArrayMapRootIndex);
- __ Branch(&slow_case, ne, scratch1, Operand(scratch2));
-
- // Check that both indices are smis.
- __ lw(index1, MemOperand(sp, 1 * kPointerSize));
- __ lw(index2, MemOperand(sp, 0));
- __ JumpIfNotBothSmi(index1, index2, &slow_case);
-
- // Check that both indices are valid.
- Label not_hi;
- __ lw(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
- __ Branch(&slow_case, ls, scratch1, Operand(index1));
- __ Branch(&not_hi, NegateCondition(hi), scratch1, Operand(index1));
- __ Branch(&slow_case, ls, scratch1, Operand(index2));
- __ bind(&not_hi);
-
- // Bring the address of the elements into index1 and index2.
- __ Addu(scratch1, elements,
- Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ sll(index1, index1, kPointerSizeLog2 - kSmiTagSize);
- __ Addu(index1, scratch1, index1);
- __ sll(index2, index2, kPointerSizeLog2 - kSmiTagSize);
- __ Addu(index2, scratch1, index2);
-
- // Swap elements.
- __ lw(scratch1, MemOperand(index1, 0));
- __ lw(scratch2, MemOperand(index2, 0));
- __ sw(scratch1, MemOperand(index2, 0));
- __ sw(scratch2, MemOperand(index1, 0));
-
- Label no_remembered_set;
- __ CheckPageFlag(elements,
- scratch1,
- 1 << MemoryChunk::SCAN_ON_SCAVENGE,
- ne,
- &no_remembered_set);
- // Possible optimization: do a check that both values are Smis
- // (or them and test against Smi mask).
-
- // We are swapping two objects in an array and the incremental marker never
- // pauses in the middle of scanning a single object. Therefore the
- // incremental marker is not disturbed, so we don't need to call the
- // RecordWrite stub that notifies the incremental marker.
- __ RememberedSetHelper(elements,
- index1,
- scratch2,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
- __ RememberedSetHelper(elements,
- index2,
- scratch2,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
-
- __ bind(&no_remembered_set);
- // We are done. Drop elements from the stack, and return undefined.
- __ Drop(3);
- __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
- __ jmp(&done);
-
- __ bind(&slow_case);
- __ CallRuntime(Runtime::kSwapElements, 3);
-
- __ bind(&done);
- context()->Plug(v0);
-}
-
-
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index 2353a952..a12b8138 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -328,14 +328,16 @@ void JSObject::PrintElements(FILE* out) {
}
case FAST_DOUBLE_ELEMENTS: {
// Print in array notation for non-sparse arrays.
- FixedDoubleArray* p = FixedDoubleArray::cast(elements());
- for (int i = 0; i < p->length(); i++) {
- if (p->is_the_hole(i)) {
- PrintF(out, " %d: <the hole>", i);
- } else {
- PrintF(out, " %d: %g", i, p->get_scalar(i));
+ if (elements()->length() > 0) {
+ FixedDoubleArray* p = FixedDoubleArray::cast(elements());
+ for (int i = 0; i < p->length(); i++) {
+ if (p->is_the_hole(i)) {
+ PrintF(out, " %d: <the hole>", i);
+ } else {
+ PrintF(out, " %d: %g", i, p->get_scalar(i));
+ }
+ PrintF(out, "\n");
}
- PrintF(out, "\n");
}
break;
}
diff --git a/src/objects.cc b/src/objects.cc
index 208c75b8..904cf524 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -8521,7 +8521,7 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
- FixedDoubleArray* elems;
+ FixedArrayBase* elems;
{ MaybeObject* maybe_obj =
heap->AllocateUninitializedFixedDoubleArray(capacity);
if (!maybe_obj->To(&elems)) return maybe_obj;
@@ -9672,9 +9672,10 @@ MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
ElementsKind from_kind = map()->elements_kind();
Isolate* isolate = GetIsolate();
- if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- (to_kind == FAST_ELEMENTS ||
- elements() == isolate->heap()->empty_fixed_array())) {
+ if ((from_kind == FAST_SMI_ONLY_ELEMENTS ||
+ elements() == isolate->heap()->empty_fixed_array()) &&
+ to_kind == FAST_ELEMENTS) {
+ ASSERT(from_kind != FAST_ELEMENTS);
MaybeObject* maybe_new_map = GetElementsTransitionMap(isolate, to_kind);
Map* new_map;
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
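
A minimal sketch of the relaxed guard above, with ElementsKind reduced to an enum: the cheap map-only transition now also applies when the backing store is the shared empty fixed array, not only when the current kind is FAST_SMI_ONLY_ELEMENTS, provided the target kind is FAST_ELEMENTS.

#include <cassert>

enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS };

// Models the condition guarding the map-only transition in
// JSObject::TransitionElementsKind after this change.
bool MapOnlyTransition(ElementsKind from_kind,
                       bool elements_is_empty_fixed_array,
                       ElementsKind to_kind) {
  bool taken = (from_kind == FAST_SMI_ONLY_ELEMENTS ||
                elements_is_empty_fixed_array) &&
               to_kind == FAST_ELEMENTS;
  // Mirrors the ASSERT added in the hunk: this path is never entered when the
  // object is already FAST_ELEMENTS.
  if (taken) assert(from_kind != FAST_ELEMENTS);
  return taken;
}

int main() {
  // New case: a FAST_DOUBLE_ELEMENTS array whose store is the empty fixed
  // array (the regress-117409 shape) may now switch maps without copying.
  assert(MapOnlyTransition(FAST_DOUBLE_ELEMENTS, true, FAST_ELEMENTS));
  // Unchanged case: smi-only arrays still transition to FAST_ELEMENTS cheaply.
  assert(MapOnlyTransition(FAST_SMI_ONLY_ELEMENTS, false, FAST_ELEMENTS));
  return 0;
}
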
diff --git a/src/runtime.cc b/src/runtime.cc
index 6a24f988..5996b826 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -9929,36 +9929,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_EstimateNumberOfElements) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_SwapElements) {
- HandleScope handle_scope(isolate);
-
- ASSERT_EQ(3, args.length());
-
- CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
- Handle<Object> key1 = args.at<Object>(1);
- Handle<Object> key2 = args.at<Object>(2);
-
- uint32_t index1, index2;
- if (!key1->ToArrayIndex(&index1)
- || !key2->ToArrayIndex(&index2)) {
- return isolate->ThrowIllegalOperation();
- }
-
- Handle<JSObject> jsobject = Handle<JSObject>::cast(object);
- Handle<Object> tmp1 = Object::GetElement(jsobject, index1);
- RETURN_IF_EMPTY_HANDLE(isolate, tmp1);
- Handle<Object> tmp2 = Object::GetElement(jsobject, index2);
- RETURN_IF_EMPTY_HANDLE(isolate, tmp2);
-
- RETURN_IF_EMPTY_HANDLE(
- isolate, JSObject::SetElement(jsobject, index1, tmp2, NONE, kStrictMode));
- RETURN_IF_EMPTY_HANDLE(
- isolate, JSObject::SetElement(jsobject, index2, tmp1, NONE, kStrictMode));
-
- return isolate->heap()->undefined_value();
-}
-
-
// Returns an array that tells you where in the [0, length) interval an array
// might have elements. Can either return keys (positive integers) or
// intervals (pair of a negative integer (-start-1) followed by a
diff --git a/src/runtime.h b/src/runtime.h
index 2e06263d..ef351da9 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -269,7 +269,6 @@ namespace internal {
F(GetArrayKeys, 2, 1) \
F(MoveArrayContents, 2, 1) \
F(EstimateNumberOfElements, 1, 1) \
- F(SwapElements, 3, 1) \
\
/* Getters and Setters */ \
F(LookupAccessor, 3, 1) \
@@ -530,8 +529,7 @@ namespace internal {
F(RegExpExec, 4, 1) \
F(RegExpConstructResult, 3, 1) \
F(GetFromCache, 2, 1) \
- F(NumberToString, 1, 1) \
- F(SwapElements, 3, 1)
+ F(NumberToString, 1, 1)
//---------------------------------------------------------------------------
diff --git a/src/version.cc b/src/version.cc
index 37c2ec26..558c4ac9 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 9
#define BUILD_NUMBER 24
-#define PATCH_LEVEL 25
+#define PATCH_LEVEL 28
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 85c5e758..d0c4f4dd 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -3191,102 +3191,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 3);
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
- Label done;
- Label slow_case;
- Register object = rax;
- Register index_1 = rbx;
- Register index_2 = rcx;
- Register elements = rdi;
- Register temp = rdx;
- __ movq(object, Operand(rsp, 2 * kPointerSize));
- // Fetch the map and check if array is in fast case.
- // Check that object doesn't require security checks and
- // has no indexed interceptor.
- __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
- __ j(not_equal, &slow_case);
- __ testb(FieldOperand(temp, Map::kBitFieldOffset),
- Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
- __ j(not_zero, &slow_case);
-
- // Check the object's elements are in fast case and writable.
- __ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
- __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
- Heap::kFixedArrayMapRootIndex);
- __ j(not_equal, &slow_case);
-
- // Check that both indices are smis.
- __ movq(index_1, Operand(rsp, 1 * kPointerSize));
- __ movq(index_2, Operand(rsp, 0 * kPointerSize));
- __ JumpIfNotBothSmi(index_1, index_2, &slow_case);
-
- // Check that both indices are valid.
- // The JSArray length field is a smi since the array is in fast case mode.
- __ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
- __ SmiCompare(temp, index_1);
- __ j(below_equal, &slow_case);
- __ SmiCompare(temp, index_2);
- __ j(below_equal, &slow_case);
-
- __ SmiToInteger32(index_1, index_1);
- __ SmiToInteger32(index_2, index_2);
- // Bring addresses into index1 and index2.
- __ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
- FixedArray::kHeaderSize));
- __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
- FixedArray::kHeaderSize));
-
- // Swap elements. Use object and temp as scratch registers.
- __ movq(object, Operand(index_1, 0));
- __ movq(temp, Operand(index_2, 0));
- __ movq(Operand(index_2, 0), object);
- __ movq(Operand(index_1, 0), temp);
-
- Label no_remembered_set;
- __ CheckPageFlag(elements,
- temp,
- 1 << MemoryChunk::SCAN_ON_SCAVENGE,
- not_zero,
- &no_remembered_set,
- Label::kNear);
- // Possible optimization: do a check that both values are Smis
- // (or them and test against Smi mask.)
-
- // We are swapping two objects in an array and the incremental marker never
- // pauses in the middle of scanning a single object. Therefore the
- // incremental marker is not disturbed, so we don't need to call the
- // RecordWrite stub that notifies the incremental marker.
- __ RememberedSetHelper(elements,
- index_1,
- temp,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
- __ RememberedSetHelper(elements,
- index_2,
- temp,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
-
- __ bind(&no_remembered_set);
-
- // We are done. Drop elements from the stack, and return undefined.
- __ addq(rsp, Immediate(3 * kPointerSize));
- __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
- __ jmp(&done);
-
- __ bind(&slow_case);
- __ CallRuntime(Runtime::kSwapElements, 3);
-
- __ bind(&done);
- context()->Plug(rax);
-}
-
-
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 7ce45688..96ff4993 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -1104,13 +1104,20 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
name, miss);
ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
+ // Preserve the receiver register explicitly whenever it is different from
+ // the holder and it is needed should the interceptor return without any
+ // result. The CALLBACKS case needs the receiver to be passed into C++ code,
+ // the FIELD case might cause a miss during the prototype check.
+ bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
+ bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
+ (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
+
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
{
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- // CALLBACKS case needs a receiver to be passed into C++ callback.
+ if (must_preserve_receiver_reg) {
__ push(receiver);
}
__ push(holder_reg);
@@ -1136,7 +1143,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
__ bind(&interceptor_failed);
__ pop(name_reg);
__ pop(holder_reg);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ if (must_preserve_receiver_reg) {
__ pop(receiver);
}
@@ -1145,7 +1152,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into |holder| register.
- if (*interceptor_holder != lookup->holder()) {
+ if (must_perfrom_prototype_check) {
holder_reg = CheckPrototypes(interceptor_holder,
holder_reg,
Handle<JSObject>(lookup->holder()),
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index b1a23c1e..f4ab9ad0 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -16135,6 +16135,30 @@ THREADED_TEST(Regress93759) {
}
+THREADED_TEST(Regress125988) {
+ v8::HandleScope scope;
+ Handle<FunctionTemplate> intercept = FunctionTemplate::New();
+ AddInterceptor(intercept, EmptyInterceptorGetter, EmptyInterceptorSetter);
+ LocalContext env;
+ env->Global()->Set(v8_str("Intercept"), intercept->GetFunction());
+ CompileRun("var a = new Object();"
+ "var b = new Intercept();"
+ "var c = new Object();"
+ "c.__proto__ = b;"
+ "b.__proto__ = a;"
+ "a.x = 23;"
+ "for (var i = 0; i < 3; i++) c.x;");
+ ExpectBoolean("c.hasOwnProperty('x')", false);
+ ExpectInt32("c.x", 23);
+ CompileRun("a.y = 42;"
+ "for (var i = 0; i < 3; i++) c.x;");
+ ExpectBoolean("c.hasOwnProperty('x')", false);
+ ExpectInt32("c.x", 23);
+ ExpectBoolean("c.hasOwnProperty('y')", false);
+ ExpectInt32("c.y", 42);
+}
+
+
static void TestReceiver(Local<Value> expected_result,
Local<Value> expected_receiver,
const char* code) {
diff --git a/test/mjsunit/regress/regress-117409.js b/test/mjsunit/regress/regress-117409.js
new file mode 100644
index 00000000..9222191a
--- /dev/null
+++ b/test/mjsunit/regress/regress-117409.js
@@ -0,0 +1,52 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+function KeyedStoreIC(a) { a[0] = Math.E; }
+
+// Create literal with a fast double elements backing store
+var literal = [1.2];
+
+// Specialize the IC for fast double elements
+KeyedStoreIC(literal);
+KeyedStoreIC(literal);
+
+// Truncate array to 0 elements, at which point backing store will be replaced
+// with empty fixed array.
+literal.length = 0;
+
+// ArrayPush built-in will replace empty fixed array backing store with 19
+// elements fixed array backing store. This leads to a mismatch between the map
+// and the backing store. Debug mode will crash here in set_elements accessor.
+literal.push(Math.E, Math.E);
+
+// Corrupt the backing store!
+KeyedStoreIC(literal);
+
+// Release mode will crash here when trying to visit parts of E as pointers.
+gc();
diff --git a/test/mjsunit/regress/regress-128018.js b/test/mjsunit/regress/regress-128018.js
new file mode 100644
index 00000000..7bd15858
--- /dev/null
+++ b/test/mjsunit/regress/regress-128018.js
@@ -0,0 +1,35 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+function KeyedStoreIC(a) { a[(1)] = Math.E; }
+var literal = [1.2];
+literal.length = 0;
+literal.push('0' && 0 );
+KeyedStoreIC(literal);
+gc();