Version 3.11.2
Revert r11496. (Chromium issue 128146)
Implement map collection for incremental marking. (issue 1465)
Add toString method to CallSite (which describes a frame of the stack trace).
git-svn-id: http://v8.googlecode.com/svn/trunk@11582 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index c52a5ab..4f75720 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,13 @@
+2012-05-16: Version 3.11.2
+
+ Revert r11496. (Chromium issue 128146)
+
+ Implement map collection for incremental marking. (issue 1465)
+
+ Add toString method to CallSite (which describes a frame of the
+ stack trace).
+
+
2012-05-15: Version 3.11.1
Added a readbuffer function to d8 that reads a file into an ArrayBuffer.
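
A minimal d8 sketch of the user-visible effect of the CallSite change (the function name `f`, the script name and the frame positions are hypothetical, made up for illustration): with this patch the "    at ..." lines of a default e.stack string are produced by the new CallSite toString method rather than the old FormatSourcePosition helper.

// Minimal sketch, runnable in d8 (hypothetical names and positions).
function f() { throw new Error("boom"); }
try {
  f();
} catch (e) {
  // Each "    at ..." line below the message is a CallSite.toString()
  // result, e.g. "    at f (script.js:1:22)".
  print(e.stack);  // d8's print; use console.log in other shells.
}
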
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index c65c68c..048a027 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -484,8 +484,8 @@
global_context()->set_initial_object_prototype(*prototype);
SetPrototype(object_fun, prototype);
- object_function_map->
- set_instance_descriptors(heap->empty_descriptor_array());
+ object_function_map->set_instance_descriptors(
+ heap->empty_descriptor_array());
}
// Allocate the empty function as the prototype for function ECMAScript
@@ -516,12 +516,10 @@
function_instance_map_writable_prototype_->set_prototype(*empty_function);
// Allocate the function map first and then patch the prototype later
- Handle<Map> empty_fm = factory->CopyMapDropDescriptors(
- function_without_prototype_map);
- empty_fm->set_instance_descriptors(
- function_without_prototype_map->instance_descriptors());
- empty_fm->set_prototype(global_context()->object_function()->prototype());
- empty_function->set_map(*empty_fm);
+ Handle<Map> empty_function_map = CreateFunctionMap(DONT_ADD_PROTOTYPE);
+ empty_function_map->set_prototype(
+ global_context()->object_function()->prototype());
+ empty_function->set_map(*empty_function_map);
return empty_function;
}
diff --git a/src/builtins.cc b/src/builtins.cc
index 6d1c6a9..84a0c3d 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -412,14 +412,12 @@
HeapObject* elms = array->elements();
Map* map = elms->map();
if (map == heap->fixed_array_map()) {
- if (array->HasFastElements()) return elms;
- if (args == NULL) {
- if (array->HasFastDoubleElements()) {
- ASSERT(elms == heap->empty_fixed_array());
- MaybeObject* maybe_transition =
- array->TransitionElementsKind(FAST_ELEMENTS);
- if (maybe_transition->IsFailure()) return maybe_transition;
- }
+ if (args == NULL || array->HasFastElements()) return elms;
+ if (array->HasFastDoubleElements()) {
+ ASSERT(elms == heap->empty_fixed_array());
+ MaybeObject* maybe_transition =
+ array->TransitionElementsKind(FAST_ELEMENTS);
+ if (maybe_transition->IsFailure()) return maybe_transition;
return elms;
}
} else if (map == heap->fixed_cow_array_map()) {
diff --git a/src/incremental-marking-inl.h b/src/incremental-marking-inl.h
index 5ce003f..2dae6f2 100644
--- a/src/incremental-marking-inl.h
+++ b/src/incremental-marking-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -118,13 +118,29 @@
void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) {
- WhiteToGrey(obj, mark_bit);
+ Marking::WhiteToGrey(mark_bit);
marking_deque_.PushGrey(obj);
}
-void IncrementalMarking::WhiteToGrey(HeapObject* obj, MarkBit mark_bit) {
- Marking::WhiteToGrey(mark_bit);
+bool IncrementalMarking::MarkObjectAndPush(HeapObject* obj) {
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ if (!mark_bit.Get()) {
+ WhiteToGreyAndPush(obj, mark_bit);
+ return true;
+ }
+ return false;
+}
+
+
+bool IncrementalMarking::MarkObjectWithoutPush(HeapObject* obj) {
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ if (!mark_bit.Get()) {
+ mark_bit.Set();
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+ return true;
+ }
+ return false;
}
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index 5b58c9d..94afffa 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,6 +42,7 @@
state_(STOPPED),
marking_deque_memory_(NULL),
marking_deque_memory_committed_(false),
+ marker_(this, heap->mark_compact_collector()),
steps_count_(0),
steps_took_(0),
longest_step_(0.0),
@@ -663,6 +664,22 @@
} else if (map == global_context_map) {
// Global contexts have weak fields.
VisitGlobalContext(Context::cast(obj), &marking_visitor);
+ } else if (map->instance_type() == MAP_TYPE) {
+ Map* map = Map::cast(obj);
+ heap_->ClearCacheOnMap(map);
+
+ // When map collection is enabled we have to mark through map's
+ // transitions and back pointers in a special way to make these links
+ // weak. Only maps for subclasses of JSReceiver can have transitions.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ if (FLAG_collect_maps &&
+ map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ marker_.MarkMapContents(map);
+ } else {
+ marking_visitor.VisitPointers(
+ HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+ HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+ }
} else {
obj->Iterate(&marking_visitor);
}
@@ -807,12 +824,6 @@
Map* map = obj->map();
if (map == filler_map) continue;
- if (obj->IsMap()) {
- Map* map = Map::cast(obj);
- heap_->ClearCacheOnMap(map);
- }
-
-
int size = obj->SizeFromMap(map);
bytes_to_process -= size;
MarkBit map_mark_bit = Marking::MarkBitFrom(map);
@@ -830,6 +841,22 @@
MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
VisitGlobalContext(ctx, &marking_visitor);
+ } else if (map->instance_type() == MAP_TYPE) {
+ Map* map = Map::cast(obj);
+ heap_->ClearCacheOnMap(map);
+
+ // When map collection is enabled we have to mark through map's
+ // transitions and back pointers in a special way to make these links
+ // weak. Only maps for subclasses of JSReceiver can have transitions.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ if (FLAG_collect_maps &&
+ map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ marker_.MarkMapContents(map);
+ } else {
+ marking_visitor.VisitPointers(
+ HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+ HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+ }
} else if (map->instance_type() == JS_FUNCTION_TYPE) {
marking_visitor.VisitPointers(
HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index 8cbe6c1..39e8dae 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -154,8 +154,6 @@
inline void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);
- inline void WhiteToGrey(HeapObject* obj, MarkBit mark_bit);
-
// Does white->black or keeps gray or black color. Returns true if converting
// white to black.
inline bool MarkBlackOrKeepGrey(MarkBit mark_bit) {
@@ -169,6 +167,16 @@
return true;
}
+ // Marks the object grey and pushes it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ // This is for incremental marking only.
+ INLINE(bool MarkObjectAndPush(HeapObject* obj));
+
+ // Marks the object black without pushing it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ // This is for incremental marking only.
+ INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
+
inline int steps_count() {
return steps_count_;
}
@@ -260,6 +268,7 @@
VirtualMemory* marking_deque_memory_;
bool marking_deque_memory_committed_;
MarkingDeque marking_deque_;
+ Marker<IncrementalMarking> marker_;
int steps_count_;
double steps_took_;
diff --git a/src/mark-compact-inl.h b/src/mark-compact-inl.h
index 43f6b89..2f7e31f 100644
--- a/src/mark-compact-inl.h
+++ b/src/mark-compact-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -52,6 +52,15 @@
}
+bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
+ if (MarkObjectWithoutPush(obj)) {
+ marking_deque_.PushBlack(obj);
+ return true;
+ }
+ return false;
+}
+
+
void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
if (!mark_bit.Get()) {
@@ -62,16 +71,13 @@
}
-bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* object) {
- MarkBit mark = Marking::MarkBitFrom(object);
- bool old_mark = mark.Get();
- if (!old_mark) SetMark(object, mark);
- return old_mark;
-}
-
-
-void MarkCompactCollector::MarkObjectAndPush(HeapObject* object) {
- if (!MarkObjectWithoutPush(object)) marking_deque_.PushBlack(object);
+bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ if (!mark_bit.Get()) {
+ SetMark(obj, mark_bit);
+ return true;
+ }
+ return false;
}
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 0aa1192..c455564 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -64,13 +64,13 @@
abort_incremental_marking_(false),
compacting_(false),
was_marked_incrementally_(false),
- collect_maps_(FLAG_collect_maps),
flush_monomorphic_ics_(false),
tracer_(NULL),
migration_slots_buffer_(NULL),
heap_(NULL),
code_flusher_(NULL),
- encountered_weak_maps_(NULL) { }
+ encountered_weak_maps_(NULL),
+ marker_(this, this) { }
#ifdef DEBUG
@@ -282,7 +282,7 @@
MarkLiveObjects();
ASSERT(heap_->incremental_marking()->IsStopped());
- if (collect_maps_) ClearNonLiveTransitions();
+ if (FLAG_collect_maps) ClearNonLiveTransitions();
ClearWeakMaps();
@@ -294,7 +294,7 @@
SweepSpaces();
- if (!collect_maps_) ReattachInitialMaps();
+ if (!FLAG_collect_maps) ReattachInitialMaps();
Finish();
@@ -658,11 +658,6 @@
void MarkCompactCollector::Prepare(GCTracer* tracer) {
was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
- // Disable collection of maps if incremental marking is enabled.
- // Map collection algorithm relies on a special map transition tree traversal
- // order which is not implemented for incremental marking.
- collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_;
-
// Monomorphic ICs are preserved when possible, but need to be flushed
// when they might be keeping a Context alive, or when the heap is about
// to be serialized.
@@ -1798,11 +1793,11 @@
heap_->ClearCacheOnMap(map);
// When map collection is enabled we have to mark through map's transitions
- // in a special way to make transition links weak.
- // Only maps for subclasses of JSReceiver can have transitions.
+ // in a special way to make transition links weak. Only maps for subclasses
+ // of JSReceiver can have transitions.
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (collect_maps_ && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- MarkMapContents(map);
+ if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ marker_.MarkMapContents(map);
} else {
marking_deque_.PushBlack(map);
}
@@ -1812,85 +1807,86 @@
}
-void MarkCompactCollector::MarkMapContents(Map* map) {
+// Force instantiation of template instances.
+template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
+template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
+
+
+template <class T>
+void Marker<T>::MarkMapContents(Map* map) {
// Mark prototype transitions array but don't push it into marking stack.
// This will make references from it weak. We will clean dead prototype
- // transitions in ClearNonLiveTransitions. But make sure that back pointers
- // stored inside prototype transitions arrays are marked.
- Object* raw_proto_transitions = map->unchecked_prototype_transitions();
- if (raw_proto_transitions->IsFixedArray()) {
- FixedArray* prototype_transitions = FixedArray::cast(raw_proto_transitions);
+ // transitions in ClearNonLiveTransitions.
+ Object** proto_trans_slot =
+ HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
+ HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
+ if (prototype_transitions->IsFixedArray()) {
+ mark_compact_collector()->RecordSlot(proto_trans_slot,
+ proto_trans_slot,
+ prototype_transitions);
MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
if (!mark.Get()) {
mark.Set();
MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
prototype_transitions->Size());
- MarkObjectAndPush(HeapObject::cast(
- prototype_transitions->get(Map::kProtoTransitionBackPointerOffset)));
}
}
- Object** raw_descriptor_array_slot =
+ // Make sure that the back pointer stored either in the map itself or inside
+ // its prototype transitions array is marked. Treat pointers in the descriptor
+ // array as weak and also mark that array to prevent visiting it later.
+ base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
+
+ Object** descriptor_array_slot =
HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
- Object* raw_descriptor_array = *raw_descriptor_array_slot;
- if (!raw_descriptor_array->IsSmi()) {
- MarkDescriptorArray(
- reinterpret_cast<DescriptorArray*>(raw_descriptor_array));
+ Object* descriptor_array = *descriptor_array_slot;
+ if (!descriptor_array->IsSmi()) {
+ MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
}
- // Mark the Object* fields of the Map.
- // Since the descriptor array has been marked already, it is fine
- // that one of these fields contains a pointer to it.
- Object** start_slot = HeapObject::RawField(map,
- Map::kPointerFieldsBeginOffset);
-
- Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
-
- StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
+ // Mark the Object* fields of the Map. Since the descriptor array has been
+ // marked already, it is fine that one of these fields contains a pointer
+ // to it. But make sure to skip back pointer and prototype transitions.
+ STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
+ Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
+ Object** start_slot = HeapObject::RawField(
+ map, Map::kPointerFieldsBeginOffset);
+ Object** end_slot = HeapObject::RawField(
+ map, Map::kPrototypeTransitionsOrBackPointerOffset);
+ for (Object** slot = start_slot; slot < end_slot; slot++) {
+ Object* obj = *slot;
+ if (!obj->NonFailureIsHeapObject()) continue;
+ mark_compact_collector()->RecordSlot(start_slot, slot, obj);
+ base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
+ }
}
-void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors,
- int offset) {
- Object** slot = HeapObject::RawField(accessors, offset);
- HeapObject* accessor = HeapObject::cast(*slot);
- if (accessor->IsMap()) return;
- RecordSlot(slot, slot, accessor);
- MarkObjectAndPush(accessor);
-}
-
-
-void MarkCompactCollector::MarkDescriptorArray(
- DescriptorArray* descriptors) {
- MarkBit descriptors_mark = Marking::MarkBitFrom(descriptors);
- if (descriptors_mark.Get()) return;
+template <class T>
+void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) {
// Empty descriptor array is marked as a root before any maps are marked.
- ASSERT(descriptors != heap()->empty_descriptor_array());
- SetMark(descriptors, descriptors_mark);
+ ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());
- FixedArray* contents = reinterpret_cast<FixedArray*>(
+ // The DescriptorArray contains a pointer to its contents array, but the
+ // contents array will be marked black and hence not be visited again.
+ if (!base_marker()->MarkObjectAndPush(descriptors)) return;
+ FixedArray* contents = FixedArray::cast(
descriptors->get(DescriptorArray::kContentArrayIndex));
- ASSERT(contents->IsHeapObject());
- ASSERT(!IsMarked(contents));
- ASSERT(contents->IsFixedArray());
ASSERT(contents->length() >= 2);
- MarkBit contents_mark = Marking::MarkBitFrom(contents);
- SetMark(contents, contents_mark);
- // Contents contains (value, details) pairs. If the details say that the type
- // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
- // EXTERNAL_ARRAY_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as
- // live. Only for MAP_TRANSITION, EXTERNAL_ARRAY_TRANSITION and
- // CONSTANT_TRANSITION is the value an Object* (a Map*).
+ ASSERT(Marking::IsWhite(Marking::MarkBitFrom(contents)));
+ base_marker()->MarkObjectWithoutPush(contents);
+
+ // Contents contains (value, details) pairs. If the descriptor contains a
+ // transition (value is a Map), we don't mark the value as live. It might
+ // be set to the NULL_DESCRIPTOR in ClearNonLiveTransitions later.
for (int i = 0; i < contents->length(); i += 2) {
- // If the pair (value, details) at index i, i+1 is not
- // a transition or null descriptor, mark the value.
PropertyDetails details(Smi::cast(contents->get(i + 1)));
Object** slot = contents->data_start() + i;
if (!(*slot)->IsHeapObject()) continue;
HeapObject* value = HeapObject::cast(*slot);
- RecordSlot(slot, slot, *slot);
+ mark_compact_collector()->RecordSlot(slot, slot, *slot);
switch (details.type()) {
case NORMAL:
@@ -1898,21 +1894,22 @@
case CONSTANT_FUNCTION:
case HANDLER:
case INTERCEPTOR:
- MarkObjectAndPush(value);
+ base_marker()->MarkObjectAndPush(value);
break;
case CALLBACKS:
if (!value->IsAccessorPair()) {
- MarkObjectAndPush(value);
- } else if (!MarkObjectWithoutPush(value)) {
- MarkAccessorPairSlot(value, AccessorPair::kGetterOffset);
- MarkAccessorPairSlot(value, AccessorPair::kSetterOffset);
+ base_marker()->MarkObjectAndPush(value);
+ } else if (base_marker()->MarkObjectWithoutPush(value)) {
+ AccessorPair* accessors = AccessorPair::cast(value);
+ MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset);
+ MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset);
}
break;
case ELEMENTS_TRANSITION:
// For maps with multiple elements transitions, the transition maps are
// stored in a FixedArray. Keep the fixed array alive but not the maps
// that it refers to.
- if (value->IsFixedArray()) MarkObjectWithoutPush(value);
+ if (value->IsFixedArray()) base_marker()->MarkObjectWithoutPush(value);
break;
case MAP_TRANSITION:
case CONSTANT_TRANSITION:
@@ -1920,9 +1917,16 @@
break;
}
}
- // The DescriptorArray descriptors contains a pointer to its contents array,
- // but the contents array is already marked.
- marking_deque_.PushBlack(descriptors);
+}
+
+
+template <class T>
+void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) {
+ Object** slot = HeapObject::RawField(accessors, offset);
+ HeapObject* accessor = HeapObject::cast(*slot);
+ if (accessor->IsMap()) return;
+ mark_compact_collector()->RecordSlot(slot, slot, accessor);
+ base_marker()->MarkObjectAndPush(accessor);
}
diff --git a/src/mark-compact.h b/src/mark-compact.h
index 6420a21..dbc2869 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -42,6 +42,7 @@
// Forward declarations.
class CodeFlusher;
class GCTracer;
+class MarkCompactCollector;
class MarkingVisitor;
class RootMarkingVisitor;
@@ -166,7 +167,6 @@
// ----------------------------------------------------------------------------
// Marking deque for tracing live objects.
-
class MarkingDeque {
public:
MarkingDeque()
@@ -383,6 +383,34 @@
};
+// -------------------------------------------------------------------------
+// Marker shared between incremental and non-incremental marking
+template<class BaseMarker> class Marker {
+ public:
+ Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
+ : base_marker_(base_marker),
+ mark_compact_collector_(mark_compact_collector) {}
+
+ // Mark pointers in a Map and its DescriptorArray together, possibly
+ // treating transitions or back pointers weak.
+ void MarkMapContents(Map* map);
+ void MarkDescriptorArray(DescriptorArray* descriptors);
+ void MarkAccessorPairSlot(AccessorPair* accessors, int offset);
+
+ private:
+ BaseMarker* base_marker() {
+ return base_marker_;
+ }
+
+ MarkCompactCollector* mark_compact_collector() {
+ return mark_compact_collector_;
+ }
+
+ BaseMarker* base_marker_;
+ MarkCompactCollector* mark_compact_collector_;
+};
+
+
// Defined in isolate.h.
class ThreadLocalTop;
@@ -584,8 +612,6 @@
bool was_marked_incrementally_;
- bool collect_maps_;
-
bool flush_monomorphic_ics_;
// A pointer to the current stack-allocated GC tracer object during a full
@@ -608,12 +634,13 @@
//
// After: Live objects are marked and non-live objects are unmarked.
-
friend class RootMarkingVisitor;
friend class MarkingVisitor;
friend class StaticMarkingVisitor;
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
+ friend class Marker<IncrementalMarking>;
+ friend class Marker<MarkCompactCollector>;
// Mark non-optimize code for functions inlined into the given optimized
// code. This will prevent it from being flushed.
@@ -631,22 +658,25 @@
void AfterMarking();
// Marks the object black and pushes it on the marking stack.
- // This is for non-incremental marking.
+ // Returns true if object needed marking and false otherwise.
+ // This is for non-incremental marking only.
+ INLINE(bool MarkObjectAndPush(HeapObject* obj));
+
+ // Marks the object black and pushes it on the marking stack.
+ // This is for non-incremental marking only.
INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));
- INLINE(bool MarkObjectWithoutPush(HeapObject* object));
- INLINE(void MarkObjectAndPush(HeapObject* value));
+ // Marks the object black without pushing it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ // This is for non-incremental marking only.
+ INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
- // Marks the object black. This is for non-incremental marking.
+ // Marks the object black assuming that it is not yet marked.
+ // This is for non-incremental marking only.
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
void ProcessNewlyMarkedObject(HeapObject* obj);
- // Mark a Map and its DescriptorArray together, skipping transitions.
- void MarkMapContents(Map* map);
- void MarkAccessorPairSlot(HeapObject* accessors, int offset);
- void MarkDescriptorArray(DescriptorArray* descriptors);
-
// Mark the heap roots and all objects reachable from them.
void MarkRoots(RootMarkingVisitor* visitor);
@@ -749,6 +779,7 @@
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
Object* encountered_weak_maps_;
+ Marker<MarkCompactCollector> marker_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
diff --git a/src/messages.js b/src/messages.js
index f8b5766..d813df9 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -745,7 +745,7 @@
function GetStackTraceLine(recv, fun, pos, isGlobal) {
- return FormatSourcePosition(new CallSite(recv, fun, pos));
+ return new CallSite(recv, fun, pos).toString();
}
// ----------------------------------------------------------------------------
@@ -919,6 +919,65 @@
return this.fun === constructor;
}
+function CallSiteToString() {
+ var fileName;
+ var fileLocation = "";
+ if (this.isNative()) {
+ fileLocation = "native";
+ } else if (this.isEval()) {
+ fileName = this.getScriptNameOrSourceURL();
+ if (!fileName) {
+ fileLocation = this.getEvalOrigin();
+ }
+ } else {
+ fileName = this.getFileName();
+ }
+
+ if (fileName) {
+ fileLocation += fileName;
+ var lineNumber = this.getLineNumber();
+ if (lineNumber != null) {
+ fileLocation += ":" + lineNumber;
+ var columnNumber = this.getColumnNumber();
+ if (columnNumber) {
+ fileLocation += ":" + columnNumber;
+ }
+ }
+ }
+
+ if (!fileLocation) {
+ fileLocation = "unknown source";
+ }
+ var line = "";
+ var functionName = this.getFunction().name;
+ var addPrefix = true;
+ var isConstructor = this.isConstructor();
+ var isMethodCall = !(this.isToplevel() || isConstructor);
+ if (isMethodCall) {
+ var methodName = this.getMethodName();
+ line += this.getTypeName() + ".";
+ if (functionName) {
+ line += functionName;
+ if (methodName && (methodName != functionName)) {
+ line += " [as " + methodName + "]";
+ }
+ } else {
+ line += methodName || "<anonymous>";
+ }
+ } else if (isConstructor) {
+ line += "new " + (functionName || "<anonymous>");
+ } else if (functionName) {
+ line += functionName;
+ } else {
+ line += fileLocation;
+ addPrefix = false;
+ }
+ if (addPrefix) {
+ line += " (" + fileLocation + ")";
+ }
+ return line;
+}
+
SetUpLockedPrototype(CallSite, $Array("receiver", "fun", "pos"), $Array(
"getThis", CallSiteGetThis,
"getTypeName", CallSiteGetTypeName,
@@ -934,7 +993,8 @@
"getColumnNumber", CallSiteGetColumnNumber,
"isNative", CallSiteIsNative,
"getPosition", CallSiteGetPosition,
- "isConstructor", CallSiteIsConstructor
+ "isConstructor", CallSiteIsConstructor,
+ "toString", CallSiteToString
));
@@ -976,65 +1036,6 @@
return eval_origin;
}
-function FormatSourcePosition(frame) {
- var fileName;
- var fileLocation = "";
- if (frame.isNative()) {
- fileLocation = "native";
- } else if (frame.isEval()) {
- fileName = frame.getScriptNameOrSourceURL();
- if (!fileName) {
- fileLocation = frame.getEvalOrigin();
- }
- } else {
- fileName = frame.getFileName();
- }
-
- if (fileName) {
- fileLocation += fileName;
- var lineNumber = frame.getLineNumber();
- if (lineNumber != null) {
- fileLocation += ":" + lineNumber;
- var columnNumber = frame.getColumnNumber();
- if (columnNumber) {
- fileLocation += ":" + columnNumber;
- }
- }
- }
-
- if (!fileLocation) {
- fileLocation = "unknown source";
- }
- var line = "";
- var functionName = frame.getFunction().name;
- var addPrefix = true;
- var isConstructor = frame.isConstructor();
- var isMethodCall = !(frame.isToplevel() || isConstructor);
- if (isMethodCall) {
- var methodName = frame.getMethodName();
- line += frame.getTypeName() + ".";
- if (functionName) {
- line += functionName;
- if (methodName && (methodName != functionName)) {
- line += " [as " + methodName + "]";
- }
- } else {
- line += methodName || "<anonymous>";
- }
- } else if (isConstructor) {
- line += "new " + (functionName || "<anonymous>");
- } else if (functionName) {
- line += functionName;
- } else {
- line += fileLocation;
- addPrefix = false;
- }
- if (addPrefix) {
- line += " (" + fileLocation + ")";
- }
- return line;
-}
-
function FormatStackTrace(error, frames) {
var lines = [];
try {
@@ -1050,7 +1051,7 @@
var frame = frames[i];
var line;
try {
- line = FormatSourcePosition(frame);
+ line = frame.toString();
} catch (e) {
try {
line = "<error: " + e + ">";
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 986921f..a4de721 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -2343,6 +2343,7 @@
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
Register scratch = scratch0();
+
int map_count = instr->hydrogen()->types()->length();
bool need_generic = instr->hydrogen()->need_generic();
@@ -2357,8 +2358,8 @@
bool last = (i == map_count - 1);
Handle<Map> map = instr->hydrogen()->types()->at(i);
if (last && !need_generic) {
- Handle<Map> map = instr->hydrogen()->types()->last();
DeoptimizeIf(ne, instr->environment(), scratch, Operand(map));
+ EmitLoadFieldOrConstantFunction(result, object, map, name);
} else {
Label next;
__ Branch(&next, ne, scratch, Operand(map));
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 3bfb74d..9006abd 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -992,6 +992,28 @@
}
+void Map::ZapInstanceDescriptors() {
+ DescriptorArray* descriptors = instance_descriptors();
+ if (descriptors == GetHeap()->empty_descriptor_array()) return;
+ FixedArray* contents = FixedArray::cast(
+ descriptors->get(DescriptorArray::kContentArrayIndex));
+ MemsetPointer(descriptors->data_start(),
+ GetHeap()->the_hole_value(),
+ descriptors->length());
+ MemsetPointer(contents->data_start(),
+ GetHeap()->the_hole_value(),
+ contents->length());
+}
+
+
+void Map::ZapPrototypeTransitions() {
+ FixedArray* proto_transitions = prototype_transitions();
+ MemsetPointer(proto_transitions->data_start(),
+ GetHeap()->the_hole_value(),
+ proto_transitions->length());
+}
+
+
#endif // DEBUG
} } // namespace v8::internal
diff --git a/src/objects-inl.h b/src/objects-inl.h
index eb1586a..5444438 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -3351,6 +3351,9 @@
Object* object = READ_FIELD(this,
kInstanceDescriptorsOrBitField3Offset);
if (!object->IsSmi()) {
+#ifdef DEBUG
+ ZapInstanceDescriptors();
+#endif
WRITE_FIELD(
this,
kInstanceDescriptorsOrBitField3Offset,
@@ -3376,6 +3379,11 @@
}
}
ASSERT(!is_shared());
+#ifdef DEBUG
+ if (value != instance_descriptors()) {
+ ZapInstanceDescriptors();
+ }
+#endif
WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
CONDITIONAL_WRITE_BARRIER(
heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
@@ -3448,6 +3456,11 @@
Heap* heap = GetHeap();
ASSERT(value != heap->empty_fixed_array());
value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
+#ifdef DEBUG
+ if (value != prototype_transitions()) {
+ ZapPrototypeTransitions();
+ }
+#endif
WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
CONDITIONAL_WRITE_BARRIER(
heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
diff --git a/src/objects.cc b/src/objects.cc
index 7f75611..cb87c71 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -4417,12 +4417,7 @@
LookupResult result(GetHeap()->isolate());
LocalLookupRealNamedProperty(name, &result);
if (result.IsProperty() && result.type() == CALLBACKS) {
- // Note that the result can actually have IsDontDelete() == true when we
- // e.g. have to fall back to the slow case while adding a setter after
- // successfully reusing a map transition for a getter. Nevertheless, this is
- // OK, because the assertion only holds for the whole addition of both
- // accessors, not for the addition of each part. See first comment in
- // DefinePropertyAccessor below.
+ ASSERT(!result.IsDontDelete());
Object* obj = result.GetCallbackObject();
if (obj->IsAccessorPair()) {
return AccessorPair::cast(obj)->CopyWithoutTransitions();
@@ -4436,28 +4431,6 @@
Object* getter,
Object* setter,
PropertyAttributes attributes) {
- // We could assert that the property is configurable here, but we would need
- // to do a lookup, which seems to be a bit of overkill.
- Heap* heap = GetHeap();
- bool only_attribute_changes = getter->IsNull() && setter->IsNull();
- if (HasFastProperties() && !only_attribute_changes) {
- MaybeObject* getterOk = heap->undefined_value();
- if (!getter->IsNull()) {
- getterOk = DefineFastAccessor(name, ACCESSOR_GETTER, getter, attributes);
- if (getterOk->IsFailure()) return getterOk;
- }
-
- MaybeObject* setterOk = heap->undefined_value();
- if (getterOk != heap->null_value() && !setter->IsNull()) {
- setterOk = DefineFastAccessor(name, ACCESSOR_SETTER, setter, attributes);
- if (setterOk->IsFailure()) return setterOk;
- }
-
- if (getterOk != heap->null_value() && setterOk != heap->null_value()) {
- return heap->undefined_value();
- }
- }
-
AccessorPair* accessors;
{ MaybeObject* maybe_accessors = CreateAccessorPairFor(name);
if (!maybe_accessors->To(&accessors)) return maybe_accessors;
@@ -4607,159 +4580,6 @@
}
-static MaybeObject* CreateFreshAccessor(JSObject* obj,
- String* name,
- AccessorComponent component,
- Object* accessor,
- PropertyAttributes attributes) {
- // step 1: create a new getter/setter pair with only the accessor in it
- Heap* heap = obj->GetHeap();
- AccessorPair* accessors2;
- { MaybeObject* maybe_accessors2 = heap->AllocateAccessorPair();
- if (!maybe_accessors2->To(&accessors2)) return maybe_accessors2;
- }
- accessors2->set(component, accessor);
-
- // step 2: create a copy of the descriptors, incl. the new getter/setter pair
- Map* map1 = obj->map();
- CallbacksDescriptor callbacks_descr2(name, accessors2, attributes);
- DescriptorArray* descriptors2;
- { MaybeObject* maybe_descriptors2 =
- map1->instance_descriptors()->CopyInsert(&callbacks_descr2,
- REMOVE_TRANSITIONS);
- if (!maybe_descriptors2->To(&descriptors2)) return maybe_descriptors2;
- }
-
- // step 3: create a new map with the new descriptors
- Map* map2;
- { MaybeObject* maybe_map2 = map1->CopyDropDescriptors();
- if (!maybe_map2->To(&map2)) return maybe_map2;
- }
- map2->set_instance_descriptors(descriptors2);
-
- // step 4: create a new getter/setter pair with a transition to the new map
- AccessorPair* accessors1;
- { MaybeObject* maybe_accessors1 = heap->AllocateAccessorPair();
- if (!maybe_accessors1->To(&accessors1)) return maybe_accessors1;
- }
- accessors1->set(component, map2);
-
- // step 5: create a copy of the descriptors, incl. the new getter/setter pair
- // with the transition
- CallbacksDescriptor callbacks_descr1(name, accessors1, attributes);
- DescriptorArray* descriptors1;
- { MaybeObject* maybe_descriptors1 =
- map1->instance_descriptors()->CopyInsert(&callbacks_descr1,
- KEEP_TRANSITIONS);
- if (!maybe_descriptors1->To(&descriptors1)) return maybe_descriptors1;
- }
-
- // step 6: everything went well so far, so we make our changes visible
- obj->set_map(map2);
- map1->set_instance_descriptors(descriptors1);
- map2->SetBackPointer(map1);
- return obj;
-}
-
-
-static bool TransitionToSameAccessor(Object* map,
- String* name,
- AccessorComponent component,
- Object* accessor,
- PropertyAttributes attributes ) {
- DescriptorArray* descs = Map::cast(map)->instance_descriptors();
- int number = descs->SearchWithCache(name);
- ASSERT(number != DescriptorArray::kNotFound);
- Object* target_accessor =
- AccessorPair::cast(descs->GetCallbacksObject(number))->get(component);
- PropertyAttributes target_attributes = descs->GetDetails(number).attributes();
- return target_accessor == accessor && target_attributes == attributes;
-}
-
-
-static MaybeObject* NewCallbackTransition(JSObject* obj,
- String* name,
- AccessorComponent component,
- Object* accessor,
- PropertyAttributes attributes,
- AccessorPair* accessors2) {
- // step 1: copy the old getter/setter pair and set the new accessor
- AccessorPair* accessors3;
- { MaybeObject* maybe_accessors3 = accessors2->CopyWithoutTransitions();
- if (!maybe_accessors3->To(&accessors3)) return maybe_accessors3;
- }
- accessors3->set(component, accessor);
-
- // step 2: create a copy of the descriptors, incl. the new getter/setter pair
- Map* map2 = obj->map();
- CallbacksDescriptor callbacks_descr3(name, accessors3, attributes);
- DescriptorArray* descriptors3;
- { MaybeObject* maybe_descriptors3 =
- map2->instance_descriptors()->CopyInsert(&callbacks_descr3,
- REMOVE_TRANSITIONS);
- if (!maybe_descriptors3->To(&descriptors3)) return maybe_descriptors3;
- }
-
- // step 3: create a new map with the new descriptors
- Map* map3;
- { MaybeObject* maybe_map3 = map2->CopyDropDescriptors();
- if (!maybe_map3->To(&map3)) return maybe_map3;
- }
- map3->set_instance_descriptors(descriptors3);
-
- // step 4: everything went well so far, so we make our changes visible
- obj->set_map(map3);
- accessors2->set(component, map3);
- map3->SetBackPointer(map2);
- return obj;
-}
-
-
-MaybeObject* JSObject::DefineFastAccessor(String* name,
- AccessorComponent component,
- Object* accessor,
- PropertyAttributes attributes) {
- ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
- LookupResult result(GetIsolate());
- LocalLookup(name, &result);
-
- // If we have a new property, create a fresh accessor plus a transition to it.
- if (!result.IsFound()) {
- return CreateFreshAccessor(this, name, component, accessor, attributes);
- }
-
- // If the property is not a JavaScript accessor, fall back to the slow case.
- if (result.type() != CALLBACKS) return GetHeap()->null_value();
- Object* callback_value = result.GetValue();
- if (!callback_value->IsAccessorPair()) return GetHeap()->null_value();
- AccessorPair* accessors = AccessorPair::cast(callback_value);
-
- // Follow a callback transition, if there is a fitting one.
- Object* entry = accessors->get(component);
- if (entry->IsMap() &&
- TransitionToSameAccessor(entry, name, component, accessor, attributes)) {
- set_map(Map::cast(entry));
- return this;
- }
-
- // When we re-add the same accessor again, there is nothing to do.
- if (entry == accessor && result.GetAttributes() == attributes) return this;
-
- // Only the other accessor has been set so far, create a new transition.
- if (entry->IsTheHole()) {
- return NewCallbackTransition(this,
- name,
- component,
- accessor,
- attributes,
- accessors);
- }
-
- // Nothing from the above worked, so we have to fall back to the slow case.
- return GetHeap()->null_value();
-}
-
-
MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
Isolate* isolate = GetIsolate();
String* name = String::cast(info->name());
@@ -6127,8 +5947,8 @@
Object* AccessorPair::GetComponent(AccessorComponent component) {
- Object* accessor = get(component);
- return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
+ Object* accessor = (component == ACCESSOR_GETTER) ? getter() : setter();
+ return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
}
diff --git a/src/objects.h b/src/objects.h
index 22993f2..4fd29ad 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -704,13 +704,12 @@
WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
-class AccessorPair;
class DictionaryElementsAccessor;
class ElementsAccessor;
-class Failure;
class FixedArrayBase;
class ObjectVisitor;
class StringStream;
+class Failure;
struct ValueInfo : public Malloced {
ValueInfo() : type(FIRST_TYPE), ptr(NULL), str(NULL), number(0) { }
@@ -1643,14 +1642,6 @@
Object* getter,
Object* setter,
PropertyAttributes attributes);
- // Try to define a single accessor paying attention to map transitions.
- // Returns a JavaScript null if this was not possible and we have to use the
- // slow case. Note that we can fail due to allocations, too.
- MUST_USE_RESULT MaybeObject* DefineFastAccessor(
- String* name,
- AccessorComponent component,
- Object* accessor,
- PropertyAttributes attributes);
Object* LookupAccessor(String* name, AccessorComponent component);
MUST_USE_RESULT MaybeObject* DefineAccessor(AccessorInfo* info);
@@ -4855,6 +4846,14 @@
Handle<Map> FindTransitionedMap(MapHandleList* candidates);
Map* FindTransitionedMap(MapList* candidates);
+ // Zaps the contents of backing data structures in debug mode. Note that the
+ // heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects
+ // holding weak references when incremental marking is used, because it also
+ // iterates over objects that are otherwise unreachable.
+#ifdef DEBUG
+ void ZapInstanceDescriptors();
+ void ZapPrototypeTransitions();
+#endif
// Dispatched behavior.
#ifdef OBJECT_PRINT
@@ -8120,18 +8119,6 @@
MUST_USE_RESULT MaybeObject* CopyWithoutTransitions();
- Object* get(AccessorComponent component) {
- return component == ACCESSOR_GETTER ? getter() : setter();
- }
-
- void set(AccessorComponent component, Object* value) {
- if (component == ACCESSOR_GETTER) {
- set_getter(value);
- } else {
- set_setter(value);
- }
- }
-
// Note: Returns undefined instead in case of a hole.
Object* GetComponent(AccessorComponent component);
diff --git a/src/version.cc b/src/version.cc
index 0f263ee..6162ea4 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 11
-#define BUILD_NUMBER 1
+#define BUILD_NUMBER 2
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 72079dc..d4a40bf 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -1735,3 +1735,60 @@
CHECK(HEAP->InNewSpace(*o));
}
+
+
+static int CountMapTransitions(Map* map) {
+ int result = 0;
+ DescriptorArray* descs = map->instance_descriptors();
+ for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ if (descs->IsTransitionOnly(i)) {
+ result++;
+ }
+ }
+ return result;
+}
+
+
+// Test that map transitions are cleared and maps are collected with
+// incremental marking as well.
+TEST(Regress1465) {
+ i::FLAG_allow_natives_syntax = true;
+ i::FLAG_trace_incremental_marking = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ #define TRANSITION_COUNT 256
+ for (int i = 0; i < TRANSITION_COUNT; i++) {
+ EmbeddedVector<char, 64> buffer;
+ OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
+ CompileRun(buffer.start());
+ }
+ CompileRun("var root = new Object;");
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // Count number of live transitions before marking.
+ int transitions_before = CountMapTransitions(root->map());
+ CompileRun("%DebugPrint(root);");
+ CHECK_EQ(TRANSITION_COUNT, transitions_before);
+
+ // Go through all incremental marking steps in one swoop.
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ CHECK(marking->IsStopped());
+ marking->Start();
+ CHECK(marking->IsMarking());
+ while (!marking->IsComplete()) {
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+ CHECK(marking->IsComplete());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(marking->IsStopped());
+
+ // Count number of live transitions after marking. Note that one transition
+ // is left, because 'o' still holds an instance of one transition target.
+ int transitions_after = CountMapTransitions(root->map());
+ CompileRun("%DebugPrint(root);");
+ CHECK_EQ(1, transitions_after);
+}
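
The Regress1465 test drives incremental marking from C++; the JavaScript it compiles boils down to the sketch below (run in d8 with --allow-natives-syntax --expose-gc). Note that a plain gc() call only forces a full collection; the incremental steps themselves are forced by the C++ test.

// Sketch of the JS side of the test above. Each distinct property name
// added to a fresh object installs a transition on the initial Object map.
for (var i = 0; i < 256; i++) {
  eval("var o = new Object; o.prop" + i + " = " + i + ";");
}
var root = new Object;
%DebugPrint(root);   // before GC: the root map carries ~256 transitions
gc();                // full mark-compact; with this patch dead transition
                     // targets can also be collected when marking was
                     // done incrementally (issue 1465)
%DebugPrint(root);   // afterwards only the still-reachable transition remains
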
diff --git a/test/mjsunit/accessor-map-sharing.js b/test/mjsunit/accessor-map-sharing.js
deleted file mode 100644
index ab45afa..0000000
--- a/test/mjsunit/accessor-map-sharing.js
+++ /dev/null
@@ -1,176 +0,0 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --allow-natives-syntax
-
-// Handy abbreviations.
-var dp = Object.defineProperty;
-var gop = Object.getOwnPropertyDescriptor;
-
-function getter() { return 111; }
-function setter(x) { print(222); }
-function anotherGetter() { return 333; }
-function anotherSetter(x) { print(444); }
-var obj1, obj2;
-
-// Two objects with the same getter.
-obj1 = {};
-dp(obj1, "alpha", { get: getter });
-obj2 = {};
-dp(obj2, "alpha", { get: getter });
-assertTrue(%HaveSameMap(obj1, obj2));
-
-// Two objects with the same getter, oldskool.
-obj1 = {};
-obj1.__defineGetter__("bravo", getter);
-assertEquals(getter, obj1.__lookupGetter__("bravo"));
-obj2 = {};
-obj2.__defineGetter__("bravo", getter);
-assertEquals(getter, obj2.__lookupGetter__("bravo"));
-assertTrue(%HaveSameMap(obj1, obj2));
-
-// Two objects with the same setter.
-obj1 = {};
-dp(obj1, "charlie", { set: setter });
-obj2 = {};
-dp(obj2, "charlie", { set: setter });
-assertTrue(%HaveSameMap(obj1, obj2));
-
-// Two objects with the same setter, oldskool.
-obj1 = {};
-obj1.__defineSetter__("delta", setter);
-assertEquals(setter, obj1.__lookupSetter__("delta"));
-obj2 = {};
-obj2.__defineSetter__("delta", setter);
-assertEquals(setter, obj2.__lookupSetter__("delta"));
-assertTrue(%HaveSameMap(obj1, obj2));
-
-// Two objects with the same getter and setter.
-obj1 = {};
-dp(obj1, "foxtrot", { get: getter, set: setter });
-obj2 = {};
-dp(obj2, "foxtrot", { get: getter, set: setter });
-assertTrue(%HaveSameMap(obj1, obj2));
-
-// Two objects with the same getter and setter, set separately.
-obj1 = {};
-dp(obj1, "golf", { get: getter, configurable: true });
-dp(obj1, "golf", { set: setter, configurable: true });
-obj2 = {};
-dp(obj2, "golf", { get: getter, configurable: true });
-dp(obj2, "golf", { set: setter, configurable: true });
-assertTrue(%HaveSameMap(obj1, obj2));
-
-// Two objects with the same getter and setter, set separately, oldskool.
-obj1 = {};
-obj1.__defineGetter__("hotel", getter);
-obj1.__defineSetter__("hotel", setter);
-obj2 = {};
-obj2.__defineGetter__("hotel", getter);
-obj2.__defineSetter__("hotel", setter);
-assertTrue(%HaveSameMap(obj1, obj2));
-
-// Attribute-only change, shouldn't affect previous descriptor properties.
-obj1 = {};
-dp(obj1, "india", { get: getter, configurable: true, enumerable: true });
-assertEquals(getter, gop(obj1, "india").get);
-assertTrue(gop(obj1, "india").configurable);
-assertTrue(gop(obj1, "india").enumerable);
-dp(obj1, "india", { enumerable: false });
-assertEquals(getter, gop(obj1, "india").get);
-assertTrue(gop(obj1, "india").configurable);
-assertFalse(gop(obj1, "india").enumerable);
-
-// Attribute-only change, shouldn't affect objects with previously shared maps.
-obj1 = {};
-dp(obj1, "juliet", { set: setter, configurable: true, enumerable: false });
-assertEquals(setter, gop(obj1, "juliet").set);
-assertTrue(gop(obj1, "juliet").configurable);
-assertFalse(gop(obj1, "juliet").enumerable);
-obj2 = {};
-dp(obj2, "juliet", { set: setter, configurable: true, enumerable: false });
-assertEquals(setter, gop(obj2, "juliet").set);
-assertTrue(gop(obj2, "juliet").configurable);
-assertFalse(gop(obj2, "juliet").enumerable);
-dp(obj1, "juliet", { set: setter, configurable: false, enumerable: true });
-assertEquals(setter, gop(obj1, "juliet").set);
-assertFalse(gop(obj1, "juliet").configurable);
-assertTrue(gop(obj1, "juliet").enumerable);
-assertEquals(setter, gop(obj2, "juliet").set);
-assertTrue(gop(obj2, "juliet").configurable);
-assertFalse(gop(obj2, "juliet").enumerable);
-
-// Two objects with the different getters.
-obj1 = {};
-dp(obj1, "kilo", { get: getter });
-obj2 = {};
-dp(obj2, "kilo", { get: anotherGetter });
-assertEquals(getter, gop(obj1, "kilo").get);
-assertEquals(anotherGetter, gop(obj2, "kilo").get);
-assertFalse(%HaveSameMap(obj1, obj2));
-
-// Two objects with the same getters and different setters.
-obj1 = {};
-dp(obj1, "lima", { get: getter, set: setter });
-obj2 = {};
-dp(obj2, "lima", { get: getter, set: anotherSetter });
-assertEquals(setter, gop(obj1, "lima").set);
-assertEquals(anotherSetter, gop(obj2, "lima").set);
-assertFalse(%HaveSameMap(obj1, obj2));
-
-// Even 'undefined' is a kind of getter.
-obj1 = {};
-dp(obj1, "mike", { get: undefined });
-assertTrue("mike" in obj1);
-assertEquals(undefined, gop(obj1, "mike").get);
-assertEquals(undefined, obj1.__lookupGetter__("mike"));
-assertEquals(undefined, gop(obj1, "mike").set);
-assertEquals(undefined, obj1.__lookupSetter__("mike"));
-
-// Even 'undefined' is a kind of setter.
-obj1 = {};
-dp(obj1, "november", { set: undefined });
-assertTrue("november" in obj1);
-assertEquals(undefined, gop(obj1, "november").get);
-assertEquals(undefined, obj1.__lookupGetter__("november"));
-assertEquals(undefined, gop(obj1, "november").set);
-assertEquals(undefined, obj1.__lookupSetter__("november"));
-
-// Redefining a data property.
-obj1 = {};
-obj1.oscar = 12345;
-dp(obj1, "oscar", { set: setter });
-assertEquals(setter, gop(obj1, "oscar").set);
-
-// Re-adding the same getter/attributes pair.
-obj1 = {};
-dp(obj1, "papa", { get: getter, configurable: true });
-dp(obj1, "papa", { get: getter, set: setter, configurable: true });
-assertEquals(getter, gop(obj1, "papa").get);
-assertEquals(setter, gop(obj1, "papa").set);
-assertTrue(gop(obj1, "papa").configurable);
-assertFalse(gop(obj1, "papa").enumerable);
diff --git a/test/mjsunit/regress/regress-128018.js b/test/mjsunit/regress/regress-128018.js
new file mode 100644
index 0000000..7bd1585
--- /dev/null
+++ b/test/mjsunit/regress/regress-128018.js
@@ -0,0 +1,35 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+function KeyedStoreIC(a) { a[(1)] = Math.E; }
+var literal = [1.2];
+literal.length = 0;
+literal.push('0' && 0 );
+KeyedStoreIC(literal);
+gc();
diff --git a/test/mjsunit/regress/regress-128146.js b/test/mjsunit/regress/regress-128146.js
new file mode 100644
index 0000000..5c22b4e
--- /dev/null
+++ b/test/mjsunit/regress/regress-128146.js
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Object.defineProperty({},"foo",{set:function(){},configurable:false});
+Object.defineProperty({},"foo",{get:function(){},configurable:false});
+Object.defineProperty({},"foo",{});
+
+// From WebKit layout tests (fast/js/prototypes.html)
+var wasSet = false;
+var o = { };
+o.__defineGetter__("__proto__", function() { wasSet = true });
+o.__proto__;
+assertFalse(wasSet);
+
diff --git a/test/mjsunit/regress/regress-transcendental.js b/test/mjsunit/regress/regress-transcendental.js
new file mode 100644
index 0000000..b5dbcb4
--- /dev/null
+++ b/test/mjsunit/regress/regress-transcendental.js
@@ -0,0 +1,49 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+// Test whether the runtime implementation and generated code of
+// sine and tangent return the same results.
+
+function test(f, x, name) {
+ // Reset transcendental cache.
+ gc();
+ // Initializing cache leads to a runtime call.
+ var runtime_result = f(x);
+ // Flush transcendental cache entries and optimize f.
+ for (var i = 0; i < 100000; i++) f(i);
+ // Calculate using generated code.
+ var gencode_result = f(x);
+ print(name + " runtime function: " + runtime_result);
+ print(name + " generated code : " + gencode_result);
+ assertEquals(gencode_result, runtime_result);
+}
+
+test(Math.tan, -1.57079632679489660000, "Math.tan");
+test(Math.sin, 6.283185307179586, "Math.sin");
+