Migrate FastCloneShallowObjectStub to TurboFan
BUG=
Committed: https://crrev.com/4c2b04542f263b2679194f9fb75672ebbe72b924
Cr-Commit-Position: refs/heads/master@{#35330}
Review URL: https://codereview.chromium.org/1838283003
Cr-Commit-Position: refs/heads/master@{#35494}
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 50337ad..320799d 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -550,79 +550,6 @@
template <>
-HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
- HValue* undefined = graph()->GetConstantUndefined();
- HValue* closure = GetParameter(0);
- HValue* literal_index = GetParameter(1);
-
- HValue* literals_array = Add<HLoadNamedField>(
- closure, nullptr, HObjectAccess::ForLiteralsPointer());
-
- HInstruction* allocation_site = Add<HLoadKeyed>(
- literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
- NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
-
- IfBuilder checker(this);
- checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
- undefined);
- checker.And();
-
- HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
- AllocationSite::kTransitionInfoOffset);
- HInstruction* boilerplate =
- Add<HLoadNamedField>(allocation_site, nullptr, access);
-
- int length = casted_stub()->length();
- if (length == 0) {
- // Empty objects have some slack added to them.
- length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
- }
- int size = JSObject::kHeaderSize + length * kPointerSize;
- int object_size = size;
- if (FLAG_allocation_site_pretenuring) {
- size += AllocationMemento::kSize;
- }
-
- HValue* boilerplate_map =
- Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
- HValue* boilerplate_size = Add<HLoadNamedField>(
- boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
- HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
- checker.If<HCompareNumericAndBranch>(boilerplate_size,
- size_in_words, Token::EQ);
- checker.Then();
-
- HValue* size_in_bytes = Add<HConstant>(size);
-
- HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
- NOT_TENURED, JS_OBJECT_TYPE);
-
- for (int i = 0; i < object_size; i += kPointerSize) {
- HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
- Add<HStoreNamedField>(object, access,
- Add<HLoadNamedField>(boilerplate, nullptr, access));
- }
-
- DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
- if (FLAG_allocation_site_pretenuring) {
- BuildCreateAllocationMemento(
- object, Add<HConstant>(object_size), allocation_site);
- }
-
- environment()->Push(object);
- checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
- checker.End();
-
- return environment()->Pop();
-}
-
-
-Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
- return DoGenerateCode(this);
-}
-
-
-template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
// This stub is performance sensitive, the generated code must be tuned
// so that it doesn't build an eager frame.
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index c5721646..27712ce 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -3623,6 +3623,82 @@
slot, vector);
}
+void FastCloneShallowObjectStub::GenerateAssembly(
+ compiler::CodeStubAssembler* assembler) const {
+ typedef compiler::CodeStubAssembler::Label Label;
+ typedef compiler::Node Node;
+ Label call_runtime(assembler);
+ Node* closure = assembler->Parameter(0);
+ Node* literals_index = assembler->Parameter(1);
+
+ Node* undefined = assembler->UndefinedConstant();
+ Node* literals_array =
+ assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
+ Node* allocation_site = assembler->LoadFixedArrayElementSmiIndex(
+ literals_array, literals_index,
+ LiteralsArray::kFirstLiteralIndex * kPointerSize);
+ assembler->GotoIf(assembler->WordEqual(allocation_site, undefined),
+ &call_runtime);
+
+ Node* boilerplate = assembler->LoadObjectField(
+ allocation_site, AllocationSite::kTransitionInfoOffset);
+
+ int length = this->length();
+ if (length == 0) {
+ length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
+ }
+ int size = JSObject::kHeaderSize + length * kPointerSize;
+ int object_size = size;
+ if (FLAG_allocation_site_pretenuring) {
+ size += AllocationMemento::kSize;
+ }
+
+ Node* boilerplate_map = assembler->LoadMap(boilerplate);
+ Node* instance_size = assembler->LoadMapInstanceSize(boilerplate_map);
+ Node* size_in_words =
+ assembler->Int32Constant(object_size >> kPointerSizeLog2);
+ assembler->GotoUnless(assembler->Word32Equal(instance_size, size_in_words),
+ &call_runtime);
+
+ Node* copy = assembler->Allocate(size);
+
+ for (int i = 0; i < size; i += kPointerSize) {
+ // The Allocate above guarantees that the copy lies in new space. This
+ // allows us to skip write barriers. This is necessary since we may also be
+ // copying unboxed doubles.
+ Node* field =
+ assembler->LoadObjectField(boilerplate, i, MachineType::IntPtr());
+ assembler->StoreObjectFieldNoWriteBarrier(
+ copy, i, field, MachineType::PointerRepresentation());
+ }
+
+ if (FLAG_allocation_site_pretenuring) {
+ Node* memento = assembler->InnerAllocate(copy, object_size);
+ assembler->StoreObjectFieldNoWriteBarrier(
+ memento, HeapObject::kMapOffset,
+ assembler->LoadRoot(Heap::kAllocationMementoMapRootIndex));
+ assembler->StoreObjectFieldNoWriteBarrier(
+ memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
+ Node* memento_create_count = assembler->LoadObjectField(
+ allocation_site, AllocationSite::kPretenureCreateCountOffset);
+ memento_create_count = assembler->SmiAdd(
+ memento_create_count, assembler->SmiConstant(Smi::FromInt(1)));
+ assembler->StoreObjectFieldNoWriteBarrier(
+ allocation_site, AllocationSite::kPretenureCreateCountOffset,
+ memento_create_count);
+ }
+
+ // TODO(verwaest): Allocate and fill in double boxes.
+ assembler->Return(copy);
+
+ assembler->Bind(&call_runtime);
+ Node* constant_properties = assembler->Parameter(2);
+ Node* flags = assembler->Parameter(3);
+ Node* context = assembler->Parameter(4);
+ assembler->TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
+ literals_index, constant_properties, flags);
+}
+
template<class StateType>
void HydrogenCodeStub::TraceTransition(StateType from, StateType to) {
// Note: Although a no-op transition is semantically OK, it is hinting at a
@@ -3765,14 +3841,6 @@
}
-void FastCloneShallowObjectStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- FastCloneShallowObjectDescriptor call_descriptor(isolate());
- descriptor->Initialize(
- Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry);
-}
-
-
void CreateAllocationSiteStub::InitializeDescriptor(CodeStubDescriptor* d) {}
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 5d9dde4..b2dc0afe3 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -70,7 +70,6 @@
V(FastArrayPush) \
V(FastCloneRegExp) \
V(FastCloneShallowArray) \
- V(FastCloneShallowObject) \
V(FastNewClosure) \
V(FastNewContext) \
V(FastNewObject) \
@@ -122,6 +121,7 @@
V(BitwiseAnd) \
V(BitwiseOr) \
V(BitwiseXor) \
+ V(FastCloneShallowObject) \
V(LessThan) \
V(LessThanOrEqual) \
V(GreaterThan) \
@@ -1106,26 +1106,25 @@
DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowArray, HydrogenCodeStub);
};
-
-class FastCloneShallowObjectStub : public HydrogenCodeStub {
+class FastCloneShallowObjectStub : public TurboFanCodeStub {
public:
// Maximum number of properties in copied object.
static const int kMaximumClonedProperties = 6;
FastCloneShallowObjectStub(Isolate* isolate, int length)
- : HydrogenCodeStub(isolate) {
+ : TurboFanCodeStub(isolate) {
DCHECK_GE(length, 0);
DCHECK_LE(length, kMaximumClonedProperties);
- set_sub_minor_key(LengthBits::encode(length));
+    minor_key_ = LengthBits::encode(length);
}
- int length() const { return LengthBits::decode(sub_minor_key()); }
+ int length() const { return LengthBits::decode(minor_key_); }
private:
class LengthBits : public BitField<int, 0, 4> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowObject);
- DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowObject, HydrogenCodeStub);
+ DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowObject, TurboFanCodeStub);
};
diff --git a/src/compiler/code-stub-assembler.cc b/src/compiler/code-stub-assembler.cc
index 02efcca..424f5b0 100644
--- a/src/compiler/code-stub-assembler.cc
+++ b/src/compiler/code-stub-assembler.cc
@@ -523,6 +523,11 @@
return Load(MachineType::AnyTagged(), object, offset);
}
+Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
+ return Load(MachineType::Uint8(), map,
+ IntPtrConstant(Map::kInstanceSizeOffset - kHeapObjectTag));
+}
+
Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
Node* smi_index,
int additional_offset) {
@@ -704,6 +709,10 @@
limit_address);
}
+Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
+ return IntPtrAdd(previous, IntPtrConstant(offset));
+}
+
Node* CodeStubAssembler::AllocateHeapNumber() {
Node* result = Allocate(HeapNumber::kSize, kNone);
StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
diff --git a/src/compiler/code-stub-assembler.h b/src/compiler/code-stub-assembler.h
index 0e3bfaf..b73c723 100644
--- a/src/compiler/code-stub-assembler.h
+++ b/src/compiler/code-stub-assembler.h
@@ -340,6 +340,8 @@
// Load the hash field of a name.
Node* LoadNameHash(Node* name);
+ // Load the instance size of a Map.
+ Node* LoadMapInstanceSize(Node* map);
// Load an array element from a FixedArray.
Node* LoadFixedArrayElementInt32Index(Node* object, Node* int32_index,
diff --git a/src/full-codegen/arm/full-codegen-arm.cc b/src/full-codegen/arm/full-codegen-arm.cc
index 6c7b9dd..8dcb302 100644
--- a/src/full-codegen/arm/full-codegen-arm.cc
+++ b/src/full-codegen/arm/full-codegen-arm.cc
@@ -1427,6 +1427,7 @@
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/arm64/full-codegen-arm64.cc b/src/full-codegen/arm64/full-codegen-arm64.cc
index 6d1ed37..bf7d36a 100644
--- a/src/full-codegen/arm64/full-codegen-arm64.cc
+++ b/src/full-codegen/arm64/full-codegen-arm64.cc
@@ -1414,6 +1414,7 @@
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/ia32/full-codegen-ia32.cc b/src/full-codegen/ia32/full-codegen-ia32.cc
index 4a09fdb..3d6c8e2 100644
--- a/src/full-codegen/ia32/full-codegen-ia32.cc
+++ b/src/full-codegen/ia32/full-codegen-ia32.cc
@@ -1356,6 +1356,7 @@
__ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/mips/full-codegen-mips.cc b/src/full-codegen/mips/full-codegen-mips.cc
index 1a917a0..3a06532 100644
--- a/src/full-codegen/mips/full-codegen-mips.cc
+++ b/src/full-codegen/mips/full-codegen-mips.cc
@@ -1424,6 +1424,7 @@
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/mips64/full-codegen-mips64.cc b/src/full-codegen/mips64/full-codegen-mips64.cc
index b31e93a..bb1ae8d 100644
--- a/src/full-codegen/mips64/full-codegen-mips64.cc
+++ b/src/full-codegen/mips64/full-codegen-mips64.cc
@@ -1425,6 +1425,7 @@
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/ppc/full-codegen-ppc.cc b/src/full-codegen/ppc/full-codegen-ppc.cc
index 9767faa..7fccb1aa8 100644
--- a/src/full-codegen/ppc/full-codegen-ppc.cc
+++ b/src/full-codegen/ppc/full-codegen-ppc.cc
@@ -1389,6 +1389,7 @@
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/s390/full-codegen-s390.cc b/src/full-codegen/s390/full-codegen-s390.cc
index 5662f1c..34de5c6 100644
--- a/src/full-codegen/s390/full-codegen-s390.cc
+++ b/src/full-codegen/s390/full-codegen-s390.cc
@@ -1348,6 +1348,7 @@
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/x64/full-codegen-x64.cc b/src/full-codegen/x64/full-codegen-x64.cc
index 86e339f..f2850ce 100644
--- a/src/full-codegen/x64/full-codegen-x64.cc
+++ b/src/full-codegen/x64/full-codegen-x64.cc
@@ -1382,6 +1382,7 @@
__ Move(rdx, Smi::FromInt(flags));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/full-codegen/x87/full-codegen-x87.cc b/src/full-codegen/x87/full-codegen-x87.cc
index 8d21143..46be487 100644
--- a/src/full-codegen/x87/full-codegen-x87.cc
+++ b/src/full-codegen/x87/full-codegen-x87.cc
@@ -1348,6 +1348,7 @@
__ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
diff --git a/src/objects.cc b/src/objects.cc
index a1b2d2c..f398057 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -2187,7 +2187,9 @@
} else if (IsFalse()) {
os << "<false>";
} else {
- os << "<Odd Oddball>";
+ os << "<Odd Oddball: ";
+ os << Oddball::cast(this)->to_string()->ToCString().get();
+ os << ">";
}
break;
}