Skip to content

Commit

Permalink
Share types on objects with getter/setter properties. If the Accessor…
Browse files Browse the repository at this point in the history
… bit is set in a property's attributes, that indicates the property is a Getter. The Setter is stored in another slot. Use an array of indices to find the setter associated with a given Getter. Add the option of increasing a TypePath's path length without adding another property ID to its property map. Fix the way we use the accessor inline cache, in both runtime code and jitted code. We need to look at the isOnProto bit to determine which object to get the property from, the object pointed to by the cache (if isOnProto is set) or the current local instance. (This wasn't necessary when objects with accessors didn't share types.) While I was in there, I deleted all the machine-dependent lowering code related to flag caches and added code in Lower.cpp for everyone to share.
  • Loading branch information
pleath committed Feb 11, 2018
1 parent 3f76911 commit a848a47
Show file tree
Hide file tree
Showing 29 changed files with 991 additions and 561 deletions.
211 changes: 207 additions & 4 deletions lib/Backend/Lower.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -430,7 +430,7 @@ Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFa
case Js::OpCode::LdMethodFromFlags:
{
Assert(instr->HasBailOutInfo());
bool success = m_lowererMD.GenerateFastLdMethodFromFlags(instr);
bool success = GenerateFastLdMethodFromFlags(instr);
AssertMsg(success, "Not expected to generate helper block here");
break;
}
Expand Down Expand Up @@ -5768,6 +5768,209 @@ Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
return instrPrev;
}

void
Lowerer::GenerateProtoLdFldFromFlagInlineCache(
    IR::Instr * insertBeforeInstr,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Emit the load of an accessor that the flag (accessor) inline cache says
    // lives on a prototype object:
    //
    //     s1  = MOV [&(inlineCache->u.accessor.object)]       -- cached prototype object
    //     s1  = MOV [&s1->auxSlots]                           -- slot array (aux-slot case only)
    //     s2  = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- cached slot index
    //     dst = MOV [s1 + s2 * scale]
    //           JMP $fallthru
    Func * func = this->m_func;

    // Load the cached prototype object out of the accessor cache.
    IR::RegOpnd * cachedObject = IR::RegOpnd::New(TyMachReg, func);
    IR::Opnd * cachedObjectSrc = IR::IndirOpnd::New(
        opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, func);
    InsertMove(cachedObject, cachedObjectSrc, insertBeforeInstr, false);

    IR::RegOpnd * slotArray = nullptr;
    if (!isInlineSlot)
    {
        // Aux-slot properties live in a separately allocated slot array; load it.
        slotArray = IR::RegOpnd::New(TyMachReg, func);
        IR::IndirOpnd * slotArraySrc = IR::IndirOpnd::New(
            cachedObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, func);
        InsertMove(slotArray, slotArraySrc, insertBeforeInstr, false);
    }

    // Load the cached slot index (16-bit field, zero-extended).
    IR::RegOpnd * slotIndex = IR::RegOpnd::New(TyMachReg, func);
    IR::Opnd * slotIndexSrc = IR::IndirOpnd::New(
        opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, func);
    InsertMove(slotIndex, slotIndexSrc, insertBeforeInstr, false);

    // dst = MOV [base + slotIndex * scale]; the base is the prototype object
    // itself for inline slots, or its aux slot array otherwise.
    IR::RegOpnd * slotBase = isInlineSlot ? cachedObject : slotArray;
    IR::IndirOpnd * slotSrc = IR::IndirOpnd::New(
        slotBase, slotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, func);
    InsertMove(opndDst, slotSrc, insertBeforeInstr, false);

    // JMP $fallthru
    InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
}

void
Lowerer::GenerateLocalLdFldFromFlagInlineCache(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Emit the load of an accessor that lives on the local instance itself
    // (the cache's isOnProto bit is clear), so slots are read off opndBase
    // rather than off a cached prototype object:
    //
    //     s1  = MOV [&base->auxSlots]                         -- slot array (aux-slot case only)
    //     s2  = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- cached slot index
    //     dst = MOV [s1 + s2 * scale]
    //           JMP $fallthru
    Func * func = this->m_func;

    IR::RegOpnd * slotArray = nullptr;
    if (!isInlineSlot)
    {
        // Aux-slot properties live in a separately allocated slot array; load
        // it from the local object.
        slotArray = IR::RegOpnd::New(TyMachReg, func);
        IR::IndirOpnd * slotArraySrc = IR::IndirOpnd::New(
            opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, func);
        InsertMove(slotArray, slotArraySrc, insertBeforeInstr, false);
    }

    // Load the cached slot index (16-bit field, zero-extended).
    IR::RegOpnd * slotIndex = IR::RegOpnd::New(TyMachReg, func);
    IR::Opnd * slotIndexSrc = IR::IndirOpnd::New(
        opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, func);
    InsertMove(slotIndex, slotIndexSrc, insertBeforeInstr, false);

    // dst = MOV [base + slotIndex * scale]; the base is the local object for
    // inline slots, or its aux slot array otherwise.
    IR::RegOpnd * slotBase = isInlineSlot ? opndBase : slotArray;
    IR::IndirOpnd * slotSrc = IR::IndirOpnd::New(
        slotBase, slotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, func);
    InsertMove(opndDst, slotSrc, insertBeforeInstr, false);

    // JMP $fallthru
    InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
}

void
Lowerer::GenerateFlagProtoCheck(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelNotOnProto)
{
    // Branch to labelNotOnProto when the accessor cache's isOnProto bit is
    // clear (i.e. the accessor is stored on the local instance):
    //
    //     TEST [&(inlineCache->u.accessor.rawUInt16)], Js::FlagIsOnProto
    //     JEQ  $labelNotOnProto
    IR::Opnd * flagsOpnd = IR::IndirOpnd::New(
        opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
    IR::IntConstOpnd * maskOpnd =
        IR::IntConstOpnd::New(Js::InlineCache::GetIsOnProtoFlagMask(), TyInt8, this->m_func);
    InsertTestBranch(flagsOpnd, maskOpnd, Js::OpCode::BrEq_A, labelNotOnProto, insertBeforeInstr);
}

///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastLdMethodFromFlags
///
/// Make use of the helper to cache the type and slot index used to do a LdFld
/// and do an inline load from the appropriate slot if the type hasn't changed
/// since the last time this LdFld was executed.
///
/// Probes the flag (accessor) inline cache twice — once with the untagged
/// type and once with the tagged type — and on a hit dispatches on the
/// cache's isOnProto bit: the getter is loaded either from the cached
/// prototype object or from the local instance. Any miss bails out.
///
///----------------------------------------------------------------------------

bool
Lowerer::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
{
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * bailOutLabel;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    IR::RegOpnd * opndBase;
    IR::RegOpnd * opndType;
    IR::RegOpnd * opndInlineCache;

    opndSrc = instrLdFld->GetSrc1();

    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected property sym operand as src of LdFldFlags");

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();

    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));

    // When this operand participates in a type-check sequence, load the type
    // into its object type sym so downstream optimizations see the same value.
    if (propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
        StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
        opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
    }
    else
    {
        opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    }

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    opndDst = instrLdFld->GetDst();
    opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);

    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Label to jump to (or fall through to) when bailing out
    bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);

    // Load the runtime inline cache pointer for this property access.
    instrLdFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, opndInlineCache, LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), this->m_func));
    IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Check the flag cache with the untagged type; on a type mismatch fall
    // through to the tagged-type probe at labelFlagAux. On a hit, dispatch on
    // the cache's isOnProto bit: proto path first, local path at
    // labelFlagInlineLocal. (isInlineSlot == true for the untagged probe.)
    GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
    GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
    IR::LabelInstr * labelFlagInlineLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagInlineLocal);
    GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, true);
    instrLdFld->InsertBefore(labelFlagInlineLocal);
    GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);

    // Check the flag cache with the tagged type; a miss here means both
    // probes failed, so bail out. (isInlineSlot == false for the tagged probe.)
    instrLdFld->InsertBefore(labelFlagAux);
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    m_lowererMD.GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
    GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
    IR::LabelInstr * labelFlagAuxLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagAuxLocal);
    GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, false);
    instrLdFld->InsertBefore(labelFlagAuxLocal);
    GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);

    instrLdFld->InsertBefore(bailOutLabel);
    instrLdFld->InsertAfter(labelFallThru);
    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    instrLdFld->UnlinkSrc1();
    GenerateBailOut(instrLdFld);

    return true;
}

///----------------------------------------------------------------------------
///
/// Lowerer::LowerLdFld
Expand Down Expand Up @@ -24388,7 +24591,7 @@ Lowerer::GenerateProtoInlineCacheCheck(
return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
}

IR::BranchInstr *
void
Lowerer::GenerateFlagInlineCacheCheck(
IR::Instr * instrLdSt,
IR::RegOpnd * opndType,
Expand All @@ -24402,8 +24605,8 @@ Lowerer::GenerateFlagInlineCacheCheck(

IR::Opnd* typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.type), TyMachReg, instrLdSt->m_func);

InsertCompare(opndType, typeOpnd, instrLdSt);
return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
// CMP s1, [&(inlineCache->u.flag.type)]
InsertCompareBranch(opndType, typeOpnd, Js::OpCode::BrNeq_A, labelNext, instrLdSt);
}

void
Expand Down
19 changes: 18 additions & 1 deletion lib/Backend/Lower.h
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,24 @@ class Lowerer
bool GenerateLdThisStrict(IR::Instr * instr);
bool GenerateFastIsInst(IR::Instr * instr);

void GenerateProtoLdFldFromFlagInlineCache(
IR::Instr * insertBeforeInstr,
IR::Opnd * opndDst,
IR::RegOpnd * opndInlineCache,
IR::LabelInstr * labelFallThru,
bool isInlineSlot);
void GenerateLocalLdFldFromFlagInlineCache(
IR::Instr * insertBeforeInstr,
IR::RegOpnd * opndBase,
IR::Opnd * opndDst,
IR::RegOpnd * opndInlineCache,
IR::LabelInstr * labelFallThru,
bool isInlineSlot);

void GenerateFlagProtoCheck(IR::Instr * insertBeforeInstr, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelFail);
void GenerateFlagInlineCacheCheck(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelNext);
bool GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld);

void EnsureStackFunctionListStackSym();
void EnsureZeroLastStackFunctionNext();
void AllocStackClosure();
Expand Down Expand Up @@ -611,7 +629,6 @@ class Lowerer
void GenerateLdFldFromFlagInlineCache(IR::Instr * insertBeforeInstr, IR::RegOpnd * opndBase, IR::Opnd * opndDst, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelFallThru, bool isInlineSlot);
static IR::BranchInstr * GenerateLocalInlineCacheCheck(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelNext, bool checkTypeWithoutProperty = false);
static IR::BranchInstr * GenerateProtoInlineCacheCheck(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelNext);
static IR::BranchInstr * GenerateFlagInlineCacheCheck(IR::Instr * instrLdSt, IR::RegOpnd * opndType, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelNext);
static void GenerateLdFldFromLocalInlineCache(IR::Instr * instrLdFld, IR::RegOpnd * opndBase, IR::Opnd * opndDst, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelFallThru, bool isInlineSlot);
static void GenerateLdFldFromProtoInlineCache(IR::Instr * instrLdFld, IR::RegOpnd * opndBase, IR::Opnd * opndDst, IR::RegOpnd * opndInlineCache, IR::LabelInstr * labelFallThru, bool isInlineSlot);

Expand Down
78 changes: 0 additions & 78 deletions lib/Backend/LowerMDShared.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3836,84 +3836,6 @@ LowererMD::GenerateLoadTaggedType(IR::Instr * instrLdSt, IR::RegOpnd * opndType,
}
}

///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastLdMethodFromFlags
///
/// Make use of the helper to cache the type and slot index used to do a LdFld
/// and do an inline load from the appropriate slot if the type hasn't changed
/// since the last time this LdFld was executed.
///
///----------------------------------------------------------------------------

// NOTE(review): this is the machine-dependent copy that this commit removes;
// the commit message states the lowering now lives in Lower.cpp, shared by
// all targets.
bool
LowererMD::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
{
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * bailOutLabel;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    IR::RegOpnd * opndBase;
    IR::RegOpnd * opndType;
    IR::RegOpnd * opndInlineCache;

    opndSrc = instrLdFld->GetSrc1();

    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected property sym operand as src of LdFldFlags");

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();

    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));

    // Reuse the object type sym when the operand is part of a type-check
    // sequence; otherwise use a fresh register.
    if (propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
        StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
        opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
    }
    else
    {
        opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    }

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    opndDst = instrLdFld->GetDst();
    opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);

    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Label to jump to (or fall through to) when bailing out
    bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);

    instrLdFld->InsertBefore(IR::Instr::New(Js::OpCode::MOV, opndInlineCache, m_lowerer->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), this->m_func));
    IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Check the flag cache with the untagged type
    this->m_lowerer->GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
    // Blindly do the check for getter flag first and then do the type check
    // We avoid repeated check for getter flag when the function object may be in either
    // inline slots or auxiliary slots
    this->m_lowerer->GenerateFlagInlineCacheCheckForGetterSetter(instrLdFld, opndInlineCache, bailOutLabel);
    this->m_lowerer->GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
    this->m_lowerer->GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);

    // Check the flag cache with the tagged type
    instrLdFld->InsertBefore(labelFlagAux);
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
    this->m_lowerer->GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
    this->m_lowerer->GenerateLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);

    instrLdFld->InsertBefore(bailOutLabel);
    instrLdFld->InsertAfter(labelFallThru);
    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    instrLdFld->UnlinkSrc1();
    this->m_lowerer->GenerateBailOut(instrLdFld);

    return true;
}

void
LowererMD::GenerateLoadPolymorphicInlineCacheSlot(IR::Instr * instrLdSt, IR::RegOpnd * opndInlineCache, IR::RegOpnd * opndType, uint polymorphicInlineCacheSize)
{
Expand Down
1 change: 0 additions & 1 deletion lib/Backend/LowerMDShared.h
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,6 @@ class LowererMD
void GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode);
IR::RegOpnd * LoadNonnegativeIndex(IR::RegOpnd *indexOpnd, const bool skipNegativeCheck, IR::LabelInstr *const notTaggedIntLabel, IR::LabelInstr *const negativeLabel, IR::Instr *const insertBeforeInstr);
IR::RegOpnd * GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck = true);
bool GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld);
IR::Instr * GenerateFastScopedLdFld(IR::Instr * instrLdFld);
IR::Instr * GenerateFastScopedStFld(IR::Instr * instrStFld);
void GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
Expand Down
7 changes: 5 additions & 2 deletions lib/Backend/ObjTypeSpecFldInfo.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -361,9 +361,12 @@ ObjTypeSpecFldInfo* ObjTypeSpecFldInfo::CreateFrom(uint id, Js::InlineCache* cac
if (type != localCache.u.accessor.type)
{
usesAuxSlot = true;
fieldValue = localCache.u.accessor.object->GetAuxSlot(slotIndex);
if (localCache.u.accessor.isOnProto)
{
fieldValue = localCache.u.accessor.object->GetAuxSlot(slotIndex);
}
}
else
else if (localCache.u.accessor.isOnProto)
{
fieldValue = localCache.u.accessor.object->GetInlineSlot(slotIndex);
}
Expand Down
Loading

0 comments on commit a848a47

Please sign in to comment.