20 changes: 20 additions & 0 deletions src/coreclr/jit/fgbasic.cpp
@@ -368,6 +368,16 @@ void Compiler::fgRemoveEhfSuccessor(BasicBlock* block, const unsigned succIndex)
(succCount - succIndex - 1) * sizeof(FlowEdge*));
}

// If the block has other successors, distribute the removed edge's likelihood among the remaining successor edges.
if (succCount > 1)
{
const weight_t likelihoodIncrease = succEdge->getLikelihood() / (succCount - 1);
Member:

Shouldn't these proportionally scale up?

Say there are 3 successors with likelihoods A 0.1, B 0.1, C 0.8. We remove A. Then we should have B = 0.1111..., C = 0.8888...

With these changes we'd get B = 0.15, C = 0.85, so the relative likelihood of B would increase.

Generally $p_{i,new} = p_{i,old} / (1 - p_{removed})$, unless $p_{removed}$ is 1.0 or close to 1.0, in which case equal distribution seems ok.

Contributor Author:

Good point; fixed

for (unsigned i = 0; i < (succCount - 1); i++)
{
succTab[i]->addLikelihood(likelihoodIncrease);
}
}
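
For readers following the review thread above, here is a minimal standalone sketch (plain C++ with the reviewer's example numbers, not the JIT's FlowEdge API) contrasting the equal split used in this hunk with the proportional rescaling being suggested:

```cpp
#include <cstdio>

// Successor likelihoods A=0.1, B=0.1, C=0.8 sum to 1.0; successor A is removed.
int main()
{
    const double a = 0.1, b = 0.1, c = 0.8;

    // Equal split (as in the hunk above): each survivor gains a / 2.
    printf("equal split:  B=%.4f C=%.4f\n", b + a / 2, c + a / 2);     // 0.1500 0.8500

    // Proportional rescaling (reviewer's suggestion): divide by (1 - removed).
    printf("proportional: B=%.4f C=%.4f\n", b / (1 - a), c / (1 - a)); // 0.1111 0.8889

    return 0;
}
```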

#ifdef DEBUG
// We only expect to see a successor once in the table.
for (unsigned i = succIndex; i < (succCount - 1); i++)
@@ -427,6 +437,16 @@ void Compiler::fgRemoveEhfSuccessor(FlowEdge* succEdge)
}
}

// If the block has other successors, distribute the removed edge's likelihood among the remaining successor edges.
if (succCount > 1)
Member:
Ditto here

{
const weight_t likelihoodIncrease = succEdge->getLikelihood() / (succCount - 1);
for (unsigned i = 0; i < (succCount - 1); i++)
{
succTab[i]->addLikelihood(likelihoodIncrease);
}
}

assert(found);
ehfDesc->bbeCount--;
}
57 changes: 19 additions & 38 deletions src/coreclr/jit/fgehopt.cpp
@@ -1644,27 +1644,28 @@ PhaseStatus Compiler::fgCloneFinally()

for (BasicBlock* const block : Blocks(firstBlock, lastBlock))
{
if (block->hasProfileWeight())
{
weight_t const blockWeight = block->bbWeight;
block->setBBProfileWeight(blockWeight * originalScale);
JITDUMP("Set weight of " FMT_BB " to " FMT_WT "\n", block->bbNum, block->bbWeight);
weight_t const blockWeight = block->bbWeight;
block->setBBProfileWeight(blockWeight * originalScale);
JITDUMP("Set weight of " FMT_BB " to " FMT_WT "\n", block->bbNum, block->bbWeight);

BasicBlock* const clonedBlock = blockMap[block];
clonedBlock->setBBProfileWeight(blockWeight * clonedScale);
JITDUMP("Set weight of " FMT_BB " to " FMT_WT "\n", clonedBlock->bbNum, clonedBlock->bbWeight);
}
BasicBlock* const clonedBlock = blockMap[block];
clonedBlock->setBBProfileWeight(blockWeight * clonedScale);
JITDUMP("Set weight of " FMT_BB " to " FMT_WT "\n", clonedBlock->bbNum, clonedBlock->bbWeight);
}

if (!retargetedAllCalls)
{
JITDUMP(
"Reduced flow out of EH%u needs to be propagated to continuation block(s). Data %s inconsistent.\n",
XTnum, fgPgoConsistent ? "is now" : "was already");
fgPgoConsistent = false;
}
}

// Update flow into normalCallFinallyReturn
if (normalCallFinallyReturn->hasProfileWeight())
{
normalCallFinallyReturn->bbWeight = BB_ZERO_WEIGHT;
for (FlowEdge* const predEdge : normalCallFinallyReturn->PredEdges())
{
normalCallFinallyReturn->increaseBBProfileWeight(predEdge->getLikelyWeight());
}
normalCallFinallyReturn->setBBProfileWeight(normalCallFinallyReturn->computeIncomingWeight());
}
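
A hedged aside on the helper introduced here: judging only from the loop it replaces, computeIncomingWeight presumably sums the likely weights of the block's incoming edges, roughly as sketched below.

```cpp
// Sketch (assumption based on the removed loop above, not the actual
// BasicBlock implementation): sum the likely weight of every pred edge.
weight_t computeIncomingWeightSketch(BasicBlock* block)
{
    weight_t incoming = BB_ZERO_WEIGHT;
    for (FlowEdge* const predEdge : block->PredEdges())
    {
        incoming += predEdge->getLikelyWeight();
    }
    return incoming;
}
```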

// Done!
@@ -2185,33 +2186,13 @@ bool Compiler::fgRetargetBranchesToCanonicalCallFinally(BasicBlock* block,
//
if (block->hasProfileWeight())
{
// Add weight to the canonical call finally pair.
// Add weight to the canonical call-finally.
//
weight_t const canonicalWeight =
canonicalCallFinally->hasProfileWeight() ? canonicalCallFinally->bbWeight : BB_ZERO_WEIGHT;
weight_t const newCanonicalWeight = block->bbWeight + canonicalWeight;

canonicalCallFinally->setBBProfileWeight(newCanonicalWeight);
canonicalCallFinally->increaseBBProfileWeight(block->bbWeight);

BasicBlock* const canonicalLeaveBlock = canonicalCallFinally->Next();

weight_t const canonicalLeaveWeight =
canonicalLeaveBlock->hasProfileWeight() ? canonicalLeaveBlock->bbWeight : BB_ZERO_WEIGHT;
weight_t const newLeaveWeight = block->bbWeight + canonicalLeaveWeight;

canonicalLeaveBlock->setBBProfileWeight(newLeaveWeight);

// Remove weight from the old call finally pair.
// Remove weight from the old call-finally.
//
if (callFinally->hasProfileWeight())
{
callFinally->decreaseBBProfileWeight(block->bbWeight);
}

if (leaveBlock->hasProfileWeight())
{
leaveBlock->decreaseBBProfileWeight(block->bbWeight);
}
callFinally->decreaseBBProfileWeight(block->bbWeight);
}
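
Similarly, the increase/decrease helpers used here appear to fold the removed weight arithmetic and hasProfileWeight checks into single calls; a rough sketch of the assumed semantics, with the decrease presumably clamping at zero:

```cpp
// Sketch only (assumed behavior, not the actual BasicBlock API).
void increaseBBProfileWeightSketch(BasicBlock* block, weight_t delta)
{
    block->setBBProfileWeight(block->bbWeight + delta);
}

void decreaseBBProfileWeightSketch(BasicBlock* block, weight_t delta)
{
    block->setBBProfileWeight(max(BB_ZERO_WEIGHT, block->bbWeight - delta));
}
```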

return true;
30 changes: 2 additions & 28 deletions src/coreclr/jit/fgprofile.cpp
@@ -4787,20 +4787,12 @@ bool Compiler::fgDebugCheckIncomingProfileData(BasicBlock* block, ProfileChecks
weight_t incomingLikelyWeight = 0;
unsigned missingLikelyWeight = 0;
bool foundPreds = false;
bool foundEHPreds = false;

for (FlowEdge* const predEdge : block->PredEdges())
{
if (predEdge->hasLikelihood())
{
if (BasicBlock::sameHndRegion(block, predEdge->getSourceBlock()))
{
incomingLikelyWeight += predEdge->getLikelyWeight();
}
else
{
foundEHPreds = true;
}
incomingLikelyWeight += predEdge->getLikelyWeight();
}
else
{
@@ -4812,29 +4804,11 @@ bool Compiler::fgDebugCheckIncomingProfileData(BasicBlock* block, ProfileChecks
foundPreds = true;
}

// We almost certainly won't get the likelihoods on a BBJ_EHFINALLYRET right,
// so special-case BBJ_CALLFINALLYRET incoming flow.
//
if (block->isBBCallFinallyPairTail())
{
incomingLikelyWeight = block->Prev()->bbWeight;
foundEHPreds = false;
}

// We almost certainly won't get the likelihoods on a BBJ_EHFINALLYRET right,
// so special-case BBJ_CALLFINALLYRET incoming flow.
//
if (block->isBBCallFinallyPairTail())
{
incomingLikelyWeight = block->Prev()->bbWeight;
foundEHPreds = false;
}

bool likelyWeightsValid = true;

// If we have EH preds we may not have consistent incoming flow.
//
if (foundPreds && !foundEHPreds)
if (foundPreds)
{
if (verifyLikelyWeights)
{
47 changes: 20 additions & 27 deletions src/coreclr/jit/fgprofilesynthesis.cpp
@@ -41,6 +41,11 @@ void ProfileSynthesis::Run(ProfileSynthesisOption option)
assert(m_loops != nullptr);
}

if (m_loops->NumLoops() > 0)
{
m_cyclicProbabilities = new (m_comp, CMK_Pgo) weight_t[m_loops->NumLoops()];
}

// Profile synthesis can be run before or after morph, so tolerate (non-)canonical method entries
//
m_entryBlock = (m_comp->opts.IsOSR() && (m_comp->fgEntryBB != nullptr)) ? m_comp->fgEntryBB : m_comp->fgFirstBB;
@@ -168,7 +173,10 @@ void ProfileSynthesis::Run(ProfileSynthesisOption option)
if (m_approximate)
{
JITDUMP("Profile is inconsistent. Bypassing post-phase consistency checks.\n");
m_comp->Metrics.ProfileInconsistentInitially++;
if (!m_comp->fgImportDone)
{
m_comp->Metrics.ProfileInconsistentInitially++;
}
}

// Derive the method's call count from the entry block's weight
@@ -745,13 +753,6 @@ void ProfileSynthesis::RandomizeLikelihoods()
//
void ProfileSynthesis::ComputeCyclicProbabilities()
{
m_cyclicProbabilities = nullptr;
if (m_loops->NumLoops() == 0)
{
return;
}

m_cyclicProbabilities = new (m_comp, CMK_Pgo) weight_t[m_loops->NumLoops()];
// Walk loops in post order to visit inner loops before outer loops.
for (FlowGraphNaturalLoop* loop : m_loops->InPostOrder())
{
@@ -828,10 +829,7 @@ void ProfileSynthesis::ComputeCyclicProbabilities(FlowGraphNaturalLoop* loop)

for (FlowEdge* const edge : nestedLoop->EntryEdges())
{
if (BasicBlock::sameHndRegion(block, edge->getSourceBlock()))
{
newWeight += edge->getLikelyWeight();
}
newWeight += edge->getLikelyWeight();
}

newWeight *= m_cyclicProbabilities[nestedLoop->GetIndex()];
@@ -847,10 +845,10 @@ void ProfileSynthesis::ComputeCyclicProbabilities(FlowGraphNaturalLoop* loop)
{
BasicBlock* const sourceBlock = edge->getSourceBlock();

// Ignore flow across EH, or from preds not in the loop.
// Latter can happen if there are unreachable blocks that flow into the loop.
// Ignore flow from preds not in the loop.
// This can happen if there are unreachable blocks that flow into the loop.
//
if (BasicBlock::sameHndRegion(block, sourceBlock) && loop->ContainsBlock(sourceBlock))
if (loop->ContainsBlock(sourceBlock))
{
newWeight += edge->getLikelyWeight();
}
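
Background for the two hunks above, hedged since the rest of the function is collapsed: a loop's cyclic probability is the factor by which repeated iterations amplify the weight entering the loop, so a header's synthesized weight is roughly the summed entry weight times that factor.

```cpp
#include <algorithm>

// Illustrative sketch only (assumed from the surrounding code, not the exact
// implementation): with total entry weight E and probability p that control
// returns to the header via back edges, the header weight is about E / (1 - p),
// capped so the estimate stays finite as p approaches 1.
double headerWeightSketch(double entryWeight, double backEdgeProbability)
{
    const double cap = 1000.0; // assumed cap, not the JIT's actual constant
    const double cyclicProbability =
        (backEdgeProbability >= 1.0) ? cap : std::min(cap, 1.0 / (1.0 - backEdgeProbability));
    return entryWeight * cyclicProbability;
}
```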
@@ -1214,7 +1212,8 @@ void ProfileSynthesis::GaussSeidelSolver()
const FlowGraphDfsTree* const dfs = m_loops->GetDfsTree();
unsigned const blockCount = dfs->GetPostOrderCount();
bool checkEntryExitWeight = true;
bool showDetails = false;
bool const showDetails = false;
bool const callFinalliesCreated = m_comp->fgImportDone;

JITDUMP("Synthesis solver: flow graph has %u improper loop headers\n", m_improperLoopHeaders);

@@ -1290,9 +1289,10 @@
{
newWeight = block->bbWeight;

// Finallies also add in the weight of their try.
// If we haven't added flow edges into/out of finallies yet,
// add in the weight of their corresponding try regions.
//
if (ehDsc->HasFinallyHandler())
if (!callFinalliesCreated && ehDsc->HasFinallyHandler())
{
newWeight += countVector[ehDsc->ebdTryBeg->bbNum];
}
@@ -1318,11 +1318,7 @@
for (FlowEdge* const edge : loop->EntryEdges())
{
BasicBlock* const predBlock = edge->getSourceBlock();

if (BasicBlock::sameHndRegion(block, predBlock))
{
newWeight += edge->getLikelihood() * countVector[predBlock->bbNum];
}
newWeight += edge->getLikelihood() * countVector[predBlock->bbNum];
}

// Scale by cyclic probability
@@ -1354,10 +1350,7 @@
continue;
}

if (BasicBlock::sameHndRegion(block, predBlock))
{
newWeight += edge->getLikelihood() * countVector[predBlock->bbNum];
}
newWeight += edge->getLikelihood() * countVector[predBlock->bbNum];
}

if (selfEdge != nullptr)
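Taken together, the solver hunks above simply stop filtering predecessors by handler region; the per-block update they perform is essentially the one sketched below (countVector as the current weight iterate indexed by block number is an assumption of the sketch; the real solver handles self-edges and loop headers separately, as the hunks show).

```cpp
// Sketch of one Gauss-Seidel update for an ordinary (non-loop-header) block,
// mirroring the hunks above once the sameHndRegion filter is removed.
weight_t blockUpdateSketch(BasicBlock* block, const weight_t* countVector)
{
    weight_t newWeight = BB_ZERO_WEIGHT;
    for (FlowEdge* const edge : block->PredEdges())
    {
        BasicBlock* const predBlock = edge->getSourceBlock();
        newWeight += edge->getLikelihood() * countVector[predBlock->bbNum];
    }
    return newWeight;
}
```
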
45 changes: 42 additions & 3 deletions src/coreclr/jit/importer.cpp
@@ -12576,8 +12576,9 @@ void Compiler::impImport()
//
void Compiler::impFixPredLists()
{
unsigned XTnum = 0;
bool added = false;
unsigned XTnum = 0;
bool added = false;
const bool usingProfileWeights = fgIsUsingProfileWeights();

for (EHblkDsc* HBtab = compHndBBtab; XTnum < compHndBBtabCount; XTnum++, HBtab++)
{
@@ -12586,6 +12587,7 @@ void Compiler::impFixPredLists()
BasicBlock* const finallyBegBlock = HBtab->ebdHndBeg;
BasicBlock* const finallyLastBlock = HBtab->ebdHndLast;
unsigned predCount = (unsigned)-1;
const weight_t finallyWeight = finallyBegBlock->bbWeight;

for (BasicBlock* const finallyBlock : BasicBlockRangeList(finallyBegBlock, finallyLastBlock))
{
@@ -12629,7 +12631,8 @@ void Compiler::impFixPredLists()
jumpEhf->bbeCount = predCount;
jumpEhf->bbeSuccs = new (this, CMK_FlowEdge) FlowEdge*[predCount];

unsigned predNum = 0;
unsigned predNum = 0;
weight_t remainingLikelihood = 1.0;
for (BasicBlock* const predBlock : finallyBegBlock->PredBlocks())
{
// We only care about preds that are callfinallies.
@@ -12643,6 +12646,22 @@
FlowEdge* const newEdge = fgAddRefPred(continuation, finallyBlock);
newEdge->setLikelihood(1.0 / predCount);

if (usingProfileWeights && (finallyWeight != BB_ZERO_WEIGHT))
{
// Derive edge likelihood from the entry block's weight relative to other entries.
//
const weight_t callFinallyWeight = predBlock->bbWeight;
const weight_t likelihood = min(callFinallyWeight / finallyWeight, 1.0);
newEdge->setLikelihood(min(likelihood, remainingLikelihood));
remainingLikelihood = max(BB_ZERO_WEIGHT, remainingLikelihood - likelihood);
}
else
{
// If we don't have profile data, evenly distribute the likelihoods.
//
newEdge->setLikelihood(1.0 / predCount);
}

jumpEhf->bbeSuccs[predNum] = newEdge;
++predNum;

@@ -12657,6 +12676,26 @@

finallyBlock->SetEhfTargets(jumpEhf);
}

if (usingProfileWeights)
{
// Compute new flow into the finally region's continuation successors.
//
bool profileConsistent = true;
for (BasicBlock* const callFinally : finallyBegBlock->PredBlocks())
{
BasicBlock* const callFinallyRet = callFinally->Next();
callFinallyRet->setBBProfileWeight(callFinallyRet->computeIncomingWeight());
profileConsistent &= fgProfileWeightsEqual(callFinally->bbWeight, callFinallyRet->bbWeight);
}

if (!profileConsistent)
{
JITDUMP("Flow into finally handler EH%u does not match outgoing flow. Data %s inconsistent.\n",
XTnum, fgPgoConsistent ? "is now" : "was already");
fgPgoConsistent = false;
}
}
}
}
}
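
A quick worked example of the new likelihood derivation above, using made-up weights rather than anything from this PR: with a finally header weight of 10 and two callfinally preds weighing 6 and 7, the min()/max() clamps keep each likelihood at most 1.0 and the running total at most 1.0.

```cpp
#include <algorithm>
#include <cstdio>

// Worked example (hypothetical weights) of the likelihood derivation above.
int main()
{
    const double finallyWeight        = 10.0;
    const double callFinallyWeights[] = {6.0, 7.0}; // deliberately noisy: they sum past the header weight
    double       remainingLikelihood  = 1.0;

    for (double callFinallyWeight : callFinallyWeights)
    {
        const double likelihood = std::min(callFinallyWeight / finallyWeight, 1.0);
        const double assigned   = std::min(likelihood, remainingLikelihood);
        remainingLikelihood     = std::max(0.0, remainingLikelihood - likelihood);
        printf("assigned likelihood %.2f (remaining %.2f)\n", assigned, remainingLikelihood);
    }
    // Prints 0.60 then 0.40, so the remaining successor likelihoods still sum to 1.0.
    return 0;
}
```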