diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h
index 7d407d9643b41e..d34a9b827a29d2 100644
--- a/src/coreclr/jit/compiler.h
+++ b/src/coreclr/jit/compiler.h
@@ -4239,7 +4239,6 @@ class Compiler
     void impImportBlockCode(BasicBlock* block);
 
     void impReimportMarkBlock(BasicBlock* block);
-    void impReimportMarkSuccessors(BasicBlock* block);
 
     void impVerifyEHBlock(BasicBlock* block, bool isTryStart);
 
diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp
index 995068c10aef62..8d8135d239ecb2 100644
--- a/src/coreclr/jit/importer.cpp
+++ b/src/coreclr/jit/importer.cpp
@@ -11043,20 +11043,6 @@ inline void Compiler::impReimportMarkBlock(BasicBlock* block)
     block->bbFlags &= ~BBF_IMPORTED;
 }
 
-/*****************************************************************************
- *  Mark the successors of the given block as unimported.
- *  Note that the caller is responsible for calling impImportBlockPending()
- *  for all the successors, with the appropriate stack-state.
- */
-
-void Compiler::impReimportMarkSuccessors(BasicBlock* block)
-{
-    for (BasicBlock* const succBlock : block->Succs())
-    {
-        impReimportMarkBlock(succBlock);
-    }
-}
-
 /*****************************************************************************
  *
  *  Filter wrapper to handle only passed in exception code
@@ -11379,38 +11365,35 @@ void Compiler::impImportBlock(BasicBlock* block)
             baseTmp = impGetSpillTmpBase(block);
         }
 
-        /* Spill all stack entries into temps */
-        unsigned level, tempNum;
+        // Spill all stack entries into temps
 
         JITDUMP("\nSpilling stack entries into temps\n");
-        for (level = 0, tempNum = baseTmp; level < verCurrentState.esStackDepth; level++, tempNum++)
+        for (unsigned level = 0, tempNum = baseTmp; level < verCurrentState.esStackDepth; level++, tempNum++)
        {
             GenTree* tree = verCurrentState.esStack[level].val;
 
-            /* VC generates code where it pushes a byref from one branch, and an int (ldc.i4 0) from
-               the other. This should merge to a byref in unverifiable code.
-               However, if the branch which leaves the TYP_I_IMPL on the stack is imported first, the
-               successor would be imported assuming there was a TYP_I_IMPL on
-               the stack. Thus the value would not get GC-tracked. Hence,
-               change the temp to TYP_BYREF and reimport the successors.
-               Note: We should only allow this in unverifiable code.
-            */
-            if (tree->gtType == TYP_BYREF && lvaTable[tempNum].lvType == TYP_I_IMPL)
+            // VC generates code where it pushes a byref from one branch, and an int (ldc.i4 0) from
+            // the other. This should merge to a byref in unverifiable code.
+            // However, if the branch which leaves the TYP_I_IMPL on the stack is imported first, the
+            // successor would be imported assuming there was a TYP_I_IMPL on
+            // the stack. Thus the value would not get GC-tracked. Hence,
+            // change the temp to TYP_BYREF and reimport the clique.
+            LclVarDsc* tempDsc = lvaGetDesc(tempNum);
+            if (tree->TypeIs(TYP_BYREF) && (tempDsc->TypeGet() == TYP_I_IMPL))
             {
-                lvaTable[tempNum].lvType = TYP_BYREF;
-                impReimportMarkSuccessors(block);
-                markImport = true;
+                tempDsc->lvType = TYP_BYREF;
+                reimportSpillClique = true;
             }
 
 #ifdef TARGET_64BIT
-            if (genActualType(tree->gtType) == TYP_I_IMPL && lvaTable[tempNum].lvType == TYP_INT)
+            if ((genActualType(tree) == TYP_I_IMPL) && (tempDsc->TypeGet() == TYP_INT))
             {
                 // Some other block in the spill clique set this to "int", but now we have "native int".
                 // Change the type and go back to re-import any blocks that used the wrong type.
-                lvaTable[tempNum].lvType = TYP_I_IMPL;
-                reimportSpillClique = true;
+                tempDsc->lvType = TYP_I_IMPL;
+                reimportSpillClique = true;
             }
-            else if (genActualType(tree->gtType) == TYP_INT && lvaTable[tempNum].lvType == TYP_I_IMPL)
+            else if ((genActualType(tree) == TYP_INT) && (tempDsc->TypeGet() == TYP_I_IMPL))
             {
                 // Spill clique has decided this should be "native int", but this block only pushes an "int".
                 // Insert a sign-extension to "native int" so we match the clique.
@@ -11425,14 +11408,14 @@ void Compiler::impImportBlock(BasicBlock* block)
             // imported already, we need to change the type of the local and reimport the spill clique.
             // If the 'byref' side has imported, we insert a cast from int to 'native int' to match
             // the 'byref' size.
-            if (genActualType(tree->gtType) == TYP_BYREF && lvaTable[tempNum].lvType == TYP_INT)
+            if ((genActualType(tree) == TYP_BYREF) && (tempDsc->TypeGet() == TYP_INT))
             {
                 // Some other block in the spill clique set this to "int", but now we have "byref".
                 // Change the type and go back to re-import any blocks that used the wrong type.
-                lvaTable[tempNum].lvType = TYP_BYREF;
-                reimportSpillClique = true;
+                tempDsc->lvType = TYP_BYREF;
+                reimportSpillClique = true;
             }
-            else if (genActualType(tree->gtType) == TYP_INT && lvaTable[tempNum].lvType == TYP_BYREF)
+            else if ((genActualType(tree) == TYP_INT) && (tempDsc->TypeGet() == TYP_BYREF))
             {
                 // Spill clique has decided this should be "byref", but this block only pushes an "int".
                 // Insert a sign-extension to "native int" so we match the clique size.
@@ -11441,14 +11424,14 @@ void Compiler::impImportBlock(BasicBlock* block)
 
 #endif // TARGET_64BIT
 
-            if (tree->gtType == TYP_DOUBLE && lvaTable[tempNum].lvType == TYP_FLOAT)
+            if (tree->TypeIs(TYP_DOUBLE) && (tempDsc->lvType == TYP_FLOAT))
             {
                 // Some other block in the spill clique set this to "float", but now we have "double".
                 // Change the type and go back to re-import any blocks that used the wrong type.
-                lvaTable[tempNum].lvType = TYP_DOUBLE;
-                reimportSpillClique = true;
+                tempDsc->lvType = TYP_DOUBLE;
+                reimportSpillClique = true;
             }
-            else if (tree->gtType == TYP_FLOAT && lvaTable[tempNum].lvType == TYP_DOUBLE)
+            else if (tree->TypeIs(TYP_FLOAT) && (tempDsc->TypeGet() == TYP_DOUBLE))
             {
                 // Spill clique has decided this should be "double", but this block only pushes a "float".
                 // Insert a cast to "double" so we match the clique.
@@ -11459,11 +11442,11 @@ void Compiler::impImportBlock(BasicBlock* block)
                are spilling to the temps already used by a previous block),
                we need to spill addStmt */
 
-            if (addStmt != nullptr && !newTemps && gtHasRef(addStmt->GetRootNode(), tempNum))
+            if ((addStmt != nullptr) && !newTemps && gtHasRef(addStmt->GetRootNode(), tempNum))
             {
                 GenTree* addTree = addStmt->GetRootNode();
 
-                if (addTree->gtOper == GT_JTRUE)
+                if (addTree->OperIs(GT_JTRUE))
                 {
                     GenTree* relOp = addTree->AsOp()->gtOp1;
                     assert(relOp->OperIsCompare());
@@ -11488,7 +11471,7 @@ void Compiler::impImportBlock(BasicBlock* block)
                 }
                 else
                 {
-                    assert(addTree->gtOper == GT_SWITCH && genActualTypeIsIntOrI(addTree->AsOp()->gtOp1->TypeGet()));
+                    assert(addTree->OperIs(GT_SWITCH) && genActualTypeIsIntOrI(addTree->AsOp()->gtOp1->TypeGet()));
                     unsigned temp = lvaGrabTemp(true DEBUGARG("spill addStmt SWITCH"));
                     impStoreTemp(temp, addTree->AsOp()->gtOp1, level);
 
diff --git a/src/coreclr/jit/lclvars.cpp b/src/coreclr/jit/lclvars.cpp
index e1a23da69d4a56..57b4f164fd444c 100644
--- a/src/coreclr/jit/lclvars.cpp
+++ b/src/coreclr/jit/lclvars.cpp
@@ -4146,7 +4146,6 @@ void Compiler::lvaMarkLclRefs(GenTree* tree, BasicBlock* block, Statement* stmt,
 
     // Check that the LCL_VAR node has the same type as the underlying variable, save a few mismatches we allow.
     assert(tree->TypeIs(varDsc->TypeGet(), genActualType(varDsc)) ||
-           (tree->TypeIs(TYP_I_IMPL) && (varDsc->TypeGet() == TYP_BYREF)) || // Created for spill clique import.
            (tree->TypeIs(TYP_BYREF) && (varDsc->TypeGet() == TYP_I_IMPL)) || // Created by inliner substitution.
           (tree->TypeIs(TYP_INT) && (varDsc->TypeGet() == TYP_LONG)));      // Created by "optNarrowTree".
 }
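For context, the diff routes every case where a shared spill temp must be widened (int vs. native int, int/native int vs. byref, float vs. double) through the existing reimportSpillClique flag instead of the removed impReimportMarkSuccessors/markImport path. The standalone C++ sketch below models only that reconciliation rule under simplified assumptions; VarType, SpillTemp, and ReconcileSpillType are hypothetical illustration-only names, not coreclr types or APIs.

// A minimal sketch, assuming a simplified model of spill-temp type reconciliation.
// All names here are hypothetical and exist only for illustration.
#include <cstdio>

enum class VarType
{
    Int,       // cf. TYP_INT
    NativeInt, // cf. TYP_I_IMPL
    ByRef,     // cf. TYP_BYREF
    Float,     // cf. TYP_FLOAT
    Double     // cf. TYP_DOUBLE
};

struct SpillTemp
{
    VarType type; // type the spill clique currently agrees on
};

// Returns true when the shared temp had to be widened to accept the pushed value;
// in the real importer that corresponds to setting "reimportSpillClique = true" so
// blocks that already used the narrower type are re-imported. Narrowing cases
// return false: the clique keeps its type and the block casts the value it pushes.
static bool ReconcileSpillType(SpillTemp& temp, VarType pushed)
{
    const bool widenToByRef =
        (pushed == VarType::ByRef) && (temp.type == VarType::NativeInt || temp.type == VarType::Int);
    const bool widenToNativeInt = (pushed == VarType::NativeInt) && (temp.type == VarType::Int);
    const bool widenToDouble = (pushed == VarType::Double) && (temp.type == VarType::Float);

    if (widenToByRef || widenToNativeInt || widenToDouble)
    {
        temp.type = pushed;
        return true;
    }
    return false;
}

int main()
{
    // One predecessor pushed "int" first; another later pushes a byref into the
    // same spill temp, so the temp widens and the clique must be re-imported.
    SpillTemp temp{VarType::Int};
    bool reimport = ReconcileSpillType(temp, VarType::ByRef);
    std::printf("reimport=%d widenedToByRef=%d\n", reimport, temp.type == VarType::ByRef);
    return 0;
}

Widening always flags a re-import because blocks that already spilled through the temp assumed the narrower, possibly untracked type; narrowing keeps the clique's type and casts the pushed value, mirroring the cast-insertion paths the patch leaves unchanged.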