From 24fa97ab99b4a63f638f63a90ae83c4b80772114 Mon Sep 17 00:00:00 2001
From: SingleAccretion <62474226+SingleAccretion@users.noreply.github.com>
Date: Mon, 3 Apr 2023 12:07:40 +0300
Subject: [PATCH] Delete GTF_BLK_VOLATILE and GTF_BLK_UNALIGNED (#84217)

These struct ASG-specific flags are a legacy leftover from the GT_COPYBLK
days, inconsistent with how non-struct stores are handled. Block operations
now use the shared GTF_IND_VOLATILE / GTF_IND_UNALIGNED flags, queried via
the IsVolatile() / IsUnaligned() accessors.
---
 src/coreclr/jit/codegenarm.cpp         |  4 +-
 src/coreclr/jit/codegenarm64.cpp       |  6 +--
 src/coreclr/jit/codegenarmarch.cpp     |  6 +--
 src/coreclr/jit/codegenloongarch64.cpp | 10 ++---
 src/coreclr/jit/compiler.cpp           |  8 ++--
 src/coreclr/jit/compiler.h             |  4 +-
 src/coreclr/jit/gentree.cpp            | 38 +++++++---------
 src/coreclr/jit/gentree.h              | 10 ++---
 src/coreclr/jit/importer.cpp           | 62 +++++++++++++++-----------
 src/coreclr/jit/lower.cpp              |  2 +-
 src/coreclr/jit/morphblock.cpp         | 12 ++---
 src/coreclr/jit/rationalize.cpp        |  3 +-
 src/coreclr/jit/simdashwintrinsic.cpp  |  2 +-
 13 files changed, 81 insertions(+), 86 deletions(-)

diff --git a/src/coreclr/jit/codegenarm.cpp b/src/coreclr/jit/codegenarm.cpp
index 2a49dbdd60db6..b83ba8042763b 100644
--- a/src/coreclr/jit/codegenarm.cpp
+++ b/src/coreclr/jit/codegenarm.cpp
@@ -841,7 +841,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
     regNumber tmpReg = cpObjNode->ExtractTempReg();
     assert(genIsValidIntReg(tmpReg));
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a full memory barrier before & after a volatile CpObj operation
         instGen_MemoryBarrier();
@@ -888,7 +888,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(gcPtrCount == 0);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a full memory barrier before & after a volatile CpObj operation
         instGen_MemoryBarrier();
diff --git a/src/coreclr/jit/codegenarm64.cpp b/src/coreclr/jit/codegenarm64.cpp
index 63a5f3a452d04..479c5c14668a4 100644
--- a/src/coreclr/jit/codegenarm64.cpp
+++ b/src/coreclr/jit/codegenarm64.cpp
@@ -3601,7 +3601,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(tmpReg2 != REG_WRITE_BARRIER_SRC_BYREF);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
    {
         // issue a full memory barrier before a volatile CpObj operation
         instGen_MemoryBarrier();
@@ -3674,7 +3674,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(gcPtrCount == 0);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a load barrier after a volatile CpObj operation
         instGen_MemoryBarrier(BARRIER_LOAD_ONLY);
@@ -4188,7 +4188,7 @@ void CodeGen::genCodeForStoreInd(GenTreeStoreInd* tree)
         var_types   type = tree->TypeGet();
         instruction ins  = ins_StoreFromSrc(dataReg, type);
 
-        if ((tree->gtFlags & GTF_IND_VOLATILE) != 0)
+        if (tree->IsVolatile())
         {
             bool addrIsInReg   = addr->isUsedFromReg();
             bool addrIsAligned = ((tree->gtFlags & GTF_IND_UNALIGNED) == 0);
diff --git a/src/coreclr/jit/codegenarmarch.cpp b/src/coreclr/jit/codegenarmarch.cpp
index a93183c31f2cc..f6026fb9afe91 100644
--- a/src/coreclr/jit/codegenarmarch.cpp
+++ b/src/coreclr/jit/codegenarmarch.cpp
@@ -1900,7 +1900,7 @@ void CodeGen::genCodeForCpBlkHelper(GenTreeBlk* cpBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(cpBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile CpBlk operation
         instGen_MemoryBarrier();
@@ -1908,7 +1908,7 @@ void CodeGen::genCodeForCpBlkHelper(GenTreeBlk* cpBlkNode)
 
     genEmitHelperCall(CORINFO_HELP_MEMCPY, 0, EA_UNKNOWN);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a load barrier after a volatile CpBlk operation
         instGen_MemoryBarrier(BARRIER_LOAD_ONLY);
@@ -3193,7 +3193,7 @@ void CodeGen::genCodeForInitBlkHelper(GenTreeBlk* initBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(initBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (initBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (initBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile initBlock Operation
         instGen_MemoryBarrier();
diff --git a/src/coreclr/jit/codegenloongarch64.cpp b/src/coreclr/jit/codegenloongarch64.cpp
index 0498378155d51..403f19e49b895 100644
--- a/src/coreclr/jit/codegenloongarch64.cpp
+++ b/src/coreclr/jit/codegenloongarch64.cpp
@@ -2957,7 +2957,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(tmpReg2 != REG_WRITE_BARRIER_SRC_BYREF);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile CpObj operation
         instGen_MemoryBarrier();
@@ -3065,7 +3065,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(gcPtrCount == 0);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a INS_BARRIER_RMB after a volatile CpObj operation
         // TODO-LOONGARCH64: there is only BARRIER_FULL for LOONGARCH64.
@@ -6432,7 +6432,7 @@ void CodeGen::genCodeForCpBlkHelper(GenTreeBlk* cpBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(cpBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile CpBlk operation
         instGen_MemoryBarrier();
@@ -6440,7 +6440,7 @@ void CodeGen::genCodeForCpBlkHelper(GenTreeBlk* cpBlkNode)
 
     genEmitHelperCall(CORINFO_HELP_MEMCPY, 0, EA_UNKNOWN);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a INS_BARRIER_RMB after a volatile CpBlk operation
         instGen_MemoryBarrier(BARRIER_FULL);
@@ -6645,7 +6645,7 @@ void CodeGen::genCodeForInitBlkHelper(GenTreeBlk* initBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(initBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (initBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (initBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile initBlock Operation
         instGen_MemoryBarrier();
diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp
index f683ae458bd2d..8134c167bd167 100644
--- a/src/coreclr/jit/compiler.cpp
+++ b/src/coreclr/jit/compiler.cpp
@@ -9918,13 +9918,13 @@ void cTreeFlags(Compiler* comp, GenTree* tree)
 
             case GT_STORE_BLK:
             case GT_STORE_DYN_BLK:
-                if (tree->gtFlags & GTF_BLK_VOLATILE)
+                if (tree->gtFlags & GTF_IND_VOLATILE)
                 {
-                    chars += printf("[BLK_VOLATILE]");
+                    chars += printf("[IND_VOLATILE]");
                 }
-                if (tree->AsBlk()->IsUnaligned())
+                if (tree->gtFlags & GTF_IND_UNALIGNED)
                 {
-                    chars += printf("[BLK_UNALIGNED]");
+                    chars += printf("[IND_UNALIGNED]");
                 }
                 break;
diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h
index b86128982c48d..517a5fae972c9 100644
--- a/src/coreclr/jit/compiler.h
+++ b/src/coreclr/jit/compiler.h
@@ -2507,8 +2507,7 @@ class Compiler
 public:
     GenTreeObj* gtNewObjNode(ClassLayout* layout, GenTree* addr);
     GenTreeObj* gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr);
-    GenTree*    gtNewStructVal(ClassLayout* layout, GenTree* addr);
-    GenTree*    gtNewBlockVal(GenTree* addr, unsigned size);
+    GenTree*    gtNewStructVal(ClassLayout* layout, GenTree* addr, GenTreeFlags indirFlags = GTF_EMPTY);
 
     GenTreeCall* gtNewCallNode(gtCallTypes           callType,
                                CORINFO_METHOD_HANDLE handle,
@@ -3735,6 +3734,7 @@ class Compiler
     static void   impValidateMemoryAccessOpcode(const BYTE* codeAddr, const BYTE* codeEndp, bool volatilePrefix);
     static OPCODE impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEndp);
+    static GenTreeFlags impPrefixFlagsToIndirFlags(unsigned prefixFlags);
     static bool   impOpcodeIsCallOpcode(OPCODE opcode);
 
 public:
diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index 54f896c550e1e..e2a336fcfef6c 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -7882,16 +7882,20 @@ GenTreeObj* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr
 // gtNewStructVal: Return a node that represents a struct or block value
 //
 // Arguments:
-//    layout - The struct's layout
-//    addr   - The address of the struct
+//    layout     - The struct's layout
+//    addr       - The struct's address
+//    indirFlags - Indirection flags
 //
 // Return Value:
 //    An "OBJ/BLK" node, or "LCL_VAR" node if "addr" points to a local
 //    with a layout compatible with "layout".
 //
-GenTree* Compiler::gtNewStructVal(ClassLayout* layout, GenTree* addr)
+GenTree* Compiler::gtNewStructVal(ClassLayout* layout, GenTree* addr, GenTreeFlags indirFlags)
 {
-    if (addr->OperIs(GT_LCL_VAR_ADDR))
+    assert((indirFlags & ~GTF_IND_FLAGS) == 0);
+
+    bool isVolatile = (indirFlags & GTF_IND_VOLATILE) != 0;
+    if (!isVolatile && addr->OperIs(GT_LCL_VAR_ADDR))
     {
         unsigned   lclNum = addr->AsLclVar()->GetLclNum();
         LclVarDsc* varDsc = lvaGetDesc(lclNum);
@@ -7912,29 +7916,16 @@ GenTree* Compiler::gtNewStructVal(ClassLayout* layout, GenTree* addr)
         blkNode = gtNewObjNode(layout, addr);
     }
 
+    blkNode->gtFlags |= indirFlags;
+    if (isVolatile)
+    {
+        blkNode->gtFlags |= GTF_ORDER_SIDEEFF;
+    }
     blkNode->SetIndirExceptionFlags(this);
 
     return blkNode;
 }
 
-//------------------------------------------------------------------------
-// gtNewBlockVal: Return a node that represents a possibly untyped block value
-//
-// Arguments:
-//    addr - The address of the block
-//    size - The size of the block
-//
-// Return Value:
-//    A block, object or local node that represents the block value pointed to by 'addr'.
-//
-GenTree* Compiler::gtNewBlockVal(GenTree* addr, unsigned size)
-{
-    ClassLayout* layout  = typGetBlkLayout(size);
-    GenTree*     blkNode = gtNewStructVal(layout, addr);
-
-    return blkNode;
-}
-
 //------------------------------------------------------------------------
 // FixupInitBlkValue: Fixup the init value for an initBlk operation
 //
@@ -8208,7 +8199,8 @@ GenTree* Compiler::gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, bool isVo
 
     if (isVolatile)
     {
-        result->gtFlags |= GTF_BLK_VOLATILE;
+        assert(dst->OperIsIndir());
+        dst->gtFlags |= GTF_IND_VOLATILE;
     }
 
 #ifdef FEATURE_SIMD
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index 353269f5a25ed..be7699074135e 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -483,16 +483,15 @@ enum GenTreeFlags : unsigned int
     GTF_INX_ADDR_NONNULL        = 0x40000000, // GT_INDEX_ADDR -- this array address is not null
 
     GTF_IND_TGT_NOT_HEAP        = 0x80000000, // GT_IND -- the target is not on the heap
-    GTF_IND_VOLATILE            = 0x40000000, // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
-    GTF_IND_NONFAULTING         = 0x20000000, // Operations for which OperIsIndir() is true -- An indir that cannot fault.
+    GTF_IND_VOLATILE            = 0x40000000, // OperIsIndir() -- the load or store must use volatile semantics (this is a nop on X86)
+    GTF_IND_NONFAULTING         = 0x20000000, // OperIsIndir() -- An indir that cannot fault.
     GTF_IND_TGT_HEAP            = 0x10000000, // GT_IND -- the target is on the heap
     GTF_IND_REQ_ADDR_IN_REG     = 0x08000000, // GT_IND -- requires its addr operand to be evaluated into a register.
                                               // This flag is useful in cases where it is required to generate register
                                               // indirect addressing mode. One such case is virtual stub calls on xarch.
     GTF_IND_ASG_LHS             = 0x04000000, // GT_IND -- this GT_IND node is (the effective val of) the LHS of an
                                               // assignment; don't evaluate it independently.
-    GTF_IND_UNALIGNED           = 0x02000000, // GT_IND -- the load or store is unaligned (we assume worst case
-                                              // alignment of 1 byte)
+    GTF_IND_UNALIGNED           = 0x02000000, // OperIsIndir() -- the load or store is unaligned (we assume worst case alignment of 1 byte)
     GTF_IND_INVARIANT           = 0x01000000, // GT_IND -- the target is invariant (a prejit indirection)
     GTF_IND_NONNULL             = 0x00400000, // GT_IND -- the indirection never returns null (zero)
 
@@ -556,9 +555,6 @@ enum GenTreeFlags : unsigned int
                                               // of the static field; in both of those cases, the constant
                                               // is what gets flagged.
 
-    GTF_BLK_VOLATILE            = GTF_IND_VOLATILE,  // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is a volatile block operation
-    GTF_BLK_UNALIGNED           = GTF_IND_UNALIGNED, // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an unaligned block operation
-
     GTF_OVERFLOW                = 0x10000000, // Supported for: GT_ADD, GT_SUB, GT_MUL and GT_CAST.
                                               // Requires an overflow check. Use gtOverflow(Ex)() to check this flag.
 
diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp
index e472822245f39..e6f0287dd6583 100644
--- a/src/coreclr/jit/importer.cpp
+++ b/src/coreclr/jit/importer.cpp
@@ -5451,6 +5451,21 @@ OPCODE Compiler::impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEnd
     return CEE_ILLEGAL;
 }
 
+GenTreeFlags Compiler::impPrefixFlagsToIndirFlags(unsigned prefixFlags)
+{
+    GenTreeFlags indirFlags = GTF_EMPTY;
+    if ((prefixFlags & PREFIX_VOLATILE) != 0)
+    {
+        indirFlags |= GTF_IND_VOLATILE;
+    }
+    if ((prefixFlags & PREFIX_UNALIGNED) != 0)
+    {
+        indirFlags |= GTF_IND_UNALIGNED;
+    }
+
+    return indirFlags;
+}
+
 /*****************************************************************************/
 // Checks whether the opcode is a valid opcode for volatile. and unaligned. prefixes
 
@@ -6586,7 +6601,6 @@ void Compiler::impImportBlockCode(BasicBlock* block)
         GenTree*     op3;
         genTreeOps   oper;
-        unsigned     size;
 
         int val;
 
@@ -10634,10 +10648,11 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
             case CEE_INITBLK:
             case CEE_CPBLK:
-
-                op3 = impPopStack().val; // Size
-                op2 = impPopStack().val; // Value / Src addr
-                op1 = impPopStack().val; // Dst addr
+            {
+                GenTreeFlags indirFlags = impPrefixFlagsToIndirFlags(prefixFlags);
+                op3                     = impPopStack().val; // Size
+                op2                     = impPopStack().val; // Value / Src addr
+                op1                     = impPopStack().val; // Dst addr
 
                 if (op3->IsCnsIntOrI())
                 {
@@ -10656,10 +10671,10 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                         break;
                     }
 
-                    size = static_cast<unsigned>(op3->AsIntConCommon()->IconValue());
-                    op1  = gtNewBlockVal(op1, size);
-                    op2  = opcode == CEE_INITBLK ? op2 : gtNewBlockVal(op2, size);
-                    op1  = gtNewBlkOpNode(op1, op2, (prefixFlags & PREFIX_VOLATILE) != 0);
+                    ClassLayout* layout = typGetBlkLayout(static_cast<unsigned>(op3->AsIntConCommon()->IconValue()));
+                    op1 = gtNewStructVal(layout, op1, indirFlags);
+                    op2 = opcode == CEE_INITBLK ? op2 : gtNewStructVal(layout, op2, indirFlags);
+                    op1 = gtNewBlkOpNode(op1, op2, (indirFlags & GTF_IND_VOLATILE) != 0);
                 }
                 else
                 {
@@ -10677,17 +10692,14 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 #ifdef TARGET_64BIT
                     // STORE_DYN_BLK takes a native uint size as it turns into call to memcpy.
                     op3 = gtNewCastNode(TYP_I_IMPL, op3, /* fromUnsigned */ true, TYP_U_IMPL);
 #endif
 
                     op1 = new (this, GT_STORE_DYN_BLK) GenTreeStoreDynBlk(op1, op2, op3);
-
-                    if ((prefixFlags & PREFIX_VOLATILE) != 0)
-                    {
-                        op1->gtFlags |= GTF_BLK_VOLATILE;
-                    }
+                    op1->gtFlags |= indirFlags;
                 }
 
                 goto SPILL_APPEND;
+            }
 
             case CEE_CPOBJ:
             {
@@ -10735,17 +10747,13 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                     goto STIND;
                 }
 
-                op2 = impPopStack().val; // Value
-                op1 = impPopStack().val; // Ptr
+                GenTreeFlags indirFlags = impPrefixFlagsToIndirFlags(prefixFlags);
+                op2                     = impPopStack().val; // Value
+                op1                     = impPopStack().val; // Ptr
                 assertImp(varTypeIsStruct(op2));
-
-                op1 = impAssignStructPtr(op1, op2, resolvedToken.hClass, CHECK_SPILL_ALL);
-
-                if (op1->OperIsBlkOp() && (prefixFlags & PREFIX_UNALIGNED))
-                {
-                    op1->gtFlags |= GTF_BLK_UNALIGNED;
-                }
+                op1 = gtNewStructVal(typGetObjLayout(resolvedToken.hClass), op1, indirFlags);
+                op1 = impAssignStruct(op1, op2, CHECK_SPILL_ALL);
 
                 goto SPILL_APPEND;
             }
@@ -10812,10 +10820,10 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
                 op1 = gtNewObjNode(resolvedToken.hClass, op1);
                 op1->gtFlags |= GTF_EXCEPT;
-
-                if (prefixFlags & PREFIX_UNALIGNED)
+                op1->gtFlags |= impPrefixFlagsToIndirFlags(prefixFlags);
+                if (op1->AsIndir()->IsVolatile())
                 {
-                    op1->gtFlags |= GTF_IND_UNALIGNED;
+                    op1->gtFlags |= GTF_ORDER_SIDEEFF;
                 }
 
                 impPushOnStack(op1, tiRetVal);
diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index cc171c71e21af..766683d692aa0 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -1829,7 +1829,7 @@ GenTree* Lowering::LowerCallMemmove(GenTreeCall* call)
             GenTreeBlk* storeBlk = new (comp, GT_STORE_BLK)
                 GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, dstAddr, srcBlk, comp->typGetBlkLayout((unsigned)cnsSize));
-            storeBlk->gtFlags |= (GTF_BLK_UNALIGNED | GTF_ASG | GTF_EXCEPT | GTF_GLOB_REF);
+            storeBlk->gtFlags |= (GTF_IND_UNALIGNED | GTF_ASG | GTF_EXCEPT | GTF_GLOB_REF);
 
             // TODO-CQ: Use GenTreeObj::BlkOpKindUnroll here if srcAddr and dstAddr don't overlap, thus, we can
             // unroll this memmove as memcpy - it doesn't require lots of temp registers
diff --git a/src/coreclr/jit/morphblock.cpp b/src/coreclr/jit/morphblock.cpp
index ee1973db24f89..b4922a84bc759 100644
--- a/src/coreclr/jit/morphblock.cpp
+++ b/src/coreclr/jit/morphblock.cpp
@@ -888,15 +888,15 @@ void MorphCopyBlockHelper::MorphStructCases()
     }
 
 #if defined(TARGET_ARM)
-    if ((m_src->OperIsIndir()) && (m_src->gtFlags & GTF_IND_UNALIGNED))
+    if ((m_dst->OperIsIndir()) && m_dst->AsIndir()->IsUnaligned())
     {
-        JITDUMP(" src is unaligned");
+        JITDUMP(" store is unaligned");
         requiresCopyBlock = true;
     }
 
-    if (m_asg->gtFlags & GTF_BLK_UNALIGNED)
+    if ((m_src->OperIsIndir()) && m_src->AsIndir()->IsUnaligned())
     {
-        JITDUMP(" m_asg is unaligned");
+        JITDUMP(" src is unaligned");
         requiresCopyBlock = true;
     }
 #endif // TARGET_ARM
@@ -1621,7 +1621,7 @@ GenTree* Compiler::fgMorphStoreDynBlock(GenTreeStoreDynBlk* tree)
         if ((size != 0) && FitsIn<int32_t>(size))
         {
             ClassLayout* layout = typGetBlkLayout(static_cast<unsigned>(size));
-            GenTree*     dst    = gtNewStructVal(layout, tree->Addr());
+            GenTree*     dst    = gtNewStructVal(layout, tree->Addr(), tree->gtFlags & GTF_IND_FLAGS);
             dst->gtFlags |= GTF_GLOB_REF;
 
             GenTree* src = tree->Data();
@@ -1633,7 +1633,7 @@ GenTree* Compiler::fgMorphStoreDynBlock(GenTreeStoreDynBlk* tree)
             }
 
             GenTree* asg = gtNewAssignNode(dst, src);
-            asg->gtFlags |= (tree->gtFlags & (GTF_ALL_EFFECT | GTF_BLK_VOLATILE | GTF_BLK_UNALIGNED));
+
            asg->AddAllEffectsFlags(tree);
             INDEBUG(asg->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED);
 
             fgAssignSetVarDef(asg);
diff --git a/src/coreclr/jit/rationalize.cpp b/src/coreclr/jit/rationalize.cpp
index 198cb75e590e1..46c5d5fd9638f 100644
--- a/src/coreclr/jit/rationalize.cpp
+++ b/src/coreclr/jit/rationalize.cpp
@@ -450,8 +450,7 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
                     GenTree::OpName(storeOper));
             storeBlk->SetOperRaw(storeOper);
             storeBlk->gtFlags &= ~GTF_DONT_CSE;
-            storeBlk->gtFlags |=
-                (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_BLK_VOLATILE | GTF_BLK_UNALIGNED | GTF_DONT_CSE));
+            storeBlk->gtFlags |= (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_DONT_CSE));
             storeBlk->AsBlk()->Data() = value;
 
             // Remove the block node from its current position and replace the assignment node with it
diff --git a/src/coreclr/jit/simdashwintrinsic.cpp b/src/coreclr/jit/simdashwintrinsic.cpp
index e6d2bc59de5d2..836cd0a209512 100644
--- a/src/coreclr/jit/simdashwintrinsic.cpp
+++ b/src/coreclr/jit/simdashwintrinsic.cpp
@@ -2241,7 +2241,7 @@ GenTree* Compiler::impSimdAsHWIntrinsicSpecial(NamedIntrinsic intrinsic,
                 // GTF_ALL_EFFECT for the dest. This is currently emulating the previous behavior of
                 // block ops.
 
-                GenTree* dest = gtNewBlockVal(copyBlkDst, simdSize);
+                GenTree* dest = gtNewStructVal(typGetBlkLayout(simdSize), copyBlkDst);
 
                 dest->gtType = simdType;
                 dest->gtFlags |= GTF_GLOB_REF;