Delete GTF_BLK_VOLATILE and GTF_BLK_UNALIGNED (#84217)
These struct ASG-specific flags are a legacy leftover from the GT_COPYBLK days
and are inconsistent with how non-struct stores are handled.
SingleAccretion authored Apr 3, 2023
1 parent d0913fc commit 24fa97a
Showing 13 changed files with 81 additions and 86 deletions.
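The change is mechanical because block nodes are indirections: they can carry the same `GTF_IND_VOLATILE`/`GTF_IND_UNALIGNED` bits that scalar loads and stores use, and the `IsVolatile()` accessor the diff switches to simply tests that shared bit. A minimal self-contained model of the scheme (flag values are taken from the gentree.h hunk below; `Node`, `isIndir`, and `main` are stand-ins, not RyuJIT code):

```cpp
#include <cassert>

// Flag values copied from the gentree.h hunk in this commit; the rest is a
// sketch that models, not reproduces, the JIT's GenTree accessors.
enum GenTreeFlags : unsigned {
    GTF_EMPTY         = 0x00000000,
    GTF_IND_VOLATILE  = 0x40000000, // any indirection: volatile load/store
    GTF_IND_UNALIGNED = 0x02000000, // any indirection: assume 1-byte alignment
};

struct Node {
    unsigned gtFlags = GTF_EMPTY;
    bool     isIndir = true; // stand-in for OperIsIndir()

    // Assumed shape of the accessors the diff switches to.
    bool IsVolatile() const  { return isIndir && ((gtFlags & GTF_IND_VOLATILE) != 0); }
    bool IsUnaligned() const { return isIndir && ((gtFlags & GTF_IND_UNALIGNED) != 0); }
};

int main() {
    Node store;
    store.gtFlags |= GTF_IND_VOLATILE; // formerly spelled GTF_BLK_VOLATILE -- same bit
    assert(store.IsVolatile() && !store.IsUnaligned());
    return 0;
}
```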
4 changes: 2 additions & 2 deletions src/coreclr/jit/codegenarm.cpp
@@ -841,7 +841,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
     regNumber tmpReg = cpObjNode->ExtractTempReg();
     assert(genIsValidIntReg(tmpReg));
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a full memory barrier before & after a volatile CpObj operation
         instGen_MemoryBarrier();
@@ -888,7 +888,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(gcPtrCount == 0);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a full memory barrier before & after a volatile CpObj operation
         instGen_MemoryBarrier();
6 changes: 3 additions & 3 deletions src/coreclr/jit/codegenarm64.cpp
@@ -3601,7 +3601,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(tmpReg2 != REG_WRITE_BARRIER_SRC_BYREF);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile CpObj operation
         instGen_MemoryBarrier();
@@ -3674,7 +3674,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(gcPtrCount == 0);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a load barrier after a volatile CpObj operation
         instGen_MemoryBarrier(BARRIER_LOAD_ONLY);
@@ -4188,7 +4188,7 @@ void CodeGen::genCodeForStoreInd(GenTreeStoreInd* tree)
     var_types   type = tree->TypeGet();
     instruction ins  = ins_StoreFromSrc(dataReg, type);
 
-    if ((tree->gtFlags & GTF_IND_VOLATILE) != 0)
+    if (tree->IsVolatile())
    {
         bool addrIsInReg   = addr->isUsedFromReg();
         bool addrIsAligned = ((tree->gtFlags & GTF_IND_UNALIGNED) == 0);
6 changes: 3 additions & 3 deletions src/coreclr/jit/codegenarmarch.cpp
@@ -1900,15 +1900,15 @@ void CodeGen::genCodeForCpBlkHelper(GenTreeBlk* cpBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(cpBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile CpBlk operation
         instGen_MemoryBarrier();
     }
 
     genEmitHelperCall(CORINFO_HELP_MEMCPY, 0, EA_UNKNOWN);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a load barrier after a volatile CpBlk operation
         instGen_MemoryBarrier(BARRIER_LOAD_ONLY);
@@ -3193,7 +3193,7 @@ void CodeGen::genCodeForInitBlkHelper(GenTreeBlk* initBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(initBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (initBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (initBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile initBlock Operation
         instGen_MemoryBarrier();
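The arm hunks above only change how volatility is queried; the barrier protocol itself is untouched: a full barrier before a volatile CpBlk, a load-only barrier after. A schematic of that protocol (`emitBarrier` and `emitMemcpyCall` are stand-ins for `instGen_MemoryBarrier` and the `CORINFO_HELP_MEMCPY` helper call, not real emitter APIs):

```cpp
// Schematic only: models the ordering the hunks above preserve.
enum Barrier { BARRIER_FULL, BARRIER_LOAD_ONLY };

void emitBarrier(Barrier) { /* conceptually dmb ish / dmb ishld on arm64 */ }
void emitMemcpyCall()     { /* the CORINFO_HELP_MEMCPY helper call */ }

void emitCpBlkHelper(bool isVolatile) // isVolatile now comes from IsVolatile()
{
    if (isVolatile)
        emitBarrier(BARRIER_FULL);      // full barrier before the copy
    emitMemcpyCall();
    if (isVolatile)
        emitBarrier(BARRIER_LOAD_ONLY); // load barrier after the copy
}
```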
10 changes: 5 additions & 5 deletions src/coreclr/jit/codegenloongarch64.cpp
@@ -2957,7 +2957,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(tmpReg2 != REG_WRITE_BARRIER_SRC_BYREF);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile CpObj operation
         instGen_MemoryBarrier();
@@ -3065,7 +3065,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
         assert(gcPtrCount == 0);
     }
 
-    if (cpObjNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpObjNode->IsVolatile())
     {
         // issue a INS_BARRIER_RMB after a volatile CpObj operation
         // TODO-LOONGARCH64: there is only BARRIER_FULL for LOONGARCH64.
@@ -6432,15 +6432,15 @@ void CodeGen::genCodeForCpBlkHelper(GenTreeBlk* cpBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(cpBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile CpBlk operation
         instGen_MemoryBarrier();
     }
 
     genEmitHelperCall(CORINFO_HELP_MEMCPY, 0, EA_UNKNOWN);
 
-    if (cpBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (cpBlkNode->IsVolatile())
     {
         // issue a INS_BARRIER_RMB after a volatile CpBlk operation
         instGen_MemoryBarrier(BARRIER_FULL);
@@ -6645,7 +6645,7 @@ void CodeGen::genCodeForInitBlkHelper(GenTreeBlk* initBlkNode)
     // genConsumeBlockOp takes care of this for us.
     genConsumeBlockOp(initBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
 
-    if (initBlkNode->gtFlags & GTF_BLK_VOLATILE)
+    if (initBlkNode->IsVolatile())
     {
         // issue a full memory barrier before a volatile initBlock Operation
         instGen_MemoryBarrier();
8 changes: 4 additions & 4 deletions src/coreclr/jit/compiler.cpp
@@ -9918,13 +9918,13 @@ void cTreeFlags(Compiler* comp, GenTree* tree)
            case GT_STORE_BLK:
            case GT_STORE_DYN_BLK:
 
-                if (tree->gtFlags & GTF_BLK_VOLATILE)
+                if (tree->gtFlags & GTF_IND_VOLATILE)
                 {
-                    chars += printf("[BLK_VOLATILE]");
+                    chars += printf("[IND_VOLATILE]");
                 }
-                if (tree->AsBlk()->IsUnaligned())
+                if (tree->gtFlags & GTF_IND_UNALIGNED)
                 {
-                    chars += printf("[BLK_UNALIGNED]");
+                    chars += printf("[IND_UNALIGNED]");
                 }
                 break;
 
4 changes: 2 additions & 2 deletions src/coreclr/jit/compiler.h
@@ -2507,8 +2507,7 @@ class Compiler
 public:
     GenTreeObj* gtNewObjNode(ClassLayout* layout, GenTree* addr);
     GenTreeObj* gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr);
-    GenTree* gtNewStructVal(ClassLayout* layout, GenTree* addr);
-    GenTree* gtNewBlockVal(GenTree* addr, unsigned size);
+    GenTree* gtNewStructVal(ClassLayout* layout, GenTree* addr, GenTreeFlags indirFlags = GTF_EMPTY);
 
     GenTreeCall* gtNewCallNode(gtCallTypes callType,
                                CORINFO_METHOD_HANDLE handle,
@@ -3735,6 +3734,7 @@ class Compiler
 
     static void impValidateMemoryAccessOpcode(const BYTE* codeAddr, const BYTE* codeEndp, bool volatilePrefix);
     static OPCODE impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEndp);
+    static GenTreeFlags impPrefixFlagsToIndirFlags(unsigned prefixFlags);
     static bool impOpcodeIsCallOpcode(OPCODE opcode);
 
 public:
38 changes: 15 additions & 23 deletions src/coreclr/jit/gentree.cpp
@@ -7882,16 +7882,20 @@ GenTreeObj* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
 // gtNewStructVal: Return a node that represents a struct or block value
 //
 // Arguments:
-//    layout - The struct's layout
-//    addr   - The address of the struct
+//    layout     - The struct's layout
+//    addr       - The struct's address
+//    indirFlags - Indirection flags
 //
 // Return Value:
 //    An "OBJ/BLK" node, or "LCL_VAR" node if "addr" points to a local
 //    with a layout compatible with "layout".
 //
-GenTree* Compiler::gtNewStructVal(ClassLayout* layout, GenTree* addr)
+GenTree* Compiler::gtNewStructVal(ClassLayout* layout, GenTree* addr, GenTreeFlags indirFlags)
 {
-    if (addr->OperIs(GT_LCL_VAR_ADDR))
+    assert((indirFlags & ~GTF_IND_FLAGS) == 0);
+
+    bool isVolatile = (indirFlags & GTF_IND_VOLATILE) != 0;
+    if (!isVolatile && addr->OperIs(GT_LCL_VAR_ADDR))
     {
         unsigned   lclNum = addr->AsLclVar()->GetLclNum();
         LclVarDsc* varDsc = lvaGetDesc(lclNum);
@@ -7912,29 +7916,16 @@ GenTree* Compiler::gtNewStructVal(ClassLayout* layout, GenTree* addr)
         blkNode = gtNewObjNode(layout, addr);
     }
 
+    blkNode->gtFlags |= indirFlags;
+    if (isVolatile)
+    {
+        blkNode->gtFlags |= GTF_ORDER_SIDEEFF;
+    }
     blkNode->SetIndirExceptionFlags(this);
 
     return blkNode;
 }
 
-//------------------------------------------------------------------------
-// gtNewBlockVal: Return a node that represents a possibly untyped block value
-//
-// Arguments:
-//    addr - The address of the block
-//    size - The size of the block
-//
-// Return Value:
-//    A block, object or local node that represents the block value pointed to by 'addr'.
-//
-GenTree* Compiler::gtNewBlockVal(GenTree* addr, unsigned size)
-{
-    ClassLayout* layout  = typGetBlkLayout(size);
-    GenTree*     blkNode = gtNewStructVal(layout, addr);
-
-    return blkNode;
-}
-
 //------------------------------------------------------------------------
 // FixupInitBlkValue: Fixup the init value for an initBlk operation
 //
@@ -8208,7 +8199,8 @@ GenTree* Compiler::gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, bool isVolatile)
 
     if (isVolatile)
     {
-        result->gtFlags |= GTF_BLK_VOLATILE;
+        assert(dst->OperIsIndir());
+        dst->gtFlags |= GTF_IND_VOLATILE;
     }
 
 #ifdef FEATURE_SIMD
10 changes: 3 additions & 7 deletions src/coreclr/jit/gentree.h
@@ -483,16 +483,15 @@ enum GenTreeFlags : unsigned int
     GTF_INX_ADDR_NONNULL    = 0x40000000, // GT_INDEX_ADDR -- this array address is not null
 
     GTF_IND_TGT_NOT_HEAP    = 0x80000000, // GT_IND -- the target is not on the heap
-    GTF_IND_VOLATILE        = 0x40000000, // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
-    GTF_IND_NONFAULTING     = 0x20000000, // Operations for which OperIsIndir() is true -- An indir that cannot fault.
+    GTF_IND_VOLATILE        = 0x40000000, // OperIsIndir() -- the load or store must use volatile semantics (this is a nop on X86)
+    GTF_IND_NONFAULTING     = 0x20000000, // OperIsIndir() -- An indir that cannot fault.
     GTF_IND_TGT_HEAP        = 0x10000000, // GT_IND -- the target is on the heap
     GTF_IND_REQ_ADDR_IN_REG = 0x08000000, // GT_IND -- requires its addr operand to be evaluated into a register.
                                           // This flag is useful in cases where it is required to generate register
                                           // indirect addressing mode. One such case is virtual stub calls on xarch.
     GTF_IND_ASG_LHS         = 0x04000000, // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
                                           // assignment; don't evaluate it independently.
-    GTF_IND_UNALIGNED       = 0x02000000, // GT_IND -- the load or store is unaligned (we assume worst case
-                                          // alignment of 1 byte)
+    GTF_IND_UNALIGNED       = 0x02000000, // OperIsIndir() -- the load or store is unaligned (we assume worst case alignment of 1 byte)
     GTF_IND_INVARIANT       = 0x01000000, // GT_IND -- the target is invariant (a prejit indirection)
     GTF_IND_NONNULL         = 0x00400000, // GT_IND -- the indirection never returns null (zero)
 
@@ -556,9 +555,6 @@ enum GenTreeFlags : unsigned int
                                           // of the static field; in both of those cases, the constant
                                           // is what gets flagged.
 
-    GTF_BLK_VOLATILE        = GTF_IND_VOLATILE,  // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is a volatile block operation
-    GTF_BLK_UNALIGNED       = GTF_IND_UNALIGNED, // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an unaligned block operation
-
     GTF_OVERFLOW            = 0x10000000, // Supported for: GT_ADD, GT_SUB, GT_MUL and GT_CAST.
                                           // Requires an overflow check. Use gtOverflow(Ex)() to check this flag.
 
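The deleted definitions above show why the rename is behavior-preserving: `GTF_BLK_VOLATILE` and `GTF_BLK_UNALIGNED` were straight aliases of the `GTF_IND_*` bits, so no bit assignments change anywhere. A self-contained restatement of that claim, using the values from the hunk (the `BLK_*` names are re-declared here only to check the equivalence):

```cpp
// Values copied from the gentree.h hunk above; the BLK_* names are the
// deleted aliases, reproduced solely for this compile-time check.
enum : unsigned {
    GTF_IND_VOLATILE  = 0x40000000,
    GTF_IND_UNALIGNED = 0x02000000,
    GTF_BLK_VOLATILE  = GTF_IND_VOLATILE,
    GTF_BLK_UNALIGNED = GTF_IND_UNALIGNED,
};

static_assert(GTF_BLK_VOLATILE == GTF_IND_VOLATILE, "bit-for-bit alias");
static_assert(GTF_BLK_UNALIGNED == GTF_IND_UNALIGNED, "bit-for-bit alias");
```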
62 changes: 35 additions & 27 deletions src/coreclr/jit/importer.cpp
@@ -5451,6 +5451,21 @@ OPCODE Compiler::impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEndp)
     return CEE_ILLEGAL;
 }
 
+GenTreeFlags Compiler::impPrefixFlagsToIndirFlags(unsigned prefixFlags)
+{
+    GenTreeFlags indirFlags = GTF_EMPTY;
+    if ((prefixFlags & PREFIX_VOLATILE) != 0)
+    {
+        indirFlags |= GTF_IND_VOLATILE;
+    }
+    if ((prefixFlags & PREFIX_UNALIGNED) != 0)
+    {
+        indirFlags |= GTF_IND_UNALIGNED;
+    }
+
+    return indirFlags;
+}
+
 /*****************************************************************************/
 // Checks whether the opcode is a valid opcode for volatile. and unaligned. prefixes

@@ -6586,7 +6601,6 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
                 GenTree*   op3;
                 genTreeOps oper;
-                unsigned   size;
 
                 int val;
 
@@ -10634,10 +10648,11 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
             case CEE_INITBLK:
             case CEE_CPBLK:
-
-                op3 = impPopStack().val; // Size
-                op2 = impPopStack().val; // Value / Src addr
-                op1 = impPopStack().val; // Dst addr
+            {
+                GenTreeFlags indirFlags = impPrefixFlagsToIndirFlags(prefixFlags);
+                op3 = impPopStack().val; // Size
+                op2 = impPopStack().val; // Value / Src addr
+                op1 = impPopStack().val; // Dst addr
 
                 if (op3->IsCnsIntOrI())
                 {
@@ -10656,10 +10671,10 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                         break;
                     }
 
-                    size = static_cast<unsigned>(op3->AsIntConCommon()->IconValue());
-                    op1  = gtNewBlockVal(op1, size);
-                    op2  = opcode == CEE_INITBLK ? op2 : gtNewBlockVal(op2, size);
-                    op1  = gtNewBlkOpNode(op1, op2, (prefixFlags & PREFIX_VOLATILE) != 0);
+                    ClassLayout* layout = typGetBlkLayout(static_cast<unsigned>(op3->AsIntConCommon()->IconValue()));
+                    op1 = gtNewStructVal(layout, op1, indirFlags);
+                    op2 = opcode == CEE_INITBLK ? op2 : gtNewStructVal(layout, op2, indirFlags);
+                    op1 = gtNewBlkOpNode(op1, op2, (indirFlags & GTF_IND_VOLATILE) != 0);
                 }
                 else
                 {
@@ -10677,17 +10692,14 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
 #ifdef TARGET_64BIT
                     // STORE_DYN_BLK takes a native uint size as it turns into call to memcpy.
-                    op3 = gtNewCastNode(TYP_I_IMPL, op3, /* fromUnsigned */ true, TYP_U_IMPL);
+                    op3 = gtNewCastNode(TYP_I_IMPL, op3, /* fromUnsigned */ true, TYP_I_IMPL);
 #endif
 
                     op1 = new (this, GT_STORE_DYN_BLK) GenTreeStoreDynBlk(op1, op2, op3);
 
-                    if ((prefixFlags & PREFIX_VOLATILE) != 0)
-                    {
-                        op1->gtFlags |= GTF_BLK_VOLATILE;
-                    }
+                    op1->gtFlags |= indirFlags;
                 }
 
                 goto SPILL_APPEND;
+            }
 
             case CEE_CPOBJ:
             {
@@ -10735,17 +10747,13 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                     goto STIND;
                 }
 
-                op2 = impPopStack().val; // Value
-                op1 = impPopStack().val; // Ptr
+                GenTreeFlags indirFlags = impPrefixFlagsToIndirFlags(prefixFlags);
+                op2 = impPopStack().val; // Value
+                op1 = impPopStack().val; // Ptr
 
                 assertImp(varTypeIsStruct(op2));
 
-                op1 = impAssignStructPtr(op1, op2, resolvedToken.hClass, CHECK_SPILL_ALL);
-
-                if (op1->OperIsBlkOp() && (prefixFlags & PREFIX_UNALIGNED))
-                {
-                    op1->gtFlags |= GTF_BLK_UNALIGNED;
-                }
+                op1 = gtNewStructVal(typGetObjLayout(resolvedToken.hClass), op1, indirFlags);
+                op1 = impAssignStruct(op1, op2, CHECK_SPILL_ALL);
                 goto SPILL_APPEND;
             }

@@ -10812,10 +10820,10 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
                 op1 = gtNewObjNode(resolvedToken.hClass, op1);
                 op1->gtFlags |= GTF_EXCEPT;
-
-                if (prefixFlags & PREFIX_UNALIGNED)
+                op1->gtFlags |= impPrefixFlagsToIndirFlags(prefixFlags);
+                if (op1->AsIndir()->IsVolatile())
                 {
-                    op1->gtFlags |= GTF_IND_UNALIGNED;
+                    op1->gtFlags |= GTF_ORDER_SIDEEFF;
                 }
 
                 impPushOnStack(op1, tiRetVal);
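With the new helper, every importer site that used to test PREFIX_VOLATILE or PREFIX_UNALIGNED separately now funnels through one translation. A self-contained model of that mapping (the `PREFIX_*` values here are stand-ins; the real constants live in the JIT headers):

```cpp
#include <cassert>

enum : unsigned { PREFIX_VOLATILE = 0x1, PREFIX_UNALIGNED = 0x2 }; // stand-in values

enum GenTreeFlags : unsigned {
    GTF_EMPTY         = 0x00000000,
    GTF_IND_VOLATILE  = 0x40000000, // values from the gentree.h hunk above
    GTF_IND_UNALIGNED = 0x02000000,
};

// Mirrors the logic of impPrefixFlagsToIndirFlags from the importer.cpp hunk above.
unsigned prefixFlagsToIndirFlags(unsigned prefixFlags)
{
    unsigned indirFlags = GTF_EMPTY;
    if ((prefixFlags & PREFIX_VOLATILE) != 0)
        indirFlags |= GTF_IND_VOLATILE;
    if ((prefixFlags & PREFIX_UNALIGNED) != 0)
        indirFlags |= GTF_IND_UNALIGNED;
    return indirFlags;
}

int main() {
    // An "unaligned. volatile. cpblk" sequence carries both prefixes:
    assert(prefixFlagsToIndirFlags(PREFIX_VOLATILE | PREFIX_UNALIGNED) ==
           (GTF_IND_VOLATILE | GTF_IND_UNALIGNED));
    return 0;
}
```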
2 changes: 1 addition & 1 deletion src/coreclr/jit/lower.cpp
@@ -1829,7 +1829,7 @@ GenTree* Lowering::LowerCallMemmove(GenTreeCall* call)
 
         GenTreeBlk* storeBlk = new (comp, GT_STORE_BLK)
             GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, dstAddr, srcBlk, comp->typGetBlkLayout((unsigned)cnsSize));
-        storeBlk->gtFlags |= (GTF_BLK_UNALIGNED | GTF_ASG | GTF_EXCEPT | GTF_GLOB_REF);
+        storeBlk->gtFlags |= (GTF_IND_UNALIGNED | GTF_ASG | GTF_EXCEPT | GTF_GLOB_REF);
 
         // TODO-CQ: Use GenTreeObj::BlkOpKindUnroll here if srcAddr and dstAddr don't overlap, thus, we can
         // unroll this memmove as memcpy - it doesn't require lots of temp registers