diff --git a/src/coreclr/jit/hwintrinsiccodegenxarch.cpp b/src/coreclr/jit/hwintrinsiccodegenxarch.cpp
index d2fba55f424d9c..1057660e6ae287 100644
--- a/src/coreclr/jit/hwintrinsiccodegenxarch.cpp
+++ b/src/coreclr/jit/hwintrinsiccodegenxarch.cpp
@@ -2001,6 +2001,8 @@ void CodeGen::genBaseIntrinsic(GenTreeHWIntrinsic* node, insOpts instOptions)
             assert(genStackLevel == 0);
 #endif // !FEATURE_FIXED_OUT_ARGS
 
+            assert(op2->TypeIs(TYP_I_IMPL));
+
             regNumber indexReg = op2->GetRegNum();
             regNumber valueReg = op3->GetRegNum(); // New element value to be stored
 
@@ -2035,6 +2037,8 @@ void CodeGen::genBaseIntrinsic(GenTreeHWIntrinsic* node, insOpts instOptions)
                 simdType = TYP_SIMD16;
             }
 
+            assert(op2->TypeIs(TYP_I_IMPL));
+
             // Optimize the case of op1 is in memory and trying to access i'th element.
             if (!op1->isUsedFromReg())
             {
diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index 9e52cd07f388ff..f8c3a7bc0975e3 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -11682,6 +11682,40 @@ GenTree* Lowering::InsertNewSimdCreateScalarUnsafeNode(var_types simdType,
     }
     return result;
 }
+
+//----------------------------------------------------------------------------------------------
+// Lowering::NormalizeIndexToNativeSized:
+//    Prepare to use an index for address calculations by ensuring it is native-sized.
+//
+// Arguments:
+//    index - The index that may be an int32
+//
+// Returns:
+//    The node itself, or a cast added on top of the node to perform normalization.
+//
+// Remarks:
+//    May insert a cast or may bash the node type in place for constants. Does
+//    not replace the use.
+//
+GenTree* Lowering::NormalizeIndexToNativeSized(GenTree* index)
+{
+    if (genActualType(index) == TYP_I_IMPL)
+    {
+        return index;
+    }
+
+    if (index->OperIsConst())
+    {
+        index->gtType = TYP_I_IMPL;
+        return index;
+    }
+    else
+    {
+        GenTree* cast = comp->gtNewCastNode(TYP_I_IMPL, index, true, TYP_I_IMPL);
+        BlockRange().InsertAfter(index, cast);
+        return cast;
+    }
+}
 #endif // FEATURE_HW_INTRINSICS
 
 //----------------------------------------------------------------------------------------------
diff --git a/src/coreclr/jit/lower.h b/src/coreclr/jit/lower.h
index b3afd8cceaeb28..d923a40f04af7f 100644
--- a/src/coreclr/jit/lower.h
+++ b/src/coreclr/jit/lower.h
@@ -463,6 +463,7 @@ class Lowering final : public Phase
                                                  GenTree*    op1,
                                                  CorInfoType simdBaseJitType,
                                                  unsigned    simdSize);
+    GenTree* NormalizeIndexToNativeSized(GenTree* index);
 #endif // FEATURE_HW_INTRINSICS
 
     // Utility functions
diff --git a/src/coreclr/jit/lowerarmarch.cpp b/src/coreclr/jit/lowerarmarch.cpp
index 5fd53c25fbb137..f1d450661bdbcd 100644
--- a/src/coreclr/jit/lowerarmarch.cpp
+++ b/src/coreclr/jit/lowerarmarch.cpp
@@ -1684,6 +1684,9 @@ GenTree* Lowering::LowerHWIntrinsic(GenTreeHWIntrinsic* node)
             GenTree* op1 = node->Op(1);
             GenTree* op2 = node->Op(2);
 
+            op2         = NormalizeIndexToNativeSized(op2);
+            node->Op(2) = op2;
+
             bool isContainableMemory = IsContainableMemoryOp(op1) && IsSafeToContainMem(node, op1);
 
             if (isContainableMemory || !op2->OperIsConst())
diff --git a/src/coreclr/jit/lowerxarch.cpp b/src/coreclr/jit/lowerxarch.cpp
index f44117ba0ce00e..4dac70f9e6998a 100644
--- a/src/coreclr/jit/lowerxarch.cpp
+++ b/src/coreclr/jit/lowerxarch.cpp
@@ -4890,6 +4890,9 @@ GenTree* Lowering::LowerHWIntrinsicGetElement(GenTreeHWIntrinsic* node)
         return LowerNode(node);
     }
 
+    op2         = NormalizeIndexToNativeSized(op2);
+    node->Op(2) = op2;
+
     uint32_t elemSize = genTypeSize(simdBaseType);
     uint32_t count    = simdSize / elemSize;
 
@@ -5252,6 +5255,12 @@ GenTree* Lowering::LowerHWIntrinsicGetElement(GenTreeHWIntrinsic* node)
         {
            // We specially handle float and double for more efficient codegen
            resIntrinsic = NI_Vector128_GetElement;
+           // GetElement takes a native-sized index after lowering, so change
+           // the type of the constant we inserted above.
+           // (This is generally only needed for the non-constant index case,
+           // which is not the case here, but it keeps the index operand's
+           // type consistent.)
+           op2->gtType = TYP_I_IMPL;
            break;
         }
 
@@ -5337,6 +5346,8 @@ GenTree* Lowering::LowerHWIntrinsicWithElement(GenTreeHWIntrinsic* node)
 
     if (!op2->OperIsConst())
     {
+        op2         = NormalizeIndexToNativeSized(op2);
+        node->Op(2) = op2;
         // We will specially handle WithElement in codegen when op2 isn't a constant
         ContainCheckHWIntrinsic(node);
         return node->gtNext;
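
For context, a minimal standalone C++ sketch (not part of the patch, and deliberately free of JIT types) of what NormalizeIndexToNativeSized arranges for a non-constant index: the 32-bit index is widened to the native pointer width before it participates in the element-address calculation. It assumes the inserted cast is zero-extending, as suggested by the fromUnsigned argument passed to gtNewCastNode; GetElementSketch is a hypothetical helper used only for illustration.

// Standalone sketch: widen the 32-bit index to native width first, so that
// the scale-and-add for the element address happens in native-sized arithmetic,
// mirroring the TYP_INT -> TYP_I_IMPL cast the lowering inserts.
#include <cstdint>
#include <cstdio>

static float GetElementSketch(const float* base, int32_t index)
{
    // Normalize the index to a native-sized value (zero-extension assumed here)...
    uintptr_t nativeIndex = static_cast<uint32_t>(index);
    // ...then compute the element address with the native-sized index.
    return *(base + nativeIndex);
}

int main()
{
    float v[4] = {1.0f, 2.0f, 3.0f, 4.0f};
    printf("%f\n", GetElementSketch(v, 2)); // prints 3.000000
    return 0;
}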