Merged

29 commits
363dbee
missing pieces
VSadov Jun 23, 2022
8ad2f99
fixes
VSadov Jun 23, 2022
0e8ef45
pThreadToHijack
VSadov Jun 24, 2022
c393715
NATIVE_CONTEXT
VSadov Jun 29, 2022
815cfb2
at safe point
VSadov Jun 29, 2022
09af4f1
enable TestConcurrentIsBackgroundProperty
VSadov Jun 30, 2022
22851f4
couple tweaks
VSadov Jun 30, 2022
349f25f
fixes after rebasing
VSadov Jul 2, 2022
819706d
temporarily do not handle multireg returns
VSadov Jul 2, 2022
d808039
support single reg return hijack
VSadov Jul 6, 2022
0154a90
disable return hijacking for ARM64
VSadov Jul 6, 2022
2499330
enable safe point suspend
VSadov Jul 6, 2022
8406314
enable multireg
VSadov Jul 6, 2022
9c7c9ab
make gcc happy
VSadov Jul 7, 2022
c3c3f1a
fixes for win-x64
VSadov Jul 7, 2022
464f3a9
Do not wait for Unhijack.
VSadov Jul 7, 2022
cf113a5
crude check for VirtualUnwind issue in inline suspend and some tweaks
VSadov Jul 8, 2022
88adef7
disable hijacking on ARM64/UNIX
VSadov Jul 8, 2022
a16f63e
preserve xmm1
VSadov Jul 9, 2022
59b9b0b
IsUnwindable check
VSadov Jul 9, 2022
d9af8d6
detect not-unwindable code, at least the most common cases.
VSadov Jul 12, 2022
ee8a3d4
IsUnwindable tweaks
VSadov Jul 12, 2022
c78a93d
detect epilogs
VSadov Jul 17, 2022
21bcb53
hijack in epilogues.
VSadov Jul 17, 2022
e23afbb
PR feedback (refactoring)
VSadov Jul 17, 2022
a477f94
Check the trap flag in `HijackCallback` (optimization)
VSadov Jul 17, 2022
e3c7ad4
Apply suggestions from code review
VSadov Jul 18, 2022
626a983
PR feedback (refactor/renames)
VSadov Jul 18, 2022
460a3ce
handle "int 3" as not unwindable, not hijackable.
VSadov Jul 18, 2022
1 change: 1 addition & 0 deletions src/coreclr/nativeaot/Runtime/CMakeLists.txt
@@ -175,6 +175,7 @@ else()
unix/cgroup.cpp
unix/HardwareExceptions.cpp
unix/UnixContext.cpp
unix/UnixSignals.cpp
unix/UnwindHelpers.cpp
unix/UnixNativeCodeManager.cpp
../libunwind/src/Unwind-EHABI.cpp
44 changes: 40 additions & 4 deletions src/coreclr/nativeaot/Runtime/ICodeManager.h
@@ -28,7 +28,7 @@ enum GCRefKind : unsigned char
GCRK_Scalar = 0x00,
GCRK_Object = 0x01,
GCRK_Byref = 0x02,
#ifdef TARGET_ARM64
#ifdef TARGET_64BIT
// Composite return kinds for value types returned in two registers (encoded with two bits per register)
GCRK_Scalar_Obj = (GCRK_Object << 2) | GCRK_Scalar,
GCRK_Obj_Obj = (GCRK_Object << 2) | GCRK_Object,
@@ -66,20 +66,54 @@ inline GCRefKind TransitionFrameFlagsToReturnKind(uint64_t transFrameFlags)
return returnKind;
}

// Extract individual GCRefKind components from a composite return kind
inline GCRefKind ExtractReg0ReturnKind(GCRefKind returnKind)
inline GCRefKind ExtractReg1ReturnKind(GCRefKind returnKind)
{
ASSERT(returnKind <= GCRK_LastValid);
return (GCRefKind)(returnKind & (GCRK_Object | GCRK_Byref));
return (GCRefKind)(returnKind >> 2);
}

#elif defined(TARGET_AMD64)

// Verify that we can use bitwise shifts to convert from GCRefKind to PInvokeTransitionFrameFlags and back
C_ASSERT(PTFF_RAX_IS_GCREF == ((uint64_t)GCRK_Object << 16));
C_ASSERT(PTFF_RAX_IS_BYREF == ((uint64_t)GCRK_Byref << 16));
C_ASSERT(PTFF_RDX_IS_GCREF == ((uint64_t)GCRK_Scalar_Obj << 16));
C_ASSERT(PTFF_RDX_IS_BYREF == ((uint64_t)GCRK_Scalar_Byref << 16));

inline uint64_t ReturnKindToTransitionFrameFlags(GCRefKind returnKind)
{
if (returnKind == GCRK_Scalar)
return 0;

return PTFF_SAVE_RAX | PTFF_SAVE_RDX | ((uint64_t)returnKind << 16);
}

inline GCRefKind TransitionFrameFlagsToReturnKind(uint64_t transFrameFlags)
{
GCRefKind returnKind = (GCRefKind)((transFrameFlags & (PTFF_RAX_IS_GCREF | PTFF_RAX_IS_BYREF | PTFF_RDX_IS_GCREF | PTFF_RDX_IS_BYREF)) >> 16);
#if defined(TARGET_UNIX)
ASSERT((returnKind == GCRK_Scalar) || ((transFrameFlags & PTFF_SAVE_RAX) && (transFrameFlags & PTFF_SAVE_RDX)));
#else
ASSERT((returnKind == GCRK_Scalar) || (transFrameFlags & PTFF_SAVE_RAX));
#endif
return returnKind;
}

inline GCRefKind ExtractReg1ReturnKind(GCRefKind returnKind)
{
ASSERT(returnKind <= GCRK_LastValid);
return (GCRefKind)(returnKind >> 2);
}

#endif // TARGET_ARM64

// Extract individual GCRefKind components from a composite return kind
inline GCRefKind ExtractReg0ReturnKind(GCRefKind returnKind)
{
ASSERT(returnKind <= GCRK_LastValid);
return (GCRefKind)(returnKind & (GCRK_Object | GCRK_Byref));
}

Review comment (Member): ExtractReg1ReturnKind can be moved here as well.
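The composite return kinds above pack one GCRefKind per return register into two-bit fields, which is what lets ExtractReg0ReturnKind and ExtractReg1ReturnKind work with just a mask and a shift. The following is a minimal, self-contained sketch of that round trip; it restates the enum values from this header purely for illustration and is not part of the diff.

```cpp
// Self-contained sketch of the two-bit-per-register return-kind encoding used above.
// The enum values are restated from this header only so the example compiles on its own.
#include <cassert>

enum GCRefKind : unsigned char
{
    GCRK_Scalar = 0x00,
    GCRK_Object = 0x01,
    GCRK_Byref  = 0x02,
    // Composite kinds: bits 0-1 describe the first return register, bits 2-3 the second.
    GCRK_Scalar_Obj   = (GCRK_Object << 2) | GCRK_Scalar,   // 0b0100
    GCRK_Obj_Obj      = (GCRK_Object << 2) | GCRK_Object,   // 0b0101
    GCRK_Scalar_Byref = (GCRK_Byref  << 2) | GCRK_Scalar,   // 0b1000
};

GCRefKind ExtractReg0ReturnKind(GCRefKind kind) { return (GCRefKind)(kind & (GCRK_Object | GCRK_Byref)); }
GCRefKind ExtractReg1ReturnKind(GCRefKind kind) { return (GCRefKind)(kind >> 2); }

int main()
{
    // GCRK_Scalar_Obj: first return register is a scalar, second holds an object reference.
    assert(ExtractReg0ReturnKind(GCRK_Scalar_Obj) == GCRK_Scalar);
    assert(ExtractReg1ReturnKind(GCRK_Scalar_Obj) == GCRK_Object);

    // GCRK_Scalar_Byref: second register holds a byref.
    assert(ExtractReg1ReturnKind(GCRK_Scalar_Byref) == GCRK_Byref);
    return 0;
}
```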

//
// MethodInfo is placeholder type used to allocate space for MethodInfo. Maximum size
// of the actual method should be less or equal to the placeholder size.
@@ -162,6 +196,8 @@ class ICodeManager
virtual uintptr_t GetConservativeUpperBoundForOutgoingArgs(MethodInfo * pMethodInfo,
REGDISPLAY * pRegisterSet) = 0;

virtual bool IsUnwindable(PTR_VOID pvAddress) = 0;

virtual bool GetReturnAddressHijackInfo(MethodInfo * pMethodInfo,
REGDISPLAY * pRegisterSet, // in
PTR_PTR_VOID * ppvRetAddrLocation, // out
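The new IsUnwindable virtual gives the suspension path a way to ask whether the interrupted IP is at a spot the unwinder can describe; the commits above ("detect epilogs", handle "int 3" as not unwindable) list the cases it is meant to reject. Below is a hypothetical caller-side sketch, not the PR's actual suspension code; CanStopForGCAt is an invented name.

```cpp
// Hypothetical caller-side use of the new ICodeManager::IsUnwindable hook.
// CanStopForGCAt is an invented helper name; the PR's real suspension logic is not
// shown in full in this diff.
bool CanStopForGCAt(ICodeManager* pCodeManager, PTR_VOID ipAtInterrupt)
{
    // Interrupted in an epilog, in "int 3" padding, or in other code the unwinder
    // cannot describe: report false so the caller lets the thread run to the next
    // safe point instead of hijacking or stack-walking at this exact IP.
    return pCodeManager->IsUnwindable(ipAtInterrupt);
}
```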
14 changes: 10 additions & 4 deletions src/coreclr/nativeaot/Runtime/PalRedhawk.h
@@ -596,8 +596,6 @@ REDHAWK_PALIMPORT bool REDHAWK_PALAPI PalGetCompleteThreadContext(HANDLE hThread
REDHAWK_PALIMPORT bool REDHAWK_PALAPI PalSetThreadContext(HANDLE hThread, _Out_ CONTEXT * pCtx);
REDHAWK_PALIMPORT void REDHAWK_PALAPI PalRestoreContext(CONTEXT * pCtx);

REDHAWK_PALIMPORT bool REDHAWK_PALAPI PalGetThreadContext(HANDLE hThread, _Out_ PAL_LIMITED_CONTEXT * pCtx);

REDHAWK_PALIMPORT int32_t REDHAWK_PALAPI PalGetProcessCpuCount();

// Retrieves the entire range of memory dedicated to the calling thread's stack. This does
@@ -683,6 +681,13 @@ REDHAWK_PALIMPORT uint64_t REDHAWK_PALAPI PalGetTickCount64();
REDHAWK_PALIMPORT void REDHAWK_PALAPI PalTerminateCurrentProcess(uint32_t exitCode);
REDHAWK_PALIMPORT HANDLE REDHAWK_PALAPI PalGetModuleHandleFromPointer(_In_ void* pointer);

#ifdef TARGET_UNIX
struct UNIX_CONTEXT;
#define NATIVE_CONTEXT UNIX_CONTEXT
#else
#define NATIVE_CONTEXT CONTEXT
#endif

#ifdef TARGET_UNIX
REDHAWK_PALIMPORT void REDHAWK_PALAPI PalSetHardwareExceptionHandler(PHARDWARE_EXCEPTION_HANDLER handler);
#else
@@ -693,8 +698,9 @@ typedef uint32_t (__stdcall *BackgroundCallback)(_In_opt_ void* pCallbackContext
REDHAWK_PALIMPORT bool REDHAWK_PALAPI PalStartBackgroundGCThread(_In_ BackgroundCallback callback, _In_opt_ void* pCallbackContext);
REDHAWK_PALIMPORT bool REDHAWK_PALAPI PalStartFinalizerThread(_In_ BackgroundCallback callback, _In_opt_ void* pCallbackContext);

typedef UInt32_BOOL (*PalHijackCallback)(HANDLE hThread, _In_ PAL_LIMITED_CONTEXT* pThreadContext, _In_opt_ void* pCallbackContext);
REDHAWK_PALIMPORT uint32_t REDHAWK_PALAPI PalHijack(HANDLE hThread, _In_ PalHijackCallback callback, _In_opt_ void* pCallbackContext);
typedef void (*PalHijackCallback)(_In_ NATIVE_CONTEXT* pThreadContext, _In_opt_ void* pThreadToHijack);
REDHAWK_PALIMPORT void REDHAWK_PALAPI PalHijack(HANDLE hThread, _In_opt_ void* pThreadToHijack);
REDHAWK_PALIMPORT UInt32_BOOL REDHAWK_PALAPI PalRegisterHijackCallback(_In_ PalHijackCallback callback);

#ifdef FEATURE_ETW
REDHAWK_PALIMPORT bool REDHAWK_PALAPI PalEventEnabled(REGHANDLE regHandle, _In_ const EVENT_DESCRIPTOR* eventDescriptor);
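The hijack PAL surface changes shape here: the callback is now registered once via PalRegisterHijackCallback, receives the interrupted thread's full NATIVE_CONTEXT, and PalHijack itself returns nothing. A hedged consumption sketch follows; OnHijackWorker, InitSuspension, and SuspendOneThread are invented names, and the cast of pThreadToHijack assumes the runtime passes its own Thread* through that cookie.

```cpp
// Hedged sketch of driving the reshaped hijack API declared above. OnHijackWorker,
// InitSuspension and SuspendOneThread are invented names; the real wiring lives in
// the runtime's thread-suspension code, which this diff only touches in part.
static void OnHijackWorker(NATIVE_CONTEXT* pThreadContext, void* pThreadToHijack)
{
    // Runs while the target thread is stopped (suspended on Windows, inside the
    // hijack signal handler on Unix). It may inspect or edit the context, e.g. to
    // install a return-address hijack, but must not block or allocate.
    Thread* pTarget = (Thread*)pThreadToHijack;   // cookie passed through PalHijack
    (void)pTarget;
    (void)pThreadContext->GetIp();                // interrupted instruction pointer
}

void InitSuspension()
{
    // Registered once up front; PalHijack no longer takes the callback per call.
    PalRegisterHijackCallback(&OnHijackWorker);
}

void SuspendOneThread(HANDLE hOsThread, void* pRuntimeThread)
{
    // Fire-and-forget: unlike the old signature there is no status to return and
    // no callback result to wait for ("Do not wait for Unhijack").
    PalHijack(hOsThread, pRuntimeThread);
}
```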
148 changes: 79 additions & 69 deletions src/coreclr/nativeaot/Runtime/StackFrameIterator.cpp
@@ -27,6 +27,10 @@
#include "RuntimeInstance.h"
#include "rhbinder.h"

#ifdef TARGET_UNIX
#include "UnixContext.h"
#endif

// warning C4061: enumerator '{blah}' in switch of enum '{blarg}' is not explicitly handled by a case label
#pragma warning(disable:4061)

@@ -93,13 +97,11 @@ StackFrameIterator::StackFrameIterator(Thread * pThreadToWalk, PInvokeTransition
STRESS_LOG0(LF_STACKWALK, LL_INFO10000, "----Init---- [ GC ]\n");
ASSERT(!pThreadToWalk->DangerousCrossThreadIsHijacked());

#ifdef FEATURE_SUSPEND_REDIRECTION
if (pInitialTransitionFrame == REDIRECTED_THREAD_MARKER)
if (pInitialTransitionFrame == INTERRUPTED_THREAD_MARKER)
{
InternalInit(pThreadToWalk, pThreadToWalk->GetRedirectionContext(), GcStackWalkFlags | ActiveStackFrame);
InternalInit(pThreadToWalk, pThreadToWalk->GetInterruptedContext(), GcStackWalkFlags | ActiveStackFrame);
}
else
#endif
{
InternalInit(pThreadToWalk, pInitialTransitionFrame, GcStackWalkFlags);
}
@@ -109,7 +111,14 @@ StackFrameIterator::StackFrameIterator(Thread * pThreadToWalk, PInvokeTransition

StackFrameIterator::StackFrameIterator(Thread * pThreadToWalk, PTR_PAL_LIMITED_CONTEXT pCtx)
{
STRESS_LOG0(LF_STACKWALK, LL_INFO10000, "----Init---- [ hijack ]\n");
STRESS_LOG0(LF_STACKWALK, LL_INFO10000, "----Init with limited ctx---- [ hijack ]\n");
InternalInit(pThreadToWalk, pCtx, 0);
PrepareToYieldFrame();
}

StackFrameIterator::StackFrameIterator(Thread* pThreadToWalk, NATIVE_CONTEXT* pCtx)
{
STRESS_LOG0(LF_STACKWALK, LL_INFO10000, "----Init with native ctx---- [ hijack ]\n");
InternalInit(pThreadToWalk, pCtx, 0);
PrepareToYieldFrame();
}
@@ -283,15 +292,11 @@ void StackFrameIterator::InternalInit(Thread * pThreadToWalk, PInvokeTransitionF
if (pFrame->m_Flags & PTFF_SAVE_R11) { m_RegDisplay.pR11 = pPreservedRegsCursor++; }
#endif // TARGET_AMD64

if (pFrame->m_Flags & PTFF_RAX_IS_GCREF)
{
m_pHijackedReturnValue = (PTR_RtuObjectRef) m_RegDisplay.pRax;
m_HijackedReturnValueKind = GCRK_Object;
}
if (pFrame->m_Flags & PTFF_RAX_IS_BYREF)
GCRefKind retValueKind = TransitionFrameFlagsToReturnKind(pFrame->m_Flags);
if (retValueKind != GCRK_Scalar)
{
m_pHijackedReturnValue = (PTR_RtuObjectRef) m_RegDisplay.pRax;
m_HijackedReturnValueKind = GCRK_Byref;
m_pHijackedReturnValue = (PTR_RtuObjectRef)m_RegDisplay.pRax;
m_HijackedReturnValueKind = retValueKind;
}

#endif // TARGET_ARM
@@ -508,10 +513,9 @@ void StackFrameIterator::InternalInit(Thread * pThreadToWalk, PTR_PAL_LIMITED_CO
#endif // TARGET_ARM
}

// Prepare to start a stack walk from the context listed in the supplied CONTEXT.
// The supplied context can describe a location in either managed or unmanaged code. In the
// latter case the iterator is left in an invalid state when this function returns.
void StackFrameIterator::InternalInit(Thread * pThreadToWalk, CONTEXT* pCtx, uint32_t dwFlags)
// Prepare to start a stack walk from the context listed in the supplied NATIVE_CONTEXT.
// The supplied context can describe a location in managed code.
void StackFrameIterator::InternalInit(Thread * pThreadToWalk, NATIVE_CONTEXT* pCtx, uint32_t dwFlags)
{
ASSERT((dwFlags & MethodStateCalculated) == 0);

@@ -524,10 +528,8 @@ void StackFrameIterator::InternalInit(Thread * pThreadToWalk, CONTEXT* pCtx, uin
// properly walk it in parallel.
ResetNextExInfoForSP(pCtx->GetSp());

// This codepath is used by the hijack stackwalk and we can get arbitrary ControlPCs from there. If this
// context has a non-managed control PC, then we're done.
if (!m_pInstance->IsManaged(dac_cast<PTR_VOID>(pCtx->GetIp())))
return;
// This codepath is used by the hijack stackwalk. The IP must be in managed code.
ASSERT(m_pInstance->IsManaged(dac_cast<PTR_VOID>(pCtx->GetIp())));

//
// control state
@@ -536,82 +538,90 @@
m_RegDisplay.SP = pCtx->GetSp();
m_RegDisplay.IP = pCtx->GetIp();

#ifdef TARGET_UNIX
#define PTR_TO_REG(ptr, reg) (&((ptr)->reg()))
#else
#define PTR_TO_REG(ptr, reg) (&((ptr)->reg))
#endif

#ifdef TARGET_ARM64

m_RegDisplay.pIP = PTR_TO_MEMBER(CONTEXT, pCtx, Pc);
m_RegDisplay.pIP = (PTR_PCODE)PTR_TO_REG(pCtx, Pc);

//
// preserved regs
//
m_RegDisplay.pX19 = PTR_TO_MEMBER(CONTEXT, pCtx, X19);
m_RegDisplay.pX20 = PTR_TO_MEMBER(CONTEXT, pCtx, X20);
m_RegDisplay.pX21 = PTR_TO_MEMBER(CONTEXT, pCtx, X21);
m_RegDisplay.pX22 = PTR_TO_MEMBER(CONTEXT, pCtx, X22);
m_RegDisplay.pX23 = PTR_TO_MEMBER(CONTEXT, pCtx, X23);
m_RegDisplay.pX24 = PTR_TO_MEMBER(CONTEXT, pCtx, X24);
m_RegDisplay.pX25 = PTR_TO_MEMBER(CONTEXT, pCtx, X25);
m_RegDisplay.pX26 = PTR_TO_MEMBER(CONTEXT, pCtx, X26);
m_RegDisplay.pX27 = PTR_TO_MEMBER(CONTEXT, pCtx, X27);
m_RegDisplay.pX28 = PTR_TO_MEMBER(CONTEXT, pCtx, X28);
m_RegDisplay.pFP = PTR_TO_MEMBER(CONTEXT, pCtx, Fp);
m_RegDisplay.pLR = PTR_TO_MEMBER(CONTEXT, pCtx, Lr);
m_RegDisplay.pX19 = (PTR_UIntNative)PTR_TO_REG(pCtx, X19);
m_RegDisplay.pX20 = (PTR_UIntNative)PTR_TO_REG(pCtx, X20);
m_RegDisplay.pX21 = (PTR_UIntNative)PTR_TO_REG(pCtx, X21);
m_RegDisplay.pX22 = (PTR_UIntNative)PTR_TO_REG(pCtx, X22);
m_RegDisplay.pX23 = (PTR_UIntNative)PTR_TO_REG(pCtx, X23);
m_RegDisplay.pX24 = (PTR_UIntNative)PTR_TO_REG(pCtx, X24);
m_RegDisplay.pX25 = (PTR_UIntNative)PTR_TO_REG(pCtx, X25);
m_RegDisplay.pX26 = (PTR_UIntNative)PTR_TO_REG(pCtx, X26);
m_RegDisplay.pX27 = (PTR_UIntNative)PTR_TO_REG(pCtx, X27);
m_RegDisplay.pX28 = (PTR_UIntNative)PTR_TO_REG(pCtx, X28);
m_RegDisplay.pFP = (PTR_UIntNative)PTR_TO_REG(pCtx, Fp);
m_RegDisplay.pLR = (PTR_UIntNative)PTR_TO_REG(pCtx, Lr);

//
// scratch regs
//
m_RegDisplay.pX0 = PTR_TO_MEMBER(CONTEXT, pCtx, X0);
m_RegDisplay.pX1 = PTR_TO_MEMBER(CONTEXT, pCtx, X1);
m_RegDisplay.pX2 = PTR_TO_MEMBER(CONTEXT, pCtx, X2);
m_RegDisplay.pX3 = PTR_TO_MEMBER(CONTEXT, pCtx, X3);
m_RegDisplay.pX4 = PTR_TO_MEMBER(CONTEXT, pCtx, X4);
m_RegDisplay.pX5 = PTR_TO_MEMBER(CONTEXT, pCtx, X5);
m_RegDisplay.pX6 = PTR_TO_MEMBER(CONTEXT, pCtx, X6);
m_RegDisplay.pX7 = PTR_TO_MEMBER(CONTEXT, pCtx, X7);
m_RegDisplay.pX8 = PTR_TO_MEMBER(CONTEXT, pCtx, X8);
m_RegDisplay.pX9 = PTR_TO_MEMBER(CONTEXT, pCtx, X9);
m_RegDisplay.pX10 = PTR_TO_MEMBER(CONTEXT, pCtx, X10);
m_RegDisplay.pX11 = PTR_TO_MEMBER(CONTEXT, pCtx, X11);
m_RegDisplay.pX12 = PTR_TO_MEMBER(CONTEXT, pCtx, X12);
m_RegDisplay.pX13 = PTR_TO_MEMBER(CONTEXT, pCtx, X13);
m_RegDisplay.pX14 = PTR_TO_MEMBER(CONTEXT, pCtx, X14);
m_RegDisplay.pX15 = PTR_TO_MEMBER(CONTEXT, pCtx, X15);
m_RegDisplay.pX16 = PTR_TO_MEMBER(CONTEXT, pCtx, X16);
m_RegDisplay.pX17 = PTR_TO_MEMBER(CONTEXT, pCtx, X17);
m_RegDisplay.pX18 = PTR_TO_MEMBER(CONTEXT, pCtx, X18);
m_RegDisplay.pX0 = (PTR_UIntNative)PTR_TO_REG(pCtx, X0);
m_RegDisplay.pX1 = (PTR_UIntNative)PTR_TO_REG(pCtx, X1);
m_RegDisplay.pX2 = (PTR_UIntNative)PTR_TO_REG(pCtx, X2);
m_RegDisplay.pX3 = (PTR_UIntNative)PTR_TO_REG(pCtx, X3);
m_RegDisplay.pX4 = (PTR_UIntNative)PTR_TO_REG(pCtx, X4);
m_RegDisplay.pX5 = (PTR_UIntNative)PTR_TO_REG(pCtx, X5);
m_RegDisplay.pX6 = (PTR_UIntNative)PTR_TO_REG(pCtx, X6);
m_RegDisplay.pX7 = (PTR_UIntNative)PTR_TO_REG(pCtx, X7);
m_RegDisplay.pX8 = (PTR_UIntNative)PTR_TO_REG(pCtx, X8);
m_RegDisplay.pX9 = (PTR_UIntNative)PTR_TO_REG(pCtx, X9);
m_RegDisplay.pX10 = (PTR_UIntNative)PTR_TO_REG(pCtx, X10);
m_RegDisplay.pX11 = (PTR_UIntNative)PTR_TO_REG(pCtx, X11);
m_RegDisplay.pX12 = (PTR_UIntNative)PTR_TO_REG(pCtx, X12);
m_RegDisplay.pX13 = (PTR_UIntNative)PTR_TO_REG(pCtx, X13);
m_RegDisplay.pX14 = (PTR_UIntNative)PTR_TO_REG(pCtx, X14);
m_RegDisplay.pX15 = (PTR_UIntNative)PTR_TO_REG(pCtx, X15);
m_RegDisplay.pX16 = (PTR_UIntNative)PTR_TO_REG(pCtx, X16);
m_RegDisplay.pX17 = (PTR_UIntNative)PTR_TO_REG(pCtx, X17);
m_RegDisplay.pX18 = (PTR_UIntNative)PTR_TO_REG(pCtx, X18);

#elif defined(TARGET_X86) || defined(TARGET_AMD64)

m_RegDisplay.pIP = (PTR_PCODE)PTR_TO_MEMBER(CONTEXT, pCtx, Rip);
m_RegDisplay.pIP = (PTR_PCODE)PTR_TO_REG(pCtx, Rip);

//
// preserved regs
//
m_RegDisplay.pRbp = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, Rbp);
m_RegDisplay.pRsi = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, Rsi);
m_RegDisplay.pRdi = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, Rdi);
m_RegDisplay.pRbx = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, Rbx);
m_RegDisplay.pRbp = (PTR_UIntNative)PTR_TO_REG(pCtx, Rbp);
m_RegDisplay.pRsi = (PTR_UIntNative)PTR_TO_REG(pCtx, Rsi);
m_RegDisplay.pRdi = (PTR_UIntNative)PTR_TO_REG(pCtx, Rdi);
m_RegDisplay.pRbx = (PTR_UIntNative)PTR_TO_REG(pCtx, Rbx);
#ifdef TARGET_AMD64
m_RegDisplay.pR12 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R12);
m_RegDisplay.pR13 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R13);
m_RegDisplay.pR14 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R14);
m_RegDisplay.pR15 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R15);
m_RegDisplay.pR12 = (PTR_UIntNative)PTR_TO_REG(pCtx, R12);
m_RegDisplay.pR13 = (PTR_UIntNative)PTR_TO_REG(pCtx, R13);
m_RegDisplay.pR14 = (PTR_UIntNative)PTR_TO_REG(pCtx, R14);
m_RegDisplay.pR15 = (PTR_UIntNative)PTR_TO_REG(pCtx, R15);
#endif // TARGET_AMD64

//
// scratch regs
//
m_RegDisplay.pRax = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, Rax);
m_RegDisplay.pRcx = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, Rcx);
m_RegDisplay.pRdx = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, Rdx);
m_RegDisplay.pRax = (PTR_UIntNative)PTR_TO_REG(pCtx, Rax);
m_RegDisplay.pRcx = (PTR_UIntNative)PTR_TO_REG(pCtx, Rcx);
m_RegDisplay.pRdx = (PTR_UIntNative)PTR_TO_REG(pCtx, Rdx);
#ifdef TARGET_AMD64
m_RegDisplay.pR8 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R8);
m_RegDisplay.pR9 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R9);
m_RegDisplay.pR10 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R10);
m_RegDisplay.pR11 = (PTR_UIntNative)PTR_TO_MEMBER(CONTEXT, pCtx, R11);
m_RegDisplay.pR8 = (PTR_UIntNative)PTR_TO_REG(pCtx, R8);
m_RegDisplay.pR9 = (PTR_UIntNative)PTR_TO_REG(pCtx, R9);
m_RegDisplay.pR10 = (PTR_UIntNative)PTR_TO_REG(pCtx, R10);
m_RegDisplay.pR11 = (PTR_UIntNative)PTR_TO_REG(pCtx, R11);
#endif // TARGET_AMD64
#else
PORTABILITY_ASSERT("StackFrameIterator::InternalInit");
#endif // TARGET_ARM

#undef PTR_TO_REG
}

PTR_VOID StackFrameIterator::HandleExCollide(PTR_ExInfo pExInfo)
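The PTR_TO_REG macro above exists because UNIX_CONTEXT exposes registers through accessor methods returning references, while the Windows CONTEXT exposes plain fields; both spellings yield an addressable lvalue whose address can be parked in the REGDISPLAY. A minimal, self-contained illustration follows; WinStyleContext and UnixStyleContext are simplified stand-ins invented for the example.

```cpp
// Minimal illustration of the PTR_TO_REG pattern used above. WinStyleContext and
// UnixStyleContext are simplified stand-ins for CONTEXT and UNIX_CONTEXT; the point
// is that field access and accessor-method access both produce an lvalue whose
// address can be stored.
#include <cstdint>

struct WinStyleContext  { uint64_t Rax; };                                   // plain field
struct UnixStyleContext { uint64_t rax; uint64_t& Rax() { return rax; } };   // accessor

#ifdef TARGET_UNIX
#define PTR_TO_REG(ptr, reg) (&((ptr)->reg()))
#else
#define PTR_TO_REG(ptr, reg) (&((ptr)->reg))
#endif

int main()
{
#ifdef TARGET_UNIX
    UnixStyleContext ctx {};
#else
    WinStyleContext ctx {};
#endif
    uint64_t* pRax = PTR_TO_REG(&ctx, Rax);   // identical call site on both flavors
    *pRax = 42;
    return (*pRax == 42) ? 0 : 1;
}
```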
3 changes: 2 additions & 1 deletion src/coreclr/nativeaot/Runtime/StackFrameIterator.h
@@ -34,6 +34,7 @@ class StackFrameIterator
public:
StackFrameIterator() {}
StackFrameIterator(Thread * pThreadToWalk, PInvokeTransitionFrame* pInitialTransitionFrame);
StackFrameIterator(Thread* pThreadToWalk, NATIVE_CONTEXT* pCtx);
StackFrameIterator(Thread * pThreadToWalk, PTR_PAL_LIMITED_CONTEXT pCtx);

bool IsValid();
@@ -82,7 +83,7 @@

void InternalInit(Thread * pThreadToWalk, PTR_PInvokeTransitionFrame pFrame, uint32_t dwFlags); // GC stackwalk
void InternalInit(Thread * pThreadToWalk, PTR_PAL_LIMITED_CONTEXT pCtx, uint32_t dwFlags); // EH and hijack stackwalk, and collided unwind
void InternalInit(Thread * pThreadToWalk, CONTEXT* pCtx, uint32_t dwFlags); // GC stackwalk of redirected thread
void InternalInit(Thread * pThreadToWalk, NATIVE_CONTEXT* pCtx, uint32_t dwFlags); // GC stackwalk of redirected thread

void InternalInitForEH(Thread * pThreadToWalk, PAL_LIMITED_CONTEXT * pCtx, bool instructionFault); // EH stackwalk
void InternalInitForStackTrace(); // Environment.StackTrace
4 changes: 2 additions & 2 deletions src/coreclr/nativeaot/Runtime/amd64/AsmMacros.inc
@@ -365,11 +365,11 @@ PTFF_SAVE_R14 equ 00000040h
PTFF_SAVE_R15 equ 00000080h
PTFF_SAVE_ALL_PRESERVED equ 000000F7h ;; NOTE: RBP is not included in this set!
PTFF_SAVE_RSP equ 00008000h
PTFF_SAVE_RAX equ 00000100h ;; RAX is saved if it contains a GC ref and we're in hijack handler
PTFF_SAVE_RAX equ 00000100h ;; RAX is saved in hijack handler - in case it contains a GC ref
PTFF_SAVE_ALL_SCRATCH equ 00007F00h
PTFF_RAX_IS_GCREF equ 00010000h ;; iff PTFF_SAVE_RAX: set -> eax is Object, clear -> eax is scalar
PTFF_RAX_IS_BYREF equ 00020000h ;; iff PTFF_SAVE_RAX: set -> eax is ByRef, clear -> eax is Object or scalar
PTFF_THREAD_ABORT equ 00040000h ;; indicates that ThreadAbortException should be thrown when returning from the transition
PTFF_THREAD_ABORT equ 00100000h ;; indicates that ThreadAbortException should be thrown when returning from the transition

;; These must match the TrapThreadsFlags enum
TrapThreadsFlags_None equ 0
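On AMD64 this flag layout is what makes the C++ conversions in ICodeManager.h a plain 16-bit shift, and it is why PTFF_THREAD_ABORT moves from 00040000h to 00100000h: per the new C_ASSERTs, the freed bits 00040000h/00080000h now carry the RDX ref-kind. The sketch below checks that round trip; the RDX flag values are derived from those asserts and PTFF_SAVE_RDX is an assumed scratch-register bit, neither being spelled out in this hunk.

```cpp
// Sketch of the GCRefKind <-> PInvokeTransitionFrameFlags round trip implied by the
// values above and by the C_ASSERTs added to ICodeManager.h in this PR. The RDX
// "is ref" values are derived from those asserts; PTFF_SAVE_RDX is an assumed
// scratch-register bit and is not spelled out in this hunk.
#include <cassert>
#include <cstdint>

constexpr uint64_t PTFF_SAVE_RAX     = 0x00000100;
constexpr uint64_t PTFF_SAVE_RDX     = 0x00000400;   // assumption: one of the PTFF_SAVE_ALL_SCRATCH bits
constexpr uint64_t PTFF_RAX_IS_GCREF = 0x00010000;
constexpr uint64_t PTFF_RAX_IS_BYREF = 0x00020000;
constexpr uint64_t PTFF_RDX_IS_GCREF = 0x00040000;   // == GCRK_Scalar_Obj   << 16 (old THREAD_ABORT slot)
constexpr uint64_t PTFF_RDX_IS_BYREF = 0x00080000;   // == GCRK_Scalar_Byref << 16
constexpr uint64_t PTFF_THREAD_ABORT = 0x00100000;   // moved up to clear the RDX bits

enum GCRefKind : unsigned char
{
    GCRK_Scalar = 0, GCRK_Object = 1, GCRK_Byref = 2,
    GCRK_Scalar_Obj   = (GCRK_Object << 2) | GCRK_Scalar,
    GCRK_Scalar_Byref = (GCRK_Byref  << 2) | GCRK_Scalar,
};

int main()
{
    // Encode: an object returned in the second register sets PTFF_RDX_IS_GCREF via the shift.
    uint64_t flags = PTFF_SAVE_RAX | PTFF_SAVE_RDX | ((uint64_t)GCRK_Scalar_Obj << 16);
    assert((flags & PTFF_RDX_IS_GCREF) != 0);

    // Decode: mask the four "is ref" bits and shift back to recover the composite kind.
    GCRefKind kind = (GCRefKind)((flags &
        (PTFF_RAX_IS_GCREF | PTFF_RAX_IS_BYREF | PTFF_RDX_IS_GCREF | PTFF_RDX_IS_BYREF)) >> 16);
    assert(kind == GCRK_Scalar_Obj);

    // After the move, the thread-abort bit no longer collides with the RDX ref bits.
    assert((PTFF_THREAD_ABORT & (PTFF_RDX_IS_GCREF | PTFF_RDX_IS_BYREF)) == 0);
    return 0;
}
```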