diff --git a/src/coreclr/nativeaot/Bootstrap/main.cpp b/src/coreclr/nativeaot/Bootstrap/main.cpp index d6de0e292f12a..5e30ae1f0f495 100644 --- a/src/coreclr/nativeaot/Bootstrap/main.cpp +++ b/src/coreclr/nativeaot/Bootstrap/main.cpp @@ -115,6 +115,7 @@ extern "C" void IDynamicCastableGetInterfaceImplementation(); extern "C" void ObjectiveCMarshalTryGetTaggedMemory(); extern "C" void ObjectiveCMarshalGetIsTrackedReferenceCallback(); extern "C" void ObjectiveCMarshalGetOnEnteredFinalizerQueueCallback(); +extern "C" void ObjectiveCMarshalGetUnhandledExceptionPropagationHandler(); #endif typedef void(*pfn)(); @@ -134,10 +135,12 @@ static const pfn c_classlibFunctions[] = { &ObjectiveCMarshalTryGetTaggedMemory, &ObjectiveCMarshalGetIsTrackedReferenceCallback, &ObjectiveCMarshalGetOnEnteredFinalizerQueueCallback, + &ObjectiveCMarshalGetUnhandledExceptionPropagationHandler, #else nullptr, nullptr, nullptr, + nullptr, #endif }; diff --git a/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/ExceptionHandling.cs b/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/ExceptionHandling.cs index 4576862fda02e..1f0bf451771b0 100644 --- a/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/ExceptionHandling.cs +++ b/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/ExceptionHandling.cs @@ -611,6 +611,8 @@ private static void DispatchEx(scoped ref StackFrameIterator frameIter, ref ExIn byte* prevOriginalPC = null; UIntPtr prevFramePtr = UIntPtr.Zero; bool unwoundReversePInvoke = false; + IntPtr pReversePInvokePropagationCallback = IntPtr.Zero; + IntPtr pReversePInvokePropagationContext = IntPtr.Zero; bool isValid = frameIter.Init(exInfo._pExContext, (exInfo._kind & ExKind.InstructionFaultFlag) != 0); Debug.Assert(isValid, "RhThrowEx called with an unexpected context"); @@ -643,7 +645,29 @@ private static void DispatchEx(scoped ref StackFrameIterator frameIter, ref ExIn } } - if (pCatchHandler == null) +#if FEATURE_OBJCMARSHAL + if (unwoundReversePInvoke) + { + // We did 
not find any managed handlers before hitting a reverse P/Invoke boundary. + // See if the classlib has a handler to propagate the exception to native code. + IntPtr pGetHandlerClasslibFunction = (IntPtr)InternalCalls.RhpGetClasslibFunctionFromCodeAddress((IntPtr)prevControlPC, + ClassLibFunctionId.ObjectiveCMarshalGetUnhandledExceptionPropagationHandler); + if (pGetHandlerClasslibFunction != IntPtr.Zero) + { + var pGetHandler = (delegate*)pGetHandlerClasslibFunction; + pReversePInvokePropagationCallback = pGetHandler( + exceptionObj, (IntPtr)prevControlPC, out pReversePInvokePropagationContext); + if (pReversePInvokePropagationCallback != IntPtr.Zero) + { + // Tell the second pass to unwind to this frame. + handlingFrameSP = frameIter.SP; + catchingTryRegionIdx = MaxTryRegionIdx; + } + } + } +#endif // FEATURE_OBJCMARSHAL + + if (pCatchHandler == null && pReversePInvokePropagationCallback == IntPtr.Zero) { OnUnhandledExceptionViaClassLib(exceptionObj); @@ -655,8 +679,8 @@ private static void DispatchEx(scoped ref StackFrameIterator frameIter, ref ExIn } // We FailFast above if the exception goes unhandled. Therefore, we cannot run the second pass - // without a catch handler. - Debug.Assert(pCatchHandler != null, "We should have a handler if we're starting the second pass"); + // without a catch handler or propagation callback. 
+ Debug.Assert(pCatchHandler != null || pReversePInvokePropagationCallback != IntPtr.Zero, "We should have a handler if we're starting the second pass"); // ------------------------------------------------ // @@ -673,12 +697,23 @@ private static void DispatchEx(scoped ref StackFrameIterator frameIter, ref ExIn exInfo._passNumber = 2; startIdx = MaxTryRegionIdx; + unwoundReversePInvoke = false; isValid = frameIter.Init(exInfo._pExContext, (exInfo._kind & ExKind.InstructionFaultFlag) != 0); - for (; isValid && ((byte*)frameIter.SP <= (byte*)handlingFrameSP); isValid = frameIter.Next(&startIdx)) + for (; isValid && ((byte*)frameIter.SP <= (byte*)handlingFrameSP); isValid = frameIter.Next(&startIdx, &unwoundReversePInvoke)) { Debug.Assert(isValid, "second-pass EH unwind failed unexpectedly"); DebugScanCallFrame(exInfo._passNumber, frameIter.ControlPC, frameIter.SP); + if (unwoundReversePInvoke) + { + Debug.Assert(pReversePInvokePropagationCallback != IntPtr.Zero, "Unwound to a reverse P/Invoke in the second pass. We should have a propagation handler."); + Debug.Assert(frameIter.SP == handlingFrameSP, "Encountered a different reverse P/Invoke frame in the second pass."); + Debug.Assert(frameIter.PreviousTransitionFrame != IntPtr.Zero, "Should have a transition frame for reverse P/Invoke."); + // Found the native frame that called the reverse P/invoke. + // It is not possible to run managed second pass handlers on a native frame. 
+ break; + } + if ((frameIter.SP == handlingFrameSP) #if TARGET_ARM64 && (frameIter.ControlPC == prevControlPC) @@ -693,6 +728,18 @@ private static void DispatchEx(scoped ref StackFrameIterator frameIter, ref ExIn InvokeSecondPass(ref exInfo, startIdx); } +#if FEATURE_OBJCMARSHAL + if (pReversePInvokePropagationCallback != IntPtr.Zero) + { + InternalCalls.RhpCallPropagateExceptionCallback( + pReversePInvokePropagationContext, pReversePInvokePropagationCallback, frameIter.RegisterSet, ref exInfo, frameIter.PreviousTransitionFrame); + // the helper should jump to propagation handler and not return + Debug.Assert(false, "unreachable"); + FallbackFailFast(RhFailFastReason.InternalError, null); + } +#endif // FEATURE_OBJCMARSHAL + + // ------------------------------------------------ // // Call the handler and resume execution diff --git a/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/InternalCalls.cs b/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/InternalCalls.cs index f97a62a9c98f6..0b9cdd1e9006b 100644 --- a/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/InternalCalls.cs +++ b/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/InternalCalls.cs @@ -47,6 +47,7 @@ internal enum ClassLibFunctionId ObjectiveCMarshalTryGetTaggedMemory = 10, ObjectiveCMarshalGetIsTrackedReferenceCallback = 11, ObjectiveCMarshalGetOnEnteredFinalizerQueueCallback = 12, + ObjectiveCMarshalGetUnhandledExceptionPropagationHandler = 13, } internal static class InternalCalls @@ -230,6 +231,13 @@ internal static extern unsafe IntPtr RhpCallCatchFunclet( internal static extern unsafe bool RhpCallFilterFunclet( object exceptionObj, byte* pFilterIP, void* pvRegDisplay); +#if FEATURE_OBJCMARSHAL + [RuntimeImport(Redhawk.BaseName, "RhpCallPropagateExceptionCallback")] + [MethodImpl(MethodImplOptions.InternalCall)] + internal static extern unsafe IntPtr RhpCallPropagateExceptionCallback( + IntPtr callbackContext, IntPtr callback, void* pvRegDisplay, ref EH.ExInfo exInfo, IntPtr 
pPreviousTransitionFrame); +#endif // FEATURE_OBJCMARSHAL + [RuntimeImport(Redhawk.BaseName, "RhpFallbackFailFast")] [MethodImpl(MethodImplOptions.InternalCall)] internal static extern unsafe void RhpFallbackFailFast(); diff --git a/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/StackFrameIterator.cs b/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/StackFrameIterator.cs index da2dffdc5a566..9a772bd3e3370 100644 --- a/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/StackFrameIterator.cs +++ b/src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/StackFrameIterator.cs @@ -23,12 +23,15 @@ internal unsafe struct StackFrameIterator private REGDISPLAY _regDisplay; [FieldOffset(AsmOffsets.OFFSETOF__StackFrameIterator__m_OriginalControlPC)] private IntPtr _originalControlPC; + [FieldOffset(AsmOffsets.OFFSETOF__StackFrameIterator__m_pPreviousTransitionFrame)] + private IntPtr _pPreviousTransitionFrame; internal byte* ControlPC { get { return (byte*)_controlPC; } } internal byte* OriginalControlPC { get { return (byte*)_originalControlPC; } } internal void* RegisterSet { get { fixed (void* pRegDisplay = &_regDisplay) { return pRegDisplay; } } } internal UIntPtr SP { get { return _regDisplay.SP; } } internal UIntPtr FramePointer { get { return _framePointer; } } + internal IntPtr PreviousTransitionFrame { get { return _pPreviousTransitionFrame; } } internal bool Init(EH.PAL_LIMITED_CONTEXT* pStackwalkCtx, bool instructionFault = false) { diff --git a/src/coreclr/nativeaot/Runtime/ICodeManager.h b/src/coreclr/nativeaot/Runtime/ICodeManager.h index a5e188f97197b..4205c62c4de08 100644 --- a/src/coreclr/nativeaot/Runtime/ICodeManager.h +++ b/src/coreclr/nativeaot/Runtime/ICodeManager.h @@ -157,6 +157,7 @@ enum class ClasslibFunctionId ObjectiveCMarshalTryGetTaggedMemory = 10, ObjectiveCMarshalGetIsTrackedReferenceCallback = 11, ObjectiveCMarshalGetOnEnteredFinalizerQueueCallback = 12, + ObjectiveCMarshalGetUnhandledExceptionPropagationHandler = 13, }; enum 
class AssociatedDataFlags : unsigned char @@ -165,6 +166,14 @@ enum class AssociatedDataFlags : unsigned char HasUnboxingStubTarget = 1, }; +enum UnwindStackFrameFlags +{ + USFF_None = 0, + // If this is a reverse P/Invoke frame, do not continue the unwind + // after extracting the saved transition frame. + USFF_StopUnwindOnTransitionFrame = 1, +}; + class ICodeManager { public: @@ -185,6 +194,7 @@ class ICodeManager bool isActiveStackFrame) PURE_VIRTUAL virtual bool UnwindStackFrame(MethodInfo * pMethodInfo, + uint32_t flags, REGDISPLAY * pRegisterSet, // in/out PInvokeTransitionFrame** ppPreviousTransitionFrame) PURE_VIRTUAL // out diff --git a/src/coreclr/nativeaot/Runtime/StackFrameIterator.cpp b/src/coreclr/nativeaot/Runtime/StackFrameIterator.cpp index 3da12598ba196..0dcd4058ab359 100644 --- a/src/coreclr/nativeaot/Runtime/StackFrameIterator.cpp +++ b/src/coreclr/nativeaot/Runtime/StackFrameIterator.cpp @@ -141,6 +141,7 @@ void StackFrameIterator::EnterInitialInvalidState(Thread * pThreadToWalk) m_ShouldSkipRegularGcReporting = false; m_pendingFuncletFramePointer = NULL; m_pNextExInfo = pThreadToWalk->GetCurExInfo(); + m_pPreviousTransitionFrame = NULL; SetControlPC(0); } @@ -172,6 +173,7 @@ void StackFrameIterator::InternalInit(Thread * pThreadToWalk, PInvokeTransitionF } m_dwFlags = dwFlags; + m_pPreviousTransitionFrame = pFrame; // We need to walk the ExInfo chain in parallel with the stackwalk so that we know when we cross over // exception throw points. So we must find our initial point in the ExInfo chain here so that we can @@ -1413,6 +1415,8 @@ void StackFrameIterator::NextInternal() { UnwindOutOfCurrentManagedFrame: ASSERT(m_dwFlags & MethodStateCalculated); + // Due to the lack of an ICodeManager for native code, we can't unwind from a native frame. 
+ ASSERT((m_dwFlags & (SkipNativeFrames|UnwoundReversePInvoke)) != UnwoundReversePInvoke); m_dwFlags &= ~(ExCollide|MethodStateCalculated|UnwoundReversePInvoke|ActiveStackFrame); ASSERT(IsValid()); @@ -1432,33 +1436,41 @@ void StackFrameIterator::NextInternal() uintptr_t DEBUG_preUnwindSP = m_RegDisplay.GetSP(); #endif - PInvokeTransitionFrame* pPreviousTransitionFrame; - FAILFAST_OR_DAC_FAIL(GetCodeManager()->UnwindStackFrame(&m_methodInfo, &m_RegDisplay, &pPreviousTransitionFrame)); + uint32_t unwindFlags = USFF_None; + if ((m_dwFlags & SkipNativeFrames) != 0) + { + unwindFlags |= USFF_StopUnwindOnTransitionFrame; + } + + FAILFAST_OR_DAC_FAIL(GetCodeManager()->UnwindStackFrame(&m_methodInfo, unwindFlags, &m_RegDisplay, + &m_pPreviousTransitionFrame)); + + if (m_pPreviousTransitionFrame != NULL) + { + m_dwFlags |= UnwoundReversePInvoke; + } bool doingFuncletUnwind = GetCodeManager()->IsFunclet(&m_methodInfo); - if (pPreviousTransitionFrame != NULL) + if (m_pPreviousTransitionFrame != NULL && (m_dwFlags & SkipNativeFrames) != 0) { ASSERT(!doingFuncletUnwind); - if (pPreviousTransitionFrame == TOP_OF_STACK_MARKER) + if (m_pPreviousTransitionFrame == TOP_OF_STACK_MARKER) { SetControlPC(0); } else { - // NOTE: If this is an EH stack walk, then reinitializing the iterator using the GC stack - // walk flags is incorrect. That said, this is OK because the exception dispatcher will - // immediately trigger a failfast when it sees the UnwoundReversePInvoke flag. // NOTE: This can generate a conservative stack range if the recovered PInvoke callsite // resides in an assembly thunk and not in normal managed code. In this case InternalInit // will unwind through the thunk and back to the nearest managed frame, and therefore may // see a conservative range reported by one of the thunks encountered during this "nested" // unwind. 
- InternalInit(m_pThread, pPreviousTransitionFrame, GcStackWalkFlags); + InternalInit(m_pThread, m_pPreviousTransitionFrame, GcStackWalkFlags); + m_dwFlags |= UnwoundReversePInvoke; ASSERT(m_pInstance->IsManaged(m_ControlPC)); } - m_dwFlags |= UnwoundReversePInvoke; } else { @@ -1579,11 +1591,12 @@ void StackFrameIterator::NextInternal() } // Now that all assembly thunks and ExInfo collisions have been processed, it is guaranteed - // that the next managed frame has been located. The located frame must now be yielded + // that the next managed frame has been located. Or the next native frame + // if we are not skipping them. The located frame must now be yielded // from the iterator with the one and only exception being cases where a managed frame must // be skipped due to funclet collapsing. - ASSERT(m_pInstance->IsManaged(m_ControlPC)); + ASSERT(m_pInstance->IsManaged(m_ControlPC) || (m_pPreviousTransitionFrame != NULL && (m_dwFlags & SkipNativeFrames) == 0)); if (collapsingTargetFrame != NULL) { @@ -1692,7 +1705,8 @@ void StackFrameIterator::PrepareToYieldFrame() if (!IsValid()) return; - ASSERT(m_pInstance->IsManaged(m_ControlPC)); + ASSERT(m_pInstance->IsManaged(m_ControlPC) || + ((m_dwFlags & SkipNativeFrames) == 0 && (m_dwFlags & UnwoundReversePInvoke) != 0)); if (m_dwFlags & ApplyReturnAddressAdjustment) { @@ -1748,6 +1762,7 @@ REGDISPLAY * StackFrameIterator::GetRegisterSet() PTR_VOID StackFrameIterator::GetEffectiveSafePointAddress() { ASSERT(IsValid()); + ASSERT(m_effectiveSafePointAddress); return m_effectiveSafePointAddress; } @@ -1780,6 +1795,17 @@ void StackFrameIterator::CalculateCurrentMethodState() if (m_dwFlags & MethodStateCalculated) return; + // Check if we are on a native frame. + if ((m_dwFlags & (SkipNativeFrames|UnwoundReversePInvoke)) == UnwoundReversePInvoke) + { + // There is no implementation of ICodeManager for native code. 
+ m_pCodeManager = nullptr; + m_effectiveSafePointAddress = nullptr; + m_FramePointer = nullptr; + m_dwFlags |= MethodStateCalculated; + return; + } + // Assume that the caller is likely to be in the same module if (m_pCodeManager == NULL || !m_pCodeManager->FindMethodInfo(m_ControlPC, &m_methodInfo)) { diff --git a/src/coreclr/nativeaot/Runtime/StackFrameIterator.h b/src/coreclr/nativeaot/Runtime/StackFrameIterator.h index bf61ba767a1c8..f01ca6656fbf2 100644 --- a/src/coreclr/nativeaot/Runtime/StackFrameIterator.h +++ b/src/coreclr/nativeaot/Runtime/StackFrameIterator.h @@ -147,7 +147,10 @@ class StackFrameIterator // The thread was interrupted in the current frame at the current IP by a signal, SuspendThread or similar. ActiveStackFrame = 0x40, - GcStackWalkFlags = (CollapseFunclets | RemapHardwareFaultsToSafePoint), + // When encountering a reverse P/Invoke, unwind directly to the P/Invoke frame using the saved transition frame. + SkipNativeFrames = 0x80, + + GcStackWalkFlags = (CollapseFunclets | RemapHardwareFaultsToSafePoint | SkipNativeFrames), EHStackWalkFlags = ApplyReturnAddressAdjustment, StackTraceStackWalkFlags = GcStackWalkFlags }; @@ -209,7 +212,7 @@ class StackFrameIterator GCRefKind m_HijackedReturnValueKind; PTR_UIntNative m_pConservativeStackRangeLowerBound; PTR_UIntNative m_pConservativeStackRangeUpperBound; - uint32_t m_dwFlags; + uint32_t m_dwFlags; PTR_ExInfo m_pNextExInfo; PTR_VOID m_pendingFuncletFramePointer; PreservedRegPtrs m_funcletPtrs; // @TODO: Placing the 'scratch space' in the StackFrameIterator is not @@ -217,6 +220,7 @@ class StackFrameIterator // space. However, the implementation simpler by doing it this way. 
bool m_ShouldSkipRegularGcReporting; PTR_VOID m_OriginalControlPC; + PTR_PInvokeTransitionFrame m_pPreviousTransitionFrame; }; #endif // __StackFrameIterator_h__ diff --git a/src/coreclr/nativeaot/Runtime/amd64/AsmOffsetsCpu.h b/src/coreclr/nativeaot/Runtime/amd64/AsmOffsetsCpu.h index 2239433993f08..8cc7f63f282de 100644 --- a/src/coreclr/nativeaot/Runtime/amd64/AsmOffsetsCpu.h +++ b/src/coreclr/nativeaot/Runtime/amd64/AsmOffsetsCpu.h @@ -8,7 +8,7 @@ // NOTE: the offsets MUST be in hex notation WITHOUT the 0x prefix #ifndef UNIX_AMD64_ABI -PLAT_ASM_SIZEOF(260, ExInfo) +PLAT_ASM_SIZEOF(270, ExInfo) PLAT_ASM_OFFSET(0, ExInfo, m_pPrevExInfo) PLAT_ASM_OFFSET(8, ExInfo, m_pExContext) PLAT_ASM_OFFSET(10, ExInfo, m_exception) @@ -16,7 +16,7 @@ PLAT_ASM_OFFSET(18, ExInfo, m_kind) PLAT_ASM_OFFSET(19, ExInfo, m_passNumber) PLAT_ASM_OFFSET(1c, ExInfo, m_idxCurClause) PLAT_ASM_OFFSET(20, ExInfo, m_frameIter) -PLAT_ASM_OFFSET(250, ExInfo, m_notifyDebuggerSP) +PLAT_ASM_OFFSET(260, ExInfo, m_notifyDebuggerSP) PLAT_ASM_OFFSET(0, PInvokeTransitionFrame, m_RIP) PLAT_ASM_OFFSET(8, PInvokeTransitionFrame, m_FramePointer) @@ -24,11 +24,12 @@ PLAT_ASM_OFFSET(10, PInvokeTransitionFrame, m_pThread) PLAT_ASM_OFFSET(18, PInvokeTransitionFrame, m_Flags) PLAT_ASM_OFFSET(20, PInvokeTransitionFrame, m_PreservedRegs) -PLAT_ASM_SIZEOF(230, StackFrameIterator) +PLAT_ASM_SIZEOF(240, StackFrameIterator) PLAT_ASM_OFFSET(10, StackFrameIterator, m_FramePointer) PLAT_ASM_OFFSET(18, StackFrameIterator, m_ControlPC) PLAT_ASM_OFFSET(20, StackFrameIterator, m_RegDisplay) PLAT_ASM_OFFSET(228, StackFrameIterator, m_OriginalControlPC) +PLAT_ASM_OFFSET(230, StackFrameIterator, m_pPreviousTransitionFrame) PLAT_ASM_SIZEOF(100, PAL_LIMITED_CONTEXT) PLAT_ASM_OFFSET(0, PAL_LIMITED_CONTEXT, IP) @@ -70,7 +71,7 @@ PLAT_ASM_OFFSET(90, REGDISPLAY, Xmm) #else // !UNIX_AMD64_ABI -PLAT_ASM_SIZEOF(1a8, ExInfo) +PLAT_ASM_SIZEOF(1b0, ExInfo) PLAT_ASM_OFFSET(0, ExInfo, m_pPrevExInfo) PLAT_ASM_OFFSET(8, ExInfo, m_pExContext) 
PLAT_ASM_OFFSET(10, ExInfo, m_exception) @@ -78,7 +79,7 @@ PLAT_ASM_OFFSET(18, ExInfo, m_kind) PLAT_ASM_OFFSET(19, ExInfo, m_passNumber) PLAT_ASM_OFFSET(1c, ExInfo, m_idxCurClause) PLAT_ASM_OFFSET(20, ExInfo, m_frameIter) -PLAT_ASM_OFFSET(1a0, ExInfo, m_notifyDebuggerSP) +PLAT_ASM_OFFSET(1a8, ExInfo, m_notifyDebuggerSP) PLAT_ASM_OFFSET(0, PInvokeTransitionFrame, m_RIP) PLAT_ASM_OFFSET(8, PInvokeTransitionFrame, m_FramePointer) @@ -86,11 +87,12 @@ PLAT_ASM_OFFSET(10, PInvokeTransitionFrame, m_pThread) PLAT_ASM_OFFSET(18, PInvokeTransitionFrame, m_Flags) PLAT_ASM_OFFSET(20, PInvokeTransitionFrame, m_PreservedRegs) -PLAT_ASM_SIZEOF(180, StackFrameIterator) +PLAT_ASM_SIZEOF(188, StackFrameIterator) PLAT_ASM_OFFSET(10, StackFrameIterator, m_FramePointer) PLAT_ASM_OFFSET(18, StackFrameIterator, m_ControlPC) PLAT_ASM_OFFSET(20, StackFrameIterator, m_RegDisplay) PLAT_ASM_OFFSET(178, StackFrameIterator, m_OriginalControlPC) +PLAT_ASM_OFFSET(180, StackFrameIterator, m_pPreviousTransitionFrame) PLAT_ASM_SIZEOF(50, PAL_LIMITED_CONTEXT) PLAT_ASM_OFFSET(0, PAL_LIMITED_CONTEXT, IP) diff --git a/src/coreclr/nativeaot/Runtime/amd64/ExceptionHandling.S b/src/coreclr/nativeaot/Runtime/amd64/ExceptionHandling.S index fd348e7466a96..4e2248ed346e8 100644 --- a/src/coreclr/nativeaot/Runtime/amd64/ExceptionHandling.S +++ b/src/coreclr/nativeaot/Runtime/amd64/ExceptionHandling.S @@ -532,3 +532,152 @@ NESTED_ENTRY RhpCallFilterFunclet, _TEXT, NoHandler ret NESTED_END RhpCallFilterFunclet, _TEXT + + +#ifdef FEATURE_OBJCMARSHAL + +////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// +// void* FASTCALL RhpCallPropagateExceptionCallback(void* pCallbackContext, void* pCallback, REGDISPLAY* pRegDisplay, +// ExInfo* pExInfo, PInvokeTransitionFrame* pPreviousTransitionFrame) +// +// INPUT: RDI: callback context +// RSI: callback +// RDX: REGDISPLAY* +// RCX: ExInfo* +// R8 : pPreviousTransitionFrame +// +// OUTPUT: +// 
+////////////////////////////////////////////////////////////////////////////////////////////////////////////// +NESTED_ENTRY RhpCallPropagateExceptionCallback, _TEXT, NoHandler + + // We could use a simpler prolog, as we don't have to be unwindable by StackFrameIterator::UnwindFuncletInvokeThunk() + FUNCLET_CALL_PROLOGUE 6, 1 + + locThread = 0 + locArg0 = 0x08 + locArg1 = 0x10 + locArg2 = 0x18 + locArg3 = 0x20 + locArg4 = 0x28 + + mov [rsp + locArg0], rdi // save arguments for later + mov [rsp + locArg1], rsi + mov [rsp + locArg2], rdx + mov [rsp + locArg3], rcx + mov [rsp + locArg4], r8 + + mov rbx, rdx + INLINE_GETTHREAD + mov rdx, rbx + + mov [rsp + locThread], rax // save Thread* for later + + // Clear the DoNotTriggerGc state before calling out to the propagation callback. + lock and dword ptr [rax + OFFSETOF__Thread__m_ThreadStateFlags], ~TSF_DoNotTriggerGc + + mov rax, [rdx + OFFSETOF__REGDISPLAY__pRbx] + mov rbx, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pRbp] + mov rbp, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR12] + mov r12, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR13] + mov r13, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR14] + mov r14, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR15] + mov r15, [rax] + +#if 0 // _DEBUG // @TODO: temporarily removed because trashing RBP breaks the debugger + // trash the values at the old homes to make sure nobody uses them + mov rcx, 0xbaaddeed + mov rax, [rdx + OFFSETOF__REGDISPLAY__pRbx] + mov [rax], rcx + mov rax, [rdx + OFFSETOF__REGDISPLAY__pRbp] + mov [rax], rcx + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR12] + mov [rax], rcx + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR13] + mov [rax], rcx + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR14] + mov [rax], rcx + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR15] + mov [rax], rcx +#endif + +#ifdef _DEBUG + // Call into some C++ code to validate the pop of the ExInfo. 
We only do this in debug because we + // have to spill all the preserved registers and then refill them after the call. + + mov rdi, [rdx + OFFSETOF__REGDISPLAY__pRbx] + mov [rdi] , rbx + mov rdi, [rdx + OFFSETOF__REGDISPLAY__pRbp] + mov [rdi] , rbp + mov rdi, [rdx + OFFSETOF__REGDISPLAY__pR12] + mov [rdi] , r12 + mov rdi, [rdx + OFFSETOF__REGDISPLAY__pR13] + mov [rdi] , r13 + mov rdi, [rdx + OFFSETOF__REGDISPLAY__pR14] + mov [rdi] , r14 + mov rdi, [rdx + OFFSETOF__REGDISPLAY__pR15] + mov [rdi] , r15 + + mov rdi, [rsp] // rdi <- Thread* + mov rsi, [rsp + locArg3] // rsi <- current ExInfo * + mov rdx, [rdx + OFFSETOF__REGDISPLAY__SP] // rdx <- resume SP value + call C_FUNC(RhpValidateExInfoPop) + + mov rdx, [rsp + locArg2] // rdx <- dispatch context + mov rax, [rdx + OFFSETOF__REGDISPLAY__pRbx] + mov rbx, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pRbp] + mov rbp, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR12] + mov r12, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR13] + mov r13, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR14] + mov r14, [rax] + mov rax, [rdx + OFFSETOF__REGDISPLAY__pR15] + mov r15, [rax] + +#endif + mov rsi, [rsp + locThread] // rsi <- Thread* + + // We must unhijack the thread at this point because the section of stack where the hijack is applied + // may go dead. If it does, then the next time we try to unhijack the thread, it will corrupt the stack. 
+ INLINE_THREAD_UNHIJACK rsi, rdi, rcx // Thread in rsi, trashes rdi and rcx + + mov rdi, [rsp + locArg3] // rdi <- current ExInfo * + mov rdx, [rdx + OFFSETOF__REGDISPLAY__SP] // rdx <- resume SP value + xor ecx, ecx // rcx <- 0 + +LOCAL_LABEL(Propagate_ExInfoLoop): + mov rdi, [rdi + OFFSETOF__ExInfo__m_pPrevExInfo] // rdi <- next ExInfo + cmp rdi, rcx + je LOCAL_LABEL(Propagate_ExInfoLoopDone) // we're done if it's null + cmp rdi, rdx + jl LOCAL_LABEL(Propagate_ExInfoLoop) // keep looping if it's lower than the new SP + +LOCAL_LABEL(Propagate_ExInfoLoopDone): + mov [rsi + OFFSETOF__Thread__m_pExInfoStackHead], rdi // store the new head on the Thread + + // Switch to preemptive mode. + mov r8, [rsp + locArg4] + mov [rsi + OFFSETOF__Thread__m_pTransitionFrame], r8 + + // reset RSP and RDI and jump to the propagation callback + mov rdi, [rsp + locArg0] // rdi <- callback context + mov rax, [rsp + locArg1] // rax <- callback + mov rsp, rdx // reset the SP + // Don't restore the stack pointer to exact same context. Leave the + // return IP on the stack to let the unwinder work if the callback throws + // an exception as opposed to failing fast. 
+ sub rsp, 8 + jmp rax + +NESTED_END RhpCallPropagateExceptionCallback, _TEXT + +#endif // FEATURE_OBJCMARSHAL diff --git a/src/coreclr/nativeaot/Runtime/arm/AsmOffsetsCpu.h b/src/coreclr/nativeaot/Runtime/arm/AsmOffsetsCpu.h index a8b3b9465a9f7..987710ef3676c 100644 --- a/src/coreclr/nativeaot/Runtime/arm/AsmOffsetsCpu.h +++ b/src/coreclr/nativeaot/Runtime/arm/AsmOffsetsCpu.h @@ -7,7 +7,7 @@ // // NOTE: the offsets MUST be in hex notation WITHOUT the 0x prefix -PLAT_ASM_SIZEOF(138, ExInfo) +PLAT_ASM_SIZEOF(13c, ExInfo) PLAT_ASM_OFFSET(0, ExInfo, m_pPrevExInfo) PLAT_ASM_OFFSET(4, ExInfo, m_pExContext) PLAT_ASM_OFFSET(8, ExInfo, m_exception) @@ -15,7 +15,7 @@ PLAT_ASM_OFFSET(0c, ExInfo, m_kind) PLAT_ASM_OFFSET(0d, ExInfo, m_passNumber) PLAT_ASM_OFFSET(10, ExInfo, m_idxCurClause) PLAT_ASM_OFFSET(18, ExInfo, m_frameIter) -PLAT_ASM_OFFSET(130, ExInfo, m_notifyDebuggerSP) +PLAT_ASM_OFFSET(134, ExInfo, m_notifyDebuggerSP) PLAT_ASM_OFFSET(4, PInvokeTransitionFrame, m_RIP) PLAT_ASM_OFFSET(8, PInvokeTransitionFrame, m_FramePointer) @@ -23,11 +23,12 @@ PLAT_ASM_OFFSET(0c, PInvokeTransitionFrame, m_pThread) PLAT_ASM_OFFSET(10, PInvokeTransitionFrame, m_Flags) PLAT_ASM_OFFSET(14, PInvokeTransitionFrame, m_PreservedRegs) -PLAT_ASM_SIZEOF(118, StackFrameIterator) +PLAT_ASM_SIZEOF(11c, StackFrameIterator) PLAT_ASM_OFFSET(08, StackFrameIterator, m_FramePointer) PLAT_ASM_OFFSET(0c, StackFrameIterator, m_ControlPC) PLAT_ASM_OFFSET(10, StackFrameIterator, m_RegDisplay) PLAT_ASM_OFFSET(114, StackFrameIterator, m_OriginalControlPC) +PLAT_ASM_OFFSET(118, StackFrameIterator, m_pPreviousTransitionFrame) PLAT_ASM_SIZEOF(70, PAL_LIMITED_CONTEXT) PLAT_ASM_OFFSET(24, PAL_LIMITED_CONTEXT, IP) diff --git a/src/coreclr/nativeaot/Runtime/arm64/AsmOffsetsCpu.h b/src/coreclr/nativeaot/Runtime/arm64/AsmOffsetsCpu.h index 6e59ade597ad4..cfef3d7f05f08 100644 --- a/src/coreclr/nativeaot/Runtime/arm64/AsmOffsetsCpu.h +++ b/src/coreclr/nativeaot/Runtime/arm64/AsmOffsetsCpu.h @@ -7,7 +7,7 @@ // // NOTE: 
the offsets MUST be in hex notation WITHOUT the 0x prefix -PLAT_ASM_SIZEOF(290, ExInfo) +PLAT_ASM_SIZEOF(298, ExInfo) PLAT_ASM_OFFSET(0, ExInfo, m_pPrevExInfo) PLAT_ASM_OFFSET(8, ExInfo, m_pExContext) PLAT_ASM_OFFSET(10, ExInfo, m_exception) @@ -15,7 +15,7 @@ PLAT_ASM_OFFSET(18, ExInfo, m_kind) PLAT_ASM_OFFSET(19, ExInfo, m_passNumber) PLAT_ASM_OFFSET(1c, ExInfo, m_idxCurClause) PLAT_ASM_OFFSET(20, ExInfo, m_frameIter) -PLAT_ASM_OFFSET(288, ExInfo, m_notifyDebuggerSP) +PLAT_ASM_OFFSET(290, ExInfo, m_notifyDebuggerSP) PLAT_ASM_OFFSET(0, PInvokeTransitionFrame, m_FramePointer) PLAT_ASM_OFFSET(8, PInvokeTransitionFrame, m_RIP) @@ -23,11 +23,12 @@ PLAT_ASM_OFFSET(10, PInvokeTransitionFrame, m_pThread) PLAT_ASM_OFFSET(18, PInvokeTransitionFrame, m_Flags) PLAT_ASM_OFFSET(20, PInvokeTransitionFrame, m_PreservedRegs) -PLAT_ASM_SIZEOF(268, StackFrameIterator) +PLAT_ASM_SIZEOF(270, StackFrameIterator) PLAT_ASM_OFFSET(10, StackFrameIterator, m_FramePointer) PLAT_ASM_OFFSET(18, StackFrameIterator, m_ControlPC) PLAT_ASM_OFFSET(20, StackFrameIterator, m_RegDisplay) PLAT_ASM_OFFSET(260, StackFrameIterator, m_OriginalControlPC) +PLAT_ASM_OFFSET(268, StackFrameIterator, m_pPreviousTransitionFrame) PLAT_ASM_SIZEOF(C0, PAL_LIMITED_CONTEXT) @@ -62,4 +63,5 @@ PLAT_ASM_OFFSET(d0, REGDISPLAY, pX26) PLAT_ASM_OFFSET(d8, REGDISPLAY, pX27) PLAT_ASM_OFFSET(e0, REGDISPLAY, pX28) PLAT_ASM_OFFSET(e8, REGDISPLAY, pFP) +PLAT_ASM_OFFSET(f0, REGDISPLAY, pLR) PLAT_ASM_OFFSET(110, REGDISPLAY, D) diff --git a/src/coreclr/nativeaot/Runtime/arm64/ExceptionHandling.S b/src/coreclr/nativeaot/Runtime/arm64/ExceptionHandling.S index 6fb47f101823f..d0425171e1d19 100644 --- a/src/coreclr/nativeaot/Runtime/arm64/ExceptionHandling.S +++ b/src/coreclr/nativeaot/Runtime/arm64/ExceptionHandling.S @@ -515,6 +515,13 @@ NoAbort: mov sp, x2 br x0 +#undef rsp_offset_is_not_handling_thread_abort +#undef rsp_offset_x0 +#undef rsp_offset_x1 +#undef rsp_offset_x2 +#undef rsp_offset_x3 +#undef rsp_CatchFunclet_offset_thread 
+ NESTED_END RhpCallCatchFunclet, _Text // @@ -611,6 +618,9 @@ SetSuccess: FREE_CALL_FUNCLET_FRAME 0x60 EPILOG_RETURN +#undef rsp_offset_x1 +#undef rsp_FinallyFunclet_offset_thread + NESTED_END RhpCallFinallyFunclet, _Text @@ -651,3 +661,123 @@ SetSuccess: EPILOG_RETURN NESTED_END RhpCallFilterFunclet, Text + +#ifdef FEATURE_OBJCMARSHAL + +// +// void* FASTCALL RhpCallPropagateExceptionCallback(void* pCallbackContext, void* pCallback, REGDISPLAY* pRegDisplay, +// ExInfo* pExInfo, PInvokeTransitionFrame* pPreviousTransitionFrame) +// +// INPUT: X0: callback context +// X1: callback +// X2: REGDISPLAY* +// X3: ExInfo* +// X4: pPreviousTransitionFrame +// +// OUTPUT: +// + + NESTED_ENTRY RhpCallPropagateExceptionCallback, _TEXT, NoHandler + +#define rsp_offset_x0 0x10 +#define rsp_offset_x1 0x18 +#define rsp_offset_x2 0x20 +#define rsp_offset_x3 0x28 +#define rsp_offset_x4 0x30 +#define rsp_CallPropagationCallback_offset_thread 0x38 + + // Using the NO_FP macro so that the debugger unwinds using SP. + // This makes backtraces work even after using RESTORE_PRESERVED_REGISTERS. 
+ PROLOG_SAVE_REG_PAIR_NO_FP_INDEXED fp, lr, -0x40 + mov fp, sp + stp x0, x1, [sp, #rsp_offset_x0] // x0 to x3 are stored to restore them anytime + stp x2, x3, [sp, #rsp_offset_x2] + stp x4, xzr, [sp, #rsp_offset_x4] + // str xzr, [sp, #rsp_CallPropagationCallback_offset_thread] // xzr makes space to store the thread obj + + // + // clear the DoNotTriggerGc flag, trashes x4-x6 + // + + bl C_FUNC(RhpGetThread) + str x0, [sp, rsp_CallPropagationCallback_offset_thread] + mov x5,x0 + ldp x0, x1, [sp, #rsp_offset_x0] + ldp x2, x3, [sp, #rsp_offset_x2] + + add x12, x5, #OFFSETOF__Thread__m_ThreadStateFlags + +ClearRetry_Propagate: + ldxr w4, [x12] + bic w4, w4, #TSF_DoNotTriggerGc + stxr w6, w4, [x12] + cbz w6, ClearSuccess_Propagate + b ClearRetry_Propagate +ClearSuccess_Propagate: + + // + // set preserved regs to the values expected by the funclet + // + RESTORE_PRESERVED_REGISTERS x2 + // + // trash the values at the old homes to make sure nobody uses them + // + TRASH_PRESERVED_REGISTERS_STORAGE x2 + +#ifdef _DEBUG + // Call into some C++ code to validate the pop of the ExInfo. We only do this in debug because we + // have to spill all the preserved registers and then refill them after the call. + + SAVE_PRESERVED_REGISTERS x2 + + ldr x0, [sp, rsp_CallPropagationCallback_offset_thread] // x0 <- Thread* + ldr x1, [sp, #rsp_offset_x3] // x1 <- current ExInfo* + ldr x2, [x2, #OFFSETOF__REGDISPLAY__SP] // x2 <- resume SP value + bl C_FUNC(RhpValidateExInfoPop) + + ldr x2, [sp, rsp_offset_x2] // x2 <- REGDISPLAY* + + RESTORE_PRESERVED_REGISTERS x2 +#endif + + ldr x1, [sp, rsp_CallPropagationCallback_offset_thread] + + // We must unhijack the thread at this point because the section of stack where the hijack is applied + // may go dead. If it does, then the next time we try to unhijack the thread, it will corrupt the stack. 
+ INLINE_THREAD_UNHIJACK x1, x3, x12 // Thread in x1, trashes x3 and x12 + + ldr x3, [sp, #rsp_offset_x3] // x3 <- current ExInfo* + ldr x2, [x2, #OFFSETOF__REGDISPLAY__SP] // x2 <- resume SP value + +Propagate_PopExInfoLoop: + ldr x3, [x3, #OFFSETOF__ExInfo__m_pPrevExInfo] // x3 <- next ExInfo + cbz x3, Propagate_DonePopping // if (pExInfo == null) { we're done } + cmp x3, x2 + blt Propagate_PopExInfoLoop // if (pExInfo < resume SP} { keep going } + +Propagate_DonePopping: + str x3, [x1, #OFFSETOF__Thread__m_pExInfoStackHead] // store the new head on the Thread + + // restore preemptive mode + ldr x4, [sp, #rsp_offset_x4] // pPreviousTransitionFrame + str x4, [x1, #OFFSETOF__Thread__m_pTransitionFrame] + + // reset SP and LR and jump to continuation address + ldr x0, [sp, #rsp_offset_x0] // callback context + ldr x1, [sp, #rsp_offset_x1] // callback + ldr x2, [sp, #rsp_offset_x2] // REGDISPLAY* + ldr x3, [x2, #OFFSETOF__REGDISPLAY__pLR] // x3 <- &resume LR value + ldr lr, [x3] + ldr x3, [x2, #OFFSETOF__REGDISPLAY__SP] // x3 <- resume SP value + mov sp, x3 + br x1 + +#undef rsp_offset_x0 +#undef rsp_offset_x1 +#undef rsp_offset_x2 +#undef rsp_offset_x3 +#undef rsp_CallPropagationCallback_offset_thread + + NESTED_END RhpCallPropagateExceptionCallback, _Text + +#endif // FEATURE_OBJCMARSHAL diff --git a/src/coreclr/nativeaot/Runtime/i386/AsmOffsetsCpu.h b/src/coreclr/nativeaot/Runtime/i386/AsmOffsetsCpu.h index a92f24d789b47..c16a5f9e696e3 100644 --- a/src/coreclr/nativeaot/Runtime/i386/AsmOffsetsCpu.h +++ b/src/coreclr/nativeaot/Runtime/i386/AsmOffsetsCpu.h @@ -7,7 +7,7 @@ // // NOTE: the offsets MUST be in hex notation WITHOUT the 0x prefix -PLAT_ASM_SIZEOF(c0, ExInfo) +PLAT_ASM_SIZEOF(c4, ExInfo) PLAT_ASM_OFFSET(0, ExInfo, m_pPrevExInfo) PLAT_ASM_OFFSET(4, ExInfo, m_pExContext) PLAT_ASM_OFFSET(8, ExInfo, m_exception) @@ -15,7 +15,7 @@ PLAT_ASM_OFFSET(0c, ExInfo, m_kind) PLAT_ASM_OFFSET(0d, ExInfo, m_passNumber) PLAT_ASM_OFFSET(10, ExInfo, m_idxCurClause) 
PLAT_ASM_OFFSET(14, ExInfo, m_frameIter) -PLAT_ASM_OFFSET(bc, ExInfo, m_notifyDebuggerSP) +PLAT_ASM_OFFSET(c0, ExInfo, m_notifyDebuggerSP) PLAT_ASM_OFFSET(0, PInvokeTransitionFrame, m_RIP) PLAT_ASM_OFFSET(4, PInvokeTransitionFrame, m_FramePointer) @@ -23,11 +23,12 @@ PLAT_ASM_OFFSET(8, PInvokeTransitionFrame, m_pThread) PLAT_ASM_OFFSET(0c, PInvokeTransitionFrame, m_Flags) PLAT_ASM_OFFSET(10, PInvokeTransitionFrame, m_PreservedRegs) -PLAT_ASM_SIZEOF(a8, StackFrameIterator) +PLAT_ASM_SIZEOF(ac, StackFrameIterator) PLAT_ASM_OFFSET(08, StackFrameIterator, m_FramePointer) PLAT_ASM_OFFSET(0c, StackFrameIterator, m_ControlPC) PLAT_ASM_OFFSET(10, StackFrameIterator, m_RegDisplay) PLAT_ASM_OFFSET(a4, StackFrameIterator, m_OriginalControlPC) +PLAT_ASM_OFFSET(a8, StackFrameIterator, m_pPreviousTransitionFrame) PLAT_ASM_SIZEOF(1c, PAL_LIMITED_CONTEXT) PLAT_ASM_OFFSET(0, PAL_LIMITED_CONTEXT, IP) diff --git a/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.cpp b/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.cpp index 8ca31f5ae05bc..8dd3e839a8c79 100644 --- a/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.cpp +++ b/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.cpp @@ -278,6 +278,7 @@ uintptr_t UnixNativeCodeManager::GetConservativeUpperBoundForOutgoingArgs(Method } bool UnixNativeCodeManager::UnwindStackFrame(MethodInfo * pMethodInfo, + uint32_t flags, REGDISPLAY * pRegisterSet, // in/out PInvokeTransitionFrame** ppPreviousTransitionFrame) // out { @@ -314,10 +315,16 @@ bool UnixNativeCodeManager::UnwindStackFrame(MethodInfo * pMethodInfo, } *ppPreviousTransitionFrame = *(PInvokeTransitionFrame**)(basePointer + slot); - return true; - } - *ppPreviousTransitionFrame = NULL; + if ((flags & USFF_StopUnwindOnTransitionFrame) != 0) + { + return true; + } + } + else + { + *ppPreviousTransitionFrame = NULL; + } if (!VirtualUnwind(pRegisterSet)) { diff --git a/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.h 
b/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.h index 7257b209129e7..2c79efb281b6d 100644 --- a/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.h +++ b/src/coreclr/nativeaot/Runtime/unix/UnixNativeCodeManager.h @@ -45,6 +45,7 @@ class UnixNativeCodeManager : public ICodeManager bool isActiveStackFrame); bool UnwindStackFrame(MethodInfo * pMethodInfo, + uint32_t flags, REGDISPLAY * pRegisterSet, // in/out PInvokeTransitionFrame** ppPreviousTransitionFrame); // out diff --git a/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.cpp b/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.cpp index 70ec2175fe79a..aceb755fa5dd5 100644 --- a/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.cpp +++ b/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.cpp @@ -535,6 +535,7 @@ uintptr_t CoffNativeCodeManager::GetConservativeUpperBoundForOutgoingArgs(Method } bool CoffNativeCodeManager::UnwindStackFrame(MethodInfo * pMethodInfo, + uint32_t flags, REGDISPLAY * pRegisterSet, // in/out PInvokeTransitionFrame** ppPreviousTransitionFrame) // out { @@ -574,10 +575,16 @@ bool CoffNativeCodeManager::UnwindStackFrame(MethodInfo * pMethodInfo, } *ppPreviousTransitionFrame = *(PInvokeTransitionFrame**)(basePointer + slot); - return true; - } - *ppPreviousTransitionFrame = NULL; + if ((flags & USFF_StopUnwindOnTransitionFrame) != 0) + { + return true; + } + } + else + { + *ppPreviousTransitionFrame = NULL; + } CONTEXT context; KNONVOLATILE_CONTEXT_POINTERS contextPointers; diff --git a/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.h b/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.h index 1598f640d18c9..50bbc0e85373c 100644 --- a/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.h +++ b/src/coreclr/nativeaot/Runtime/windows/CoffNativeCodeManager.h @@ -81,6 +81,7 @@ class CoffNativeCodeManager : public ICodeManager bool isActiveStackFrame); bool UnwindStackFrame(MethodInfo * pMethodInfo, + 
uint32_t flags, REGDISPLAY * pRegisterSet, // in/out PInvokeTransitionFrame** ppPreviousTransitionFrame); // out diff --git a/src/coreclr/nativeaot/System.Private.CoreLib/src/System/Runtime/InteropServices/ObjectiveCMarshal.NativeAot.cs b/src/coreclr/nativeaot/System.Private.CoreLib/src/System/Runtime/InteropServices/ObjectiveCMarshal.NativeAot.cs index 831edffbd1a35..bcef4e47b28ba 100644 --- a/src/coreclr/nativeaot/System.Private.CoreLib/src/System/Runtime/InteropServices/ObjectiveCMarshal.NativeAot.cs +++ b/src/coreclr/nativeaot/System.Private.CoreLib/src/System/Runtime/InteropServices/ObjectiveCMarshal.NativeAot.cs @@ -82,6 +82,28 @@ static bool TryGetTaggedMemory(IntPtr pObj, IntPtr* tagged) return s_OnEnteredFinalizerQueueCallback; } + [RuntimeExport("ObjectiveCMarshalGetUnhandledExceptionPropagationHandler")] +#pragma warning disable IDE0060 + static IntPtr ObjectiveCMarshalGetUnhandledExceptionPropagationHandler(object exceptionObj, IntPtr ip, out IntPtr context) +#pragma warning restore IDE0060 + { + if (s_unhandledExceptionPropagationHandler == null) + { + context = IntPtr.Zero; + return IntPtr.Zero; + } + + Exception? ex = exceptionObj as Exception; + if (ex == null) + Environment.FailFast("Exceptions must derive from the System.Exception class"); + + // TODO: convert IP to RuntimeMethodHandle. 
+ // https://github.com/dotnet/runtime/issues/80985 + RuntimeMethodHandle lastMethod = default; + + return (IntPtr)s_unhandledExceptionPropagationHandler(ex, lastMethod, out context); + } + private static bool TryInitializeReferenceTracker( delegate* unmanaged beginEndCallback, delegate* unmanaged isReferencedCallback, diff --git a/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/NativeObjCMarshalTests.cpp b/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/NativeObjCMarshalTests.cpp index 1062d5a738826..c2ca337d170d5 100644 --- a/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/NativeObjCMarshalTests.cpp +++ b/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/NativeObjCMarshalTests.cpp @@ -1,5 +1,9 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. + +// Always enable asserts. +#undef NDEBUG + #include #include #include @@ -7,6 +11,7 @@ #include #include +using BeforeThrowNativeExceptionCallback = void(STDMETHODCALLTYPE *)(void); using BeginEndCallback = void(STDMETHODCALLTYPE *)(void); using IsReferencedCallback = int(STDMETHODCALLTYPE *)(void*); using EnteredFinalizationCallback = void(STDMETHODCALLTYPE *)(void*); @@ -90,6 +95,15 @@ extern "C" DLL_EXPORT void GetExports( *enteredFinalizer = EnteredFinalizerCb; } +static BeforeThrowNativeExceptionCallback s_beforeThrow; + +extern "C" DLL_EXPORT void SetImports(BeforeThrowNativeExceptionCallback beforeThrow) +{ + assert(beforeThrow != nullptr); + + s_beforeThrow = beforeThrow; +} + using propagation_func_t = void(*)(void*); namespace @@ -97,6 +111,7 @@ namespace [[noreturn]] void ThrowInt(void* cxt) { + s_beforeThrow(); int val = (int)(size_t)cxt; throw val; } @@ -104,6 +119,7 @@ namespace [[noreturn]] void ThrowException(void*) { + s_beforeThrow(); throw std::exception{}; } } @@ -131,6 +147,8 @@ extern "C" DLL_EXPORT int CallAndCatch(fptr_t fptr, int a) } catch (int e) { + // check that a was not clobbered. 
+ assert(a == e); return e; } catch (const std::exception &e) diff --git a/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs b/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs index a06a4327a78a0..ca97dbba76302 100644 --- a/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs +++ b/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs @@ -22,6 +22,10 @@ public static extern unsafe void GetExports( out delegate* unmanaged isReferencedCallback, out delegate* unmanaged trackedObjectEnteredFinalization); + [DllImport(nameof(NativeObjCMarshalTests))] + public static extern unsafe void SetImports( + delegate* unmanaged beforeThrowNativeExceptionCallback); + [DllImport(nameof(NativeObjCMarshalTests))] public static extern int CallAndCatch(IntPtr fptr, int a); @@ -173,6 +177,9 @@ static void InitializeObjectiveCMarshal() delegate* unmanaged trackedObjectEnteredFinalization; NativeObjCMarshalTests.GetExports(out beginEndCallback, out isReferencedCallback, out trackedObjectEnteredFinalization); + delegate* unmanaged beforeThrow = &BeforeThrowNativeException; + NativeObjCMarshalTests.SetImports(beforeThrow); + ObjectiveCMarshal.Initialize(beginEndCallback, isReferencedCallback, trackedObjectEnteredFinalization, OnUnhandledExceptionPropagationHandler); } @@ -282,6 +289,14 @@ static unsafe void Validate_ReferenceTracking_Scenario() _Validate_ExceptionPropagation(); } + [UnmanagedCallersOnly] + private static void BeforeThrowNativeException() + { + // This function is called from the exception propagation callback. + // It ensures that the thread was transitioned to preemptive mode. 
+ GC.Collect(); + } + private class IntException : Exception { public int Value { get; } @@ -293,22 +308,72 @@ private class ExceptionException : Exception public ExceptionException() {} } + static bool s_finallyExecuted; + [UnmanagedCallersOnly] - static void UCO_ThrowIntException(int a) => throw new IntException(a); + static void UCO_ThrowIntException(int a) + { + try + { + throw new IntException(a); + } + finally + { + s_finallyExecuted = true; + } + } + [UnmanagedCallersOnly] - static void UCO_ThrowExceptionException(int _) => throw new ExceptionException(); + static void UCO_ThrowExceptionException(int _) + { + try + { + throw new ExceptionException(); + } + finally + { + s_finallyExecuted = true; + } + } delegate void ThrowExceptionDelegate(int a); - static void DEL_ThrowIntException(int a) => throw new IntException(a); - static void DEL_ThrowExceptionException(int _) => throw new ExceptionException(); + + static void DEL_ThrowIntException(int a) + { + try + { + throw new IntException(a); + } + finally + { + s_finallyExecuted = true; + } + } + + static void DEL_ThrowExceptionException(int _) + { + try + { + throw new ExceptionException(); + } + finally + { + s_finallyExecuted = true; + } + } static unsafe delegate* unmanaged OnUnhandledExceptionPropagationHandler( Exception e, RuntimeMethodHandle lastMethodHandle, out IntPtr context) { - var lastMethod = (MethodInfo)MethodBase.GetMethodFromHandle(lastMethodHandle); - Assert.True(lastMethod != null); + // Not yet implemented for NativeAOT. + // https://github.com/dotnet/runtime/issues/80985 + if (!TestLibrary.Utilities.IsNativeAot) + { + var lastMethod = (MethodInfo)MethodBase.GetMethodFromHandle(lastMethodHandle); + Assert.True(lastMethod != null); + } context = IntPtr.Zero; if (e is IntException ie) @@ -335,14 +400,6 @@ class Scenario // Do not call this method from Main as it depends on a previous test for set up. static void _Validate_ExceptionPropagation() { - // Not yet implemented for NativeAOT.
- // https://github.com/dotnet/runtime/issues/77472 - if (TestLibrary.Utilities.IsNativeAot) - { - Console.WriteLine($"Skipping {nameof(_Validate_ExceptionPropagation)}, NYI"); - return; - } - Console.WriteLine($"Running {nameof(_Validate_ExceptionPropagation)}"); var delThrowInt = new ThrowExceptionDelegate(DEL_ThrowIntException); @@ -357,9 +414,11 @@ static void _Validate_ExceptionPropagation() foreach (var scen in scenarios) { + s_finallyExecuted = false; delegate* unmanaged testNativeMethod = scen.Fptr; int ret = NativeObjCMarshalTests.CallAndCatch((IntPtr)testNativeMethod, scen.Expected); Assert.Equal(scen.Expected, ret); + Assert.True(s_finallyExecuted, "Finally block not executed."); } GC.KeepAlive(delThrowInt);