From ab85b040f0b7ffe9d49ccd9f5fcc0d51a386d654 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 25 Apr 2024 08:20:57 -0700 Subject: [PATCH 01/51] WorkingOnIt --- src/coreclr/debug/daccess/request.cpp | 2 +- src/coreclr/vm/amd64/asmconstants.h | 2 +- src/coreclr/vm/jitinterface.cpp | 1 + src/coreclr/vm/method.cpp | 128 +++++++++++++++++++++++++- src/coreclr/vm/method.hpp | 46 ++++++++- src/coreclr/vm/methodtable.cpp | 4 +- src/coreclr/vm/methodtablebuilder.cpp | 20 ++++ src/coreclr/vm/prestub.cpp | 1 + 8 files changed, 192 insertions(+), 12 deletions(-) diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index d6312ad44c66c..606d952233f2c 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -435,7 +435,7 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA MethodDesc * pMD = it.GetMethodDesc(); if (pMD->GetSlot() == slot) { - *value = pMD->GetMethodEntryPoint(); + *value = pMD->GetMethodEntryPoint_NoAlloc(); hr = S_OK; } } diff --git a/src/coreclr/vm/amd64/asmconstants.h b/src/coreclr/vm/amd64/asmconstants.h index 47cca560d7bb1..79047dfdc6d79 100644 --- a/src/coreclr/vm/amd64/asmconstants.h +++ b/src/coreclr/vm/amd64/asmconstants.h @@ -98,7 +98,7 @@ ASMCONSTANTS_C_ASSERT(SIZEOF__ComPrestubMethodFrame ASMCONSTANTS_C_ASSERT(SIZEOF__ComMethodFrame == sizeof(ComMethodFrame)); -#define OFFSETOF__ComPlusCallMethodDesc__m_pComPlusCallInfo DBG_FRE(0x30, 0x08) +#define OFFSETOF__ComPlusCallMethodDesc__m_pComPlusCallInfo DBG_FRE(0x38, 0x10) ASMCONSTANTS_C_ASSERT(OFFSETOF__ComPlusCallMethodDesc__m_pComPlusCallInfo == offsetof(ComPlusCallMethodDesc, m_pComPlusCallInfo)); diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index dd61fff1cf887..19291f5f4fdfe 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -8695,6 +8695,7 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd, JIT_TO_EE_TRANSITION_LEAF(); MethodDesc* method = GetMethod(methodHnd); + method->EnsureTemporaryEntryPoint(method->GetLoaderAllocator()); //@GENERICS: shouldn't be doing this for instantiated methods as they live elsewhere _ASSERTE(!method->HasMethodInstantiation()); diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 6f97c67c87895..aa87df62276dc 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -446,11 +446,46 @@ Signature MethodDesc::GetSignature() return Signature(pSig, cSig); } +#ifndef HAS_COMPACT_ENTRYPOINTS +PCODE MethodDesc::GetMethodEntryPoint_NoAlloc() +{ + CONTRACTL + { + NOTHROW; + GC_NOTRIGGER; + MODE_ANY; + SUPPORTS_DAC; + } + CONTRACTL_END; + + // Similarly to SetMethodEntryPoint(), it is up to the caller to ensure that calls to this function are appropriately + // synchronized + + // Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync! 
+ + if (HasNonVtableSlot()) + { + SIZE_T size = GetBaseSize(); + + TADDR pSlot = dac_cast(this) + size; + + return *PTR_PCODE(pSlot); + } + + _ASSERTE(GetMethodTable()->IsCanonicalMethodTable()); + return GetMethodTable()->GetSlot(GetSlot()); +} +#endif + PCODE MethodDesc::GetMethodEntryPoint() { CONTRACTL { +#ifndef HAS_COMPACT_ENTRYPOINTS NOTHROW; +#else + THROWS; +#endif GC_NOTRIGGER; MODE_ANY; SUPPORTS_DAC; @@ -468,6 +503,13 @@ PCODE MethodDesc::GetMethodEntryPoint() TADDR pSlot = dac_cast(this) + size; +#if !defined(HAS_COMPACT_ENTRYPOINTS) && !defined(DACCESS_COMPILE) + if (*PTR_PCODE(pSlot) == NULL) + { + EnsureTemporaryEntryPoint(GetLoaderAllocator()); + _ASSERTE(*PTR_PCODE(pSlot) != NULL); + } +#endif return *PTR_PCODE(pSlot); } @@ -2167,7 +2209,8 @@ BOOL MethodDesc::IsPointingToPrestub() { if (IsVersionableWithVtableSlotBackpatch()) { - return GetMethodEntryPoint() == GetTemporaryEntryPoint(); + PCODE methodEntrypoint = GetMethodEntryPoint_NoAlloc(); + return methodEntrypoint == GetTemporaryEntryPoint_NoAlloc() && methodEntrypoint != NULL; } return TRUE; } @@ -2829,6 +2872,7 @@ TADDR MethodDescChunk::AllocateCompactEntryPoints(LoaderAllocator *pLoaderAlloca #endif // HAS_COMPACT_ENTRYPOINTS //******************************************************************************* +#ifdef HAS_COMPACT_ENTRYPOINTS PCODE MethodDescChunk::GetTemporaryEntryPoint(int index) { LIMITED_METHOD_CONTRACT; @@ -2854,7 +2898,9 @@ PCODE MethodDescChunk::GetTemporaryEntryPoint(int index) return Precode::GetPrecodeForTemporaryEntryPoint(GetTemporaryEntryPoints(), index)->GetEntryPoint(); } +#endif // HAS_COMPACT_ENTRYPOINTS +#ifdef HAS_COMPACT_ENTRYPOINTS PCODE MethodDesc::GetTemporaryEntryPoint() { CONTRACTL @@ -2875,6 +2921,35 @@ PCODE MethodDesc::GetTemporaryEntryPoint() return pEntryPoint; } +#else +PCODE MethodDesc::GetTemporaryEntryPoint() +{ + CONTRACTL + { + THROWS; + GC_NOTRIGGER; + MODE_ANY; + } + CONTRACTL_END; + + TADDR pEntryPoint = GetTemporaryEntryPoint_NoAlloc(); + if (pEntryPoint != NULL) + return pEntryPoint; + +#ifndef DACCESS_COMPILE + EnsureTemporaryEntryPoint(GetLoaderAllocator()); + pEntryPoint = GetTemporaryEntryPoint_NoAlloc(); + _ASSERTE(pEntryPoint != NULL); + +#ifdef _DEBUG + MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint); + _ASSERTE(PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD)); +#endif + +#endif + return pEntryPoint; +} +#endif // HAS_COMPACT_ENTRYPOINTS #ifndef DACCESS_COMPILE //******************************************************************************* @@ -2882,10 +2957,16 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc { WRAPPER_NO_CONTRACT; +#ifdef HAS_COMPACT_ENTRYPOINTS GetMethodDescChunk()->EnsureTemporaryEntryPointsCreated(pLoaderAllocator, pamTracker); +#else + EnsureTemporaryEntryPointCore(pLoaderAllocator, pamTracker); +#endif PTR_PCODE pSlot = GetAddrOfSlot(); +#ifdef HAS_COMPACT_ENTRYPOINTS _ASSERTE(*pSlot == NULL); +#endif *pSlot = GetTemporaryEntryPoint(); if (RequiresStableEntryPoint()) @@ -2896,7 +2977,41 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc } } +#ifndef HAS_COMPACT_ENTRYPOINTS +void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) +{ + if (GetTemporaryEntryPoint_NoAlloc() == NULL) + { + AllocMemTracker amt; + EnsureTemporaryEntryPointCore(pLoaderAllocator, &amt); + } +} + +void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) +{ + if (GetTemporaryEntryPoint_NoAlloc() 
== NULL) + { + PTR_PCODE pSlot = GetAddrOfSlot(); + + AllocMemTracker amt; + Precode* pPrecode = Precode::Allocate(GetPrecodeType(), this, GetLoaderAllocator(), &amt); + + if (InterlockedCompareExchangeT(&m_pTemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == NULL) + amt.SuppressRelease(); + + PCODE tempEntryPoint = GetTemporaryEntryPoint_NoAlloc(); + _ASSERTE(tempEntryPoint != NULL); + + if (*pSlot == NULL) + { + InterlockedCompareExchangeT(pSlot, tempEntryPoint, (PCODE)NULL); + } + } +} +#endif + //******************************************************************************* +#ifdef HAS_COMPACT_ENTRYPOINTS void MethodDescChunk::CreateTemporaryEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) { WRAPPER_NO_CONTRACT; @@ -2913,10 +3028,12 @@ void MethodDescChunk::CreateTemporaryEntryPoints(LoaderAllocator *pLoaderAllocat } #endif // HAS_COMPACT_ENTRYPOINTS - *(((TADDR *)this)-1) = temporaryEntryPoints; + m_pTemporaryEntryPoints = temporaryEntryPoints; _ASSERTE(GetTemporaryEntryPoints() != NULL); } +#endif // HAS_COMPACT_ENTRYPOINTS + //******************************************************************************* Precode* MethodDesc::GetOrCreatePrecode() @@ -2930,12 +3047,12 @@ Precode* MethodDesc::GetOrCreatePrecode() } PTR_PCODE pSlot = GetAddrOfSlot(); - PCODE tempEntry = GetTemporaryEntryPoint(); + PCODE tempEntry = GetTemporaryEntryPoint_NoAlloc(); PrecodeType requiredType = GetPrecodeType(); PrecodeType availableType = PRECODE_INVALID; - if (!GetMethodDescChunk()->HasCompactEntryPoints()) + if (!GetMethodDescChunk()->HasCompactEntryPoints() && tempEntry != NULL) { availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); } @@ -3810,7 +3927,7 @@ MethodDescChunk::EnumMemoryRegions(CLRDataEnumMemoryFlags flags) { pMT->EnumMemoryRegions(flags); } - +#ifdef HAS_COMPACT_ENTRYPOINTS SIZE_T size; #ifdef HAS_COMPACT_ENTRYPOINTS @@ -3825,6 +3942,7 @@ MethodDescChunk::EnumMemoryRegions(CLRDataEnumMemoryFlags flags) } DacEnumMemoryRegion(GetTemporaryEntryPoints(), size); +#endif // HAS_COMPACT_ENTRYPOINTS MethodDesc * pMD = GetFirstMethodDesc(); MethodDesc * pOldMD = NULL; diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index b198ea27733a6..8ab50730ce19c 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -238,7 +238,7 @@ class MethodDesc _ASSERTE(HasStableEntryPoint()); _ASSERTE(!IsVersionableWithVtableSlotBackpatch()); - return GetMethodEntryPoint(); + return GetMethodEntryPoint_NoAlloc(); } void SetMethodEntryPoint(PCODE addr); @@ -246,11 +246,27 @@ class MethodDesc PCODE GetTemporaryEntryPoint(); + PCODE GetTemporaryEntryPoint_NoAlloc() + { + LIMITED_METHOD_CONTRACT; +#ifdef HAS_COMPACT_ENTRYPOINTS + return GetTemporaryEntryPoint(); +#else + return VolatileLoadWithoutBarrier(&m_pTemporaryEntryPoint); +#endif + } + void SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); PCODE GetInitialEntryPointForCopiedSlot() { - WRAPPER_NO_CONTRACT; + CONTRACTL + { + THROWS; + GC_NOTRIGGER; + MODE_ANY; + } + CONTRACTL_END; if (IsVersionableWithVtableSlotBackpatch()) { @@ -1431,6 +1447,21 @@ class MethodDesc // PCODE GetMethodEntryPoint(); + PCODE GetMethodEntryPoint_NoAlloc() +#ifdef HAS_COMPACT_ENTRYPOINTS + { + WRAPPER_NO_CONTRACT; + return GetMethodEntryPoint(); + } +#else + ; +#endif + +#ifndef HAS_COMPACT_ENTRYPOINTS + void EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator); + void EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator, AllocMemTracker 
*pamTracker); +#endif + //******************************************************************************* // Returns the address of the native code. PCODE GetNativeCode(); @@ -1637,6 +1668,9 @@ class MethodDesc // The slot number of this MethodDesc in the vtable array. WORD m_wSlotNumber; WORD m_wFlags; +#ifndef HAS_COMPACT_ENTRYPOINTS + PCODE m_pTemporaryEntryPoint; +#endif public: #ifdef DACCESS_COMPILE @@ -2144,10 +2178,11 @@ class MethodDescChunk MethodTable *initialMT, class AllocMemTracker *pamTracker); +#ifdef HAS_COMPACT_ENTRYPOINTS TADDR GetTemporaryEntryPoints() { LIMITED_METHOD_CONTRACT; - return *(dac_cast(this) - 1); + return m_pCompactEntryPoints; } PCODE GetTemporaryEntryPoint(int index); @@ -2167,6 +2202,7 @@ class MethodDescChunk } void CreateTemporaryEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); +#endif #ifdef HAS_COMPACT_ENTRYPOINTS // @@ -2302,6 +2338,10 @@ class MethodDescChunk PTR_MethodDescChunk m_next; +#ifdef HAS_COMPACT_ENTRYPOINTS + TADDR m_pCompactEntryPoints; +#endif + BYTE m_size; // The size of this chunk minus 1 (in multiples of MethodDesc::ALIGNMENT) BYTE m_count; // The number of MethodDescs in this chunk minus 1 UINT16 m_flagsAndTokenRange; diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index a20835d2d0114..074604cb11fdb 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -1706,7 +1706,7 @@ MethodTable::DebugDumpVtable(LPCUTF8 szClassName, BOOL fDebug) name, pszName, IsMdFinal(dwAttrs) ? " (final)" : "", - (VOID *)pMD->GetMethodEntryPoint(), + (VOID *)pMD->GetMethodEntryPoint_NoAlloc(), pMD->GetSlot() ); OutputDebugStringUtf8(buff); @@ -1720,7 +1720,7 @@ MethodTable::DebugDumpVtable(LPCUTF8 szClassName, BOOL fDebug) pMD->GetClass()->GetDebugClassName(), pszName, IsMdFinal(dwAttrs) ? " (final)" : "", - (VOID *)pMD->GetMethodEntryPoint(), + (VOID *)pMD->GetMethodEntryPoint_NoAlloc(), pMD->GetSlot() )); } diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 69d2a105ecd8d..cda16127a566d 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10830,6 +10830,7 @@ MethodTableBuilder::SetupMethodTable2( // Keep bmtInterface data around since we no longer write the flags (IsDeclaredOnType and // IsImplementedByParent) into the interface map (these flags are only required during type loading). +#ifdef HAS_COMPACT_ENTRYPOINTS { for (MethodDescChunk *pChunk = GetHalfBakedClass()->GetChunks(); pChunk != NULL; pChunk = pChunk->GetNextChunk()) { @@ -10838,6 +10839,22 @@ MethodTableBuilder::SetupMethodTable2( pChunk->EnsureTemporaryEntryPointsCreated(GetLoaderAllocator(), GetMemTracker()); } } +#else +#if MAYBE_WE_DONT_NEED_THIS + for (bmtVtable::Iterator slotIt = bmtVT->IterateSlots(); !slotIt.AtEnd(); ++slotIt) + { + SLOT_INDEX iCurSlot = static_cast(slotIt.CurrentIndex()); + + // We need to ensure that all vtable slots have temporary entrypoints created for them. 
+ MethodDesc * pMD = NULL; + if (iCurSlot < bmtVT->cVtableSlots) + { + pMD = slotIt->Impl().GetMethodDesc(); + pMD->EnsureTemporaryEntryPointCore(GetLoaderAllocator(), GetMemTracker()); + } + } +#endif +#endif { // copy onto the real vtable (methods only) //@GENERICS: Because we sometimes load an inexact parent (see ClassLoader::GetParent) the inherited slots might @@ -10884,6 +10901,9 @@ MethodTableBuilder::SetupMethodTable2( // _ASSERTE(iCurSlot >= bmtVT->cVirtualSlots || ChangesImplementationOfVirtualSlot(iCurSlot)); + if (pMD->GetSlot() == iCurSlot) + continue; // For cases where the slot is the same as the method desc slot, we don't need to fill it in yet + PCODE addr = pMD->GetTemporaryEntryPoint(); _ASSERTE(addr != NULL); diff --git a/src/coreclr/vm/prestub.cpp b/src/coreclr/vm/prestub.cpp index 685bfba70f501..1cffe3fbdbcb0 100644 --- a/src/coreclr/vm/prestub.cpp +++ b/src/coreclr/vm/prestub.cpp @@ -3224,6 +3224,7 @@ EXTERN_C PCODE STDCALL ExternalMethodFixupWorker(TransitionBlock * pTransitionBl if (pMD->IsVtableMethod()) { slot = pMD->GetSlot(); + pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); pMT = th.IsNull() ? pMD->GetMethodTable() : th.GetMethodTable(); fVirtual = true; From 889f8a8792190ed9224303e504a777d75ca8aacc Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 25 Apr 2024 12:34:36 -0700 Subject: [PATCH 02/51] It basically works for a single example. Baseline Loader Heap: ---------------------------------------- System Domain: 7ffab916ec00 LoaderAllocator: 7ffab916ec00 LowFrequencyHeap: Size: 0xf0000 (983040) bytes total. HighFrequencyHeap: Size: 0x16a000 (1482752) bytes total, 0x3000 (12288) bytes wasted. StubHeap: Size: 0x1000 (4096) bytes total. FixupPrecodeHeap: Size: 0x168000 (1474560) bytes total. NewStubPrecodeHeap: Size: 0x18000 (98304) bytes total. IndirectionCellHeap: Size: 0x1000 (4096) bytes total. CacheEntryHeap: Size: 0x1000 (4096) bytes total. Total size: Size: 0x3dd000 (4050944) bytes total, 0x3000 (12288) bytes wasted. Compare Loader Heap: ---------------------------------------- System Domain: 7ff9eb49dc00 LoaderAllocator: 7ff9eb49dc00 LowFrequencyHeap: Size: 0xef000 (978944) bytes total. HighFrequencyHeap: Size: 0x1b2000 (1777664) bytes total, 0x3000 (12288) bytes wasted. StubHeap: Size: 0x1000 (4096) bytes total. FixupPrecodeHeap: Size: 0x70000 (458752) bytes total. NewStubPrecodeHeap: Size: 0x10000 (65536) bytes total. IndirectionCellHeap: Size: 0x1000 (4096) bytes total. CacheEntryHeap: Size: 0x1000 (4096) bytes total. Total size: Size: 0x324000 (3293184) bytes total, 0x3000 (12288) bytes wasted. 
LowFrequencyHeap is 4KB bigger HighFrequencyHeap is 288KB bigger FixupPrecodeHeap is 992KB smaller NewstubPrecodeHeap is 32KB smaller --- src/coreclr/debug/daccess/request.cpp | 2 +- src/coreclr/vm/arm/stubs.cpp | 1 + src/coreclr/vm/arm64/stubs.cpp | 2 + src/coreclr/vm/comutilnative.cpp | 4 +- src/coreclr/vm/frames.cpp | 2 +- src/coreclr/vm/i386/stublinkerx86.cpp | 3 ++ src/coreclr/vm/jithelpers.cpp | 4 +- src/coreclr/vm/jitinterface.cpp | 2 +- src/coreclr/vm/loongarch64/stubs.cpp | 2 + src/coreclr/vm/method.cpp | 8 ++-- src/coreclr/vm/methodimpl.cpp | 52 +------------------------ src/coreclr/vm/methodtable.cpp | 56 +++++++++++++++++---------- src/coreclr/vm/methodtable.h | 4 +- src/coreclr/vm/methodtable.inl | 45 ++++++++++++++++++++- src/coreclr/vm/methodtablebuilder.cpp | 17 ++++---- src/coreclr/vm/prestub.cpp | 2 +- src/coreclr/vm/riscv64/stubs.cpp | 1 + src/coreclr/vm/virtualcallstub.cpp | 5 ++- 18 files changed, 115 insertions(+), 97 deletions(-) diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index 606d952233f2c..59a900d0b64e7 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -424,7 +424,7 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA else if (slot < mTable->GetNumVtableSlots()) { // Now get the slot: - *value = mTable->GetRestoredSlot(slot); + *value = mTable->GetRestoredSlotIfExists(slot); } else { diff --git a/src/coreclr/vm/arm/stubs.cpp b/src/coreclr/vm/arm/stubs.cpp index 5f8e3bf445e78..75f1b79a976b4 100644 --- a/src/coreclr/vm/arm/stubs.cpp +++ b/src/coreclr/vm/arm/stubs.cpp @@ -1381,6 +1381,7 @@ VOID StubLinkerCPU::EmitShuffleThunk(ShuffleEntry *pShuffleEntryArray) void StubLinkerCPU::ThumbEmitTailCallManagedMethod(MethodDesc *pMD) { + pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); // Use direct call if possible. if (pMD->HasStableEntryPoint()) { diff --git a/src/coreclr/vm/arm64/stubs.cpp b/src/coreclr/vm/arm64/stubs.cpp index 623938dfba61c..f952c606760d8 100644 --- a/src/coreclr/vm/arm64/stubs.cpp +++ b/src/coreclr/vm/arm64/stubs.cpp @@ -1625,6 +1625,8 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { + pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); + // Use direct call if possible. 
if (pMD->HasStableEntryPoint()) { diff --git a/src/coreclr/vm/comutilnative.cpp b/src/coreclr/vm/comutilnative.cpp index a3c9d0a848cdf..a04110371e346 100644 --- a/src/coreclr/vm/comutilnative.cpp +++ b/src/coreclr/vm/comutilnative.cpp @@ -1552,7 +1552,7 @@ extern "C" void QCALLTYPE Interlocked_MemoryBarrierProcessWide() static BOOL HasOverriddenMethod(MethodTable* mt, MethodTable* classMT, WORD methodSlot) { CONTRACTL{ - NOTHROW; + THROWS; GC_NOTRIGGER; MODE_ANY; } CONTRACTL_END; @@ -1809,7 +1809,7 @@ static WORD g_slotBeginWrite, g_slotEndWrite; static bool HasOverriddenStreamMethod(MethodTable * pMT, WORD slot) { CONTRACTL{ - NOTHROW; + THROWS; GC_NOTRIGGER; MODE_ANY; } CONTRACTL_END; diff --git a/src/coreclr/vm/frames.cpp b/src/coreclr/vm/frames.cpp index b4fbf4a7fb4f0..6d6af83ed5635 100644 --- a/src/coreclr/vm/frames.cpp +++ b/src/coreclr/vm/frames.cpp @@ -612,7 +612,7 @@ MethodDesc* StubDispatchFrame::GetFunction() { if (m_pRepresentativeMT != NULL) { - pMD = m_pRepresentativeMT->GetMethodDescForSlot(m_representativeSlot); + pMD = m_pRepresentativeMT->GetMethodDescForSlot_NoThrow(m_representativeSlot); #ifndef DACCESS_COMPILE m_pMD = pMD; #endif diff --git a/src/coreclr/vm/i386/stublinkerx86.cpp b/src/coreclr/vm/i386/stublinkerx86.cpp index 76d888c0c5275..6210981f03ff0 100644 --- a/src/coreclr/vm/i386/stublinkerx86.cpp +++ b/src/coreclr/vm/i386/stublinkerx86.cpp @@ -3071,6 +3071,8 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s #ifdef TARGET_AMD64 VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) { + pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); + if (pMD->HasStableEntryPoint()) { X86EmitRegLoad(kRAX, pMD->GetStableEntryPoint());// MOV RAX, DWORD @@ -3097,6 +3099,7 @@ VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) } else { + pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); // jmp [slot] Emit16(0x25ff); Emit32((DWORD)(size_t)pMD->GetAddrOfSlot()); diff --git a/src/coreclr/vm/jithelpers.cpp b/src/coreclr/vm/jithelpers.cpp index 7400708001589..1fa022ceaca61 100644 --- a/src/coreclr/vm/jithelpers.cpp +++ b/src/coreclr/vm/jithelpers.cpp @@ -5904,7 +5904,7 @@ HCIMPL3(void, JIT_VTableProfile32, Object* obj, CORINFO_METHOD_HANDLE baseMethod WORD slot = pBaseMD->GetSlot(); _ASSERTE(slot < pBaseMD->GetMethodTable()->GetNumVirtuals()); - MethodDesc* pMD = pMT->GetMethodDescForSlot(slot); + MethodDesc* pMD = pMT->GetMethodDescForSlot_NoThrow(slot); MethodDesc* pRecordedMD = (MethodDesc*)DEFAULT_UNKNOWN_HANDLE; if (!pMD->GetLoaderAllocator()->IsCollectible() && !pMD->IsDynamicMethod()) @@ -5953,7 +5953,7 @@ HCIMPL3(void, JIT_VTableProfile64, Object* obj, CORINFO_METHOD_HANDLE baseMethod WORD slot = pBaseMD->GetSlot(); _ASSERTE(slot < pBaseMD->GetMethodTable()->GetNumVirtuals()); - MethodDesc* pMD = pMT->GetMethodDescForSlot(slot); + MethodDesc* pMD = pMT->GetMethodDescForSlot_NoThrow(slot); MethodDesc* pRecordedMD = (MethodDesc*)DEFAULT_UNKNOWN_HANDLE; if (!pMD->GetLoaderAllocator()->IsCollectible() && !pMD->IsDynamicMethod()) diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index 19291f5f4fdfe..f94b9110e54ea 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -9153,7 +9153,7 @@ void CEEInfo::getFunctionEntryPoint(CORINFO_METHOD_HANDLE ftnHnd, { // should never get here for EnC methods or if interception via remoting stub is required _ASSERTE(!ftn->InEnCEnabledModule()); - + ftn->EnsureTemporaryEntryPoint(ftn->GetLoaderAllocator()); ret = (void 
*)ftn->GetAddrOfSlot(); accessType = IAT_PVALUE; diff --git a/src/coreclr/vm/loongarch64/stubs.cpp b/src/coreclr/vm/loongarch64/stubs.cpp index 56581498f003d..9709314f80c02 100644 --- a/src/coreclr/vm/loongarch64/stubs.cpp +++ b/src/coreclr/vm/loongarch64/stubs.cpp @@ -1476,6 +1476,8 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { + pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); // THIS And CODE LIKE THIS SHOULD USE A FUNCTION which checks RequiresStableEntryPoint and fills that in as needed + // Use direct call if possible. if (pMD->HasStableEntryPoint()) { diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index aa87df62276dc..54a60f8d106ee 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -481,7 +481,7 @@ PCODE MethodDesc::GetMethodEntryPoint() { CONTRACTL { -#ifndef HAS_COMPACT_ENTRYPOINTS +#ifdef HAS_COMPACT_ENTRYPOINTS NOTHROW; #else THROWS; @@ -514,7 +514,7 @@ PCODE MethodDesc::GetMethodEntryPoint() } _ASSERTE(GetMethodTable()->IsCanonicalMethodTable()); - return GetMethodTable()->GetSlot(GetSlot()); + return GetMethodTable()->GetRestoredSlot(GetSlot()); } PTR_PCODE MethodDesc::GetAddrOfSlot() @@ -1748,10 +1748,10 @@ MethodDescChunk *MethodDescChunk::CreateChunk(LoaderHeap *pHeap, DWORD methodDes DWORD count = min(methodDescCount, maxMethodDescsPerChunk); void * pMem = pamTracker->Track( - pHeap->AllocMem(S_SIZE_T(sizeof(TADDR) + sizeof(MethodDescChunk) + oneSize * count))); + pHeap->AllocMem(S_SIZE_T(sizeof(MethodDescChunk) + oneSize * count))); // Skip pointer to temporary entrypoints - MethodDescChunk * pChunk = (MethodDescChunk *)((BYTE*)pMem + sizeof(TADDR)); + MethodDescChunk * pChunk = (MethodDescChunk *)((BYTE*)pMem); pChunk->SetSizeAndCount(oneSize * count, count); pChunk->SetMethodTable(pInitialMT); diff --git a/src/coreclr/vm/methodimpl.cpp b/src/coreclr/vm/methodimpl.cpp index 13a4b73e35275..92428ab33c882 100644 --- a/src/coreclr/vm/methodimpl.cpp +++ b/src/coreclr/vm/methodimpl.cpp @@ -90,62 +90,12 @@ PTR_MethodDesc MethodImpl::GetMethodDesc(DWORD slotIndex, PTR_MethodDesc default TADDR base = dac_cast(pRelPtrForSlot) + slotIndex * sizeof(MethodDesc *); PTR_MethodDesc result = *dac_cast(base); - // Prejitted images may leave NULL in this table if - // the methoddesc is declared in another module. - // In this case we need to manually compute & restore it - // from the slot number. - - if (result == NULL) -#ifndef DACCESS_COMPILE - result = RestoreSlot(slotIndex, defaultReturn->GetMethodTable()); -#else // DACCESS_COMPILE - DacNotImpl(); -#endif // DACCESS_COMPILE - + _ASSERTE(result != NULL); return result; } #ifndef DACCESS_COMPILE -MethodDesc *MethodImpl::RestoreSlot(DWORD index, MethodTable *pMT) -{ - CONTRACTL - { - NOTHROW; - GC_NOTRIGGER; - FORBID_FAULT; - PRECONDITION(pdwSlots != NULL); - } - CONTRACTL_END - - MethodDesc *result; - - PREFIX_ASSUME(pdwSlots != NULL); - DWORD slot = GetSlots()[index]; - - // Since the overridden method is in a different module, we - // are guaranteed that it is from a different class. It is - // either an override of a parent virtual method or parent-implemented - // interface, or of an interface that this class has introduced. - - // In the former 2 cases, the slot number will be in the parent's - // vtable section, and we can retrieve the implemented MethodDesc from - // there. 
In the latter case, we can search through our interface - // map to determine which interface it is from. - - MethodTable *pParentMT = pMT->GetParentMethodTable(); - CONSISTENCY_CHECK(pParentMT != NULL && slot < pParentMT->GetNumVirtuals()); - { - result = pParentMT->GetMethodDescForSlot(slot); - } - - _ASSERTE(result != NULL); - - pImplementedMD[index] = result; - - return result; -} - /////////////////////////////////////////////////////////////////////////////////////// void MethodImpl::SetSize(LoaderHeap *pHeap, AllocMemTracker *pamTracker, DWORD size) { diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 074604cb11fdb..95ca6d89ef28c 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -1833,9 +1833,9 @@ MethodTable::Debug_DumpDispatchMap() nInterfaceIndex, pInterface->GetDebugClassName(), nInterfaceSlotNumber, - pInterface->GetMethodDescForSlot(nInterfaceSlotNumber)->GetName(), + pInterface->GetMethodDescForSlot_NoThrow(nInterfaceSlotNumber)->GetName(), nImplementationSlotNumber, - GetMethodDescForSlot(nImplementationSlotNumber)->GetName())); + GetMethodDescForSlot_NoThrow(nImplementationSlotNumber)->GetName())); it.Next(); } @@ -4146,7 +4146,7 @@ BOOL MethodTable::RunClassInitEx(OBJECTREF *pThrowable) MethodTable * pCanonMT = GetCanonicalMethodTable(); // Call the code method without touching MethodDesc if possible - PCODE pCctorCode = pCanonMT->GetSlot(pCanonMT->GetClassConstructorSlot()); + PCODE pCctorCode = pCanonMT->GetRestoredSlot(pCanonMT->GetClassConstructorSlot()); if (pCanonMT->IsSharedByGenericInstantiations()) { @@ -7025,19 +7025,6 @@ void MethodTable::SetCl(mdTypeDef token) _ASSERTE(GetCl() == token); } -//========================================================================================== -MethodDesc * MethodTable::GetClassConstructor() -{ - CONTRACTL - { - NOTHROW; - GC_NOTRIGGER; - MODE_ANY; - } - CONTRACTL_END; - return GetMethodDescForSlot(GetClassConstructorSlot()); -} - //========================================================================================== DWORD MethodTable::HasFixedAddressVTStatics() { @@ -7486,7 +7473,7 @@ MethodDesc *MethodTable::MethodDataObject::GetImplMethodDesc(UINT32 slotNumber) { CONTRACTL { - NOTHROW; + THROWS; GC_NOTRIGGER; MODE_ANY; } @@ -7510,7 +7497,7 @@ MethodDesc *MethodTable::MethodDataObject::GetImplMethodDesc(UINT32 slotNumber) } else { - _ASSERTE(slotNumber >= GetNumVirtuals() || pMDRet == m_pDeclMT->GetMethodDescForSlot(slotNumber)); + _ASSERTE(slotNumber >= GetNumVirtuals() || pMDRet == m_pDeclMT->GetMethodDescForSlot_NoThrow(slotNumber)); } return pMDRet; @@ -8365,11 +8352,23 @@ Module *MethodTable::GetDefiningModuleForOpenType() RETURN NULL; } +PCODE MethodTable::GetRestoredSlotIfExists(DWORD slotNumber) +{ + CONTRACTL { + NOTHROW; + GC_NOTRIGGER; + MODE_ANY; + SUPPORTS_DAC; + } CONTRACTL_END; + + return GetCanonicalMethodTable()->GetSlot(slotNumber); +} + //========================================================================================== PCODE MethodTable::GetRestoredSlot(DWORD slotNumber) { CONTRACTL { - NOTHROW; + THROWS; GC_NOTRIGGER; MODE_ANY; SUPPORTS_DAC; @@ -8380,7 +8379,22 @@ PCODE MethodTable::GetRestoredSlot(DWORD slotNumber) // PCODE slot = GetCanonicalMethodTable()->GetSlot(slotNumber); +#ifndef DACCESS_COMPILE + if (slot == NULL) + { + // This is a slot that has not been filled in yet. This can happen if we are + // looking at a slot which has not yet been given a temporary entry point. 
+ MethodDesc *pMD = GetCanonicalMethodTable()->GetMethodDescForSlot_NoThrow(slotNumber); + PCODE temporaryEntryPoint = pMD->GetTemporaryEntryPoint(); + slot = GetCanonicalMethodTable()->GetSlot(slotNumber); + if (slot == NULL) + { + InterlockedCompareExchangeT(GetCanonicalMethodTable()->GetSlotPtrRaw(slotNumber), temporaryEntryPoint, (PCODE)NULL); + slot = GetCanonicalMethodTable()->GetSlot(slotNumber); + } + } _ASSERTE(slot != NULL); +#endif // DACCESS_COMPILE return slot; } @@ -8456,7 +8470,7 @@ MethodDesc* MethodTable::GetParallelMethodDesc(MethodDesc* pDefMD) return GetParallelMethodDescForEnC(this, pDefMD); #endif // FEATURE_METADATA_UPDATER - return GetMethodDescForSlot(pDefMD->GetSlot()); + return GetMethodDescForSlot_NoThrow(pDefMD->GetSlot()); // TODO! We should probably use the throwing variant where possible } #ifndef DACCESS_COMPILE @@ -8529,7 +8543,7 @@ BOOL MethodTable::HasExplicitOrImplicitPublicDefaultConstructor() return FALSE; } - MethodDesc * pCanonMD = GetMethodDescForSlot(GetDefaultConstructorSlot()); + MethodDesc * pCanonMD = GetMethodDescForSlot_NoThrow(GetDefaultConstructorSlot()); return pCanonMD != NULL && pCanonMD->IsPublic(); } diff --git a/src/coreclr/vm/methodtable.h b/src/coreclr/vm/methodtable.h index 3b498200cecb4..7516d01118b71 100644 --- a/src/coreclr/vm/methodtable.h +++ b/src/coreclr/vm/methodtable.h @@ -896,8 +896,6 @@ class MethodTable // THE CLASS CONSTRUCTOR // - MethodDesc * GetClassConstructor(); - BOOL HasClassConstructor(); void SetHasClassConstructor(); WORD GetClassConstructorSlot(); @@ -1418,10 +1416,12 @@ class MethodTable // MethodDesc* GetMethodDescForSlot(DWORD slot); + MethodDesc* GetMethodDescForSlot_NoThrow(DWORD slot); static MethodDesc* GetMethodDescForSlotAddress(PCODE addr, BOOL fSpeculative = FALSE); PCODE GetRestoredSlot(DWORD slot); + PCODE GetRestoredSlotIfExists(DWORD slot); // Returns MethodTable that GetRestoredSlot get its values from MethodTable * GetRestoredSlotMT(DWORD slot); diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index d0c9b889f2f03..ba9e0e6a4f21f 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -396,7 +396,7 @@ inline MethodDesc* MethodTable::GetMethodDescForSlot(DWORD slot) { CONTRACTL { - NOTHROW; + THROWS; GC_NOTRIGGER; MODE_ANY; } @@ -414,6 +414,49 @@ inline MethodDesc* MethodTable::GetMethodDescForSlot(DWORD slot) return MethodTable::GetMethodDescForSlotAddress(pCode); } +//========================================================================================== +inline MethodDesc* MethodTable::GetMethodDescForSlot_NoThrow(DWORD slot) +{ + CONTRACTL + { + NOTHROW; + GC_NOTRIGGER; + MODE_ANY; + } + CONTRACTL_END; + + PCODE pCode = GetRestoredSlotIfExists(slot); + + if (pCode == NULL) + { + // This code path should only be hit for methods which have not been overriden + MethodTable *pMTToSearchForMethodDesc = this->GetCanonicalMethodTable(); + while (pMTToSearchForMethodDesc != NULL) + { + IntroducedMethodIterator it(pMTToSearchForMethodDesc); + for (; it.IsValid(); it.Next()) + { + if (it.GetMethodDesc()->GetSlot() == slot) + { + return it.GetMethodDesc(); + } + } + + pMTToSearchForMethodDesc = pMTToSearchForMethodDesc->GetParentMethodTable()->GetCanonicalMethodTable(); + } + _ASSERTE(!"We should never reach here, as there should always be a MethodDesc for a slot"); + } + + // This is an optimization that we can take advantage of if we're trying to get the MethodDesc + // for an interface virtual, since their slots usually point to stub. 
+ if (IsInterface() && slot < GetNumVirtuals()) + { + return MethodDesc::GetMethodDescFromStubAddr(pCode); + } + + return MethodTable::GetMethodDescForSlotAddress(pCode); +} + #ifndef DACCESS_COMPILE //========================================================================================== diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index cda16127a566d..f17744f941e0d 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -6868,7 +6868,7 @@ VOID MethodTableBuilder::ValidateInterfaceMethodConstraints() // Grab the method token MethodTable * pMTItf = pItf->GetMethodTable(); - CONSISTENCY_CHECK(CheckPointer(pMTItf->GetMethodDescForSlot(it.GetSlotNumber()))); + CONSISTENCY_CHECK(CheckPointer(pMTItf->GetMethodDescForSlot_NoThrow(it.GetSlotNumber()))); mdMethodDef mdTok = pItf->GetMethodTable()->GetMethodDescForSlot(it.GetSlotNumber())->GetMemberDef(); // Default to the current module. The code immediately below determines if this @@ -7040,10 +7040,10 @@ VOID MethodTableBuilder::AllocAndInitMethodDescChunk(COUNT_T startIndex, COUNT_T } CONTRACTL_END; void * pMem = GetMemTracker()->Track( - GetLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(TADDR) + sizeof(MethodDescChunk) + sizeOfMethodDescs))); + GetLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(MethodDescChunk) + sizeOfMethodDescs))); // Skip pointer to temporary entrypoints - MethodDescChunk * pChunk = (MethodDescChunk *)((BYTE*)pMem + sizeof(TADDR)); + MethodDescChunk * pChunk = (MethodDescChunk *)((BYTE*)pMem); COUNT_T methodDescCount = 0; @@ -9266,7 +9266,7 @@ void MethodTableBuilder::CopyExactParentSlots(MethodTable *pMT) // fix up wrongly-inherited method descriptors MethodDesc* pMD = hMTData->GetImplMethodDesc(i); CONSISTENCY_CHECK(CheckPointer(pMD)); - CONSISTENCY_CHECK(pMD == pMT->GetMethodDescForSlot(i)); + CONSISTENCY_CHECK(pMD == pMT->GetMethodDescForSlot_NoThrow(i)); if (pMD->GetMethodTable() == pMT) continue; @@ -10901,9 +10901,10 @@ MethodTableBuilder::SetupMethodTable2( // _ASSERTE(iCurSlot >= bmtVT->cVirtualSlots || ChangesImplementationOfVirtualSlot(iCurSlot)); - if (pMD->GetSlot() == iCurSlot) - continue; // For cases where the slot is the same as the method desc slot, we don't need to fill it in yet + if ((pMD->GetSlot() == iCurSlot) && GetParentMethodTable() != NULL && iCurSlot >= GetParentMethodTable()->GetNumVirtuals()) + continue; // For cases where the method is defining the method desc slot, we don't need to fill it in yet + pMD->EnsureTemporaryEntryPointCore(GetLoaderAllocator(), GetMemTracker()); PCODE addr = pMD->GetTemporaryEntryPoint(); _ASSERTE(addr != NULL); @@ -10969,7 +10970,7 @@ MethodTableBuilder::SetupMethodTable2( MethodDesc* pMD = hMTData->GetImplMethodDesc(i); CONSISTENCY_CHECK(CheckPointer(pMD)); - CONSISTENCY_CHECK(pMD == pMT->GetMethodDescForSlot(i)); + CONSISTENCY_CHECK(pMD == pMT->GetMethodDescForSlot_NoThrow(i)); // This indicates that the method body in this slot was copied here through a methodImpl. 
// Thus, copy the value of the slot from which the body originally came, in case it was @@ -10979,7 +10980,7 @@ MethodTableBuilder::SetupMethodTable2( { MethodDesc *pOriginalMD = hMTData->GetImplMethodDesc(originalIndex); CONSISTENCY_CHECK(CheckPointer(pOriginalMD)); - CONSISTENCY_CHECK(pOriginalMD == pMT->GetMethodDescForSlot(originalIndex)); + CONSISTENCY_CHECK(pOriginalMD == pMT->GetMethodDescForSlot_NoThrow(originalIndex)); if (pMD != pOriginalMD) { // Copy the slot value in the method's original slot. diff --git a/src/coreclr/vm/prestub.cpp b/src/coreclr/vm/prestub.cpp index 1cffe3fbdbcb0..0b99ef797c312 100644 --- a/src/coreclr/vm/prestub.cpp +++ b/src/coreclr/vm/prestub.cpp @@ -3224,7 +3224,7 @@ EXTERN_C PCODE STDCALL ExternalMethodFixupWorker(TransitionBlock * pTransitionBl if (pMD->IsVtableMethod()) { slot = pMD->GetSlot(); - pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); + pMD->GetMethodTable()->GetRestoredSlot(slot); // Ensure that the target slot has an entrypoint pMT = th.IsNull() ? pMD->GetMethodTable() : th.GetMethodTable(); fVirtual = true; diff --git a/src/coreclr/vm/riscv64/stubs.cpp b/src/coreclr/vm/riscv64/stubs.cpp index ebc0d0495c2a0..1e3ffa386ec06 100644 --- a/src/coreclr/vm/riscv64/stubs.cpp +++ b/src/coreclr/vm/riscv64/stubs.cpp @@ -1521,6 +1521,7 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { + pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); // Use direct call if possible. if (pMD->HasStableEntryPoint()) { diff --git a/src/coreclr/vm/virtualcallstub.cpp b/src/coreclr/vm/virtualcallstub.cpp index 8da8aea4be572..18913528c368b 100644 --- a/src/coreclr/vm/virtualcallstub.cpp +++ b/src/coreclr/vm/virtualcallstub.cpp @@ -987,6 +987,7 @@ PCODE VirtualCallStubManager::GetCallStub(TypeHandle ownerType, DWORD slot) GCX_COOP(); // This is necessary for BucketTable synchronization MethodTable * pMT = ownerType.GetMethodTable(); + pMT->GetRestoredSlot(slot); DispatchToken token; if (pMT->IsInterface()) @@ -2133,7 +2134,7 @@ VirtualCallStubManager::GetRepresentativeMethodDescFromToken( token = DispatchToken::CreateDispatchToken(token.GetSlotNumber()); } CONSISTENCY_CHECK(token.IsThisToken()); - RETURN (pMT->GetMethodDescForSlot(token.GetSlotNumber())); + RETURN (pMT->GetMethodDescForSlot_NoThrow(token.GetSlotNumber())); } //---------------------------------------------------------------------------- @@ -2165,7 +2166,7 @@ MethodDesc *VirtualCallStubManager::GetInterfaceMethodDescFromToken(DispatchToke MethodTable * pMT = GetTypeFromToken(token); PREFIX_ASSUME(pMT != NULL); CONSISTENCY_CHECK(CheckPointer(pMT)); - return pMT->GetMethodDescForSlot(token.GetSlotNumber()); + return pMT->GetMethodDescForSlot_NoThrow(token.GetSlotNumber()); #else // DACCESS_COMPILE From 693d83713b3c2a1e1792228638eb4da4fdd28354 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 25 Apr 2024 15:13:09 -0700 Subject: [PATCH 03/51] If there isn't a parent methodtable and the slot matches... 
then it by definition the method is defining the slot --- src/coreclr/vm/methodtablebuilder.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index f17744f941e0d..92270e1ca6dae 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10901,7 +10901,7 @@ MethodTableBuilder::SetupMethodTable2( // _ASSERTE(iCurSlot >= bmtVT->cVirtualSlots || ChangesImplementationOfVirtualSlot(iCurSlot)); - if ((pMD->GetSlot() == iCurSlot) && GetParentMethodTable() != NULL && iCurSlot >= GetParentMethodTable()->GetNumVirtuals()) + if ((pMD->GetSlot() == iCurSlot) && (GetParentMethodTable() == NULL || iCurSlot >= GetParentMethodTable()->GetNumVirtuals())) continue; // For cases where the method is defining the method desc slot, we don't need to fill it in yet pMD->EnsureTemporaryEntryPointCore(GetLoaderAllocator(), GetMemTracker()); From da774fb0f4664bcdb558b9a74be1025ed653a901 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 25 Apr 2024 16:18:28 -0700 Subject: [PATCH 04/51] Fix a couple more issues found when running a subset of the coreclr tests --- src/coreclr/vm/jitinterface.cpp | 8 ++++---- src/coreclr/vm/method.cpp | 16 ++++++++++++++++ 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index f94b9110e54ea..d80def4a661e0 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -8687,12 +8687,12 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd, bool * isRelative) { CONTRACTL { - NOTHROW; - GC_NOTRIGGER; + THROWS; + GC_TRIGGERS; MODE_PREEMPTIVE; } CONTRACTL_END; - JIT_TO_EE_TRANSITION_LEAF(); + JIT_TO_EE_TRANSITION(); MethodDesc* method = GetMethod(methodHnd); method->EnsureTemporaryEntryPoint(method->GetLoaderAllocator()); @@ -8709,7 +8709,7 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd, *pOffsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(method->GetSlot()) * TARGET_POINTER_SIZE /* sizeof(MethodTable::VTableIndir2_t) */; *isRelative = false; - EE_TO_JIT_TRANSITION_LEAF(); + EE_TO_JIT_TRANSITION(); } /*********************************************************************/ diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 54a60f8d106ee..5fcf9cf6e437a 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2980,6 +2980,14 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc #ifndef HAS_COMPACT_ENTRYPOINTS void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) { + CONTRACTL + { + THROWS; + GC_NOTRIGGER; + MODE_ANY; + } + CONTRACTL_END; + if (GetTemporaryEntryPoint_NoAlloc() == NULL) { AllocMemTracker amt; @@ -2989,6 +2997,14 @@ void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) { + CONTRACTL + { + THROWS; + GC_NOTRIGGER; + MODE_ANY; + } + CONTRACTL_END; + if (GetTemporaryEntryPoint_NoAlloc() == NULL) { PTR_PCODE pSlot = GetAddrOfSlot(); From bb59e29207d0512f81cf24bfb55797a678e3cb1c Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 25 Apr 2024 16:43:03 -0700 Subject: [PATCH 05/51] Get X86 building again --- src/coreclr/vm/i386/stublinkerx86.cpp | 4 ++++ src/coreclr/vm/jitinterface.cpp | 5 +++++ src/coreclr/vm/method.hpp | 4 ++-- 
src/coreclr/vm/methodtablebuilder.cpp | 2 ++ 4 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/coreclr/vm/i386/stublinkerx86.cpp b/src/coreclr/vm/i386/stublinkerx86.cpp index 6210981f03ff0..dd750f56490a7 100644 --- a/src/coreclr/vm/i386/stublinkerx86.cpp +++ b/src/coreclr/vm/i386/stublinkerx86.cpp @@ -3071,7 +3071,9 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s #ifdef TARGET_AMD64 VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) { +#ifndef HAS_COMPACT_ENTRYPOINTS pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); +#endif if (pMD->HasStableEntryPoint()) { @@ -3099,7 +3101,9 @@ VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) } else { +#ifndef HAS_COMPACT_ENTRYPOINTS pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); +#endif // jmp [slot] Emit16(0x25ff); Emit32((DWORD)(size_t)pMD->GetAddrOfSlot()); diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index d80def4a661e0..f2c7d8a6ba727 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -8695,7 +8695,9 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd, JIT_TO_EE_TRANSITION(); MethodDesc* method = GetMethod(methodHnd); +#ifndef HAS_COMPACT_ENTRYPOINTS method->EnsureTemporaryEntryPoint(method->GetLoaderAllocator()); +#endif //@GENERICS: shouldn't be doing this for instantiated methods as they live elsewhere _ASSERTE(!method->HasMethodInstantiation()); @@ -9153,7 +9155,10 @@ void CEEInfo::getFunctionEntryPoint(CORINFO_METHOD_HANDLE ftnHnd, { // should never get here for EnC methods or if interception via remoting stub is required _ASSERTE(!ftn->InEnCEnabledModule()); + +#ifndef HAS_COMPACT_ENTRYPOINTS ftn->EnsureTemporaryEntryPoint(ftn->GetLoaderAllocator()); +#endif ret = (void *)ftn->GetAddrOfSlot(); accessType = IAT_PVALUE; diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 8ab50730ce19c..bd112f62cdce7 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -2182,7 +2182,7 @@ class MethodDescChunk TADDR GetTemporaryEntryPoints() { LIMITED_METHOD_CONTRACT; - return m_pCompactEntryPoints; + return m_pTemporaryEntryPoints; } PCODE GetTemporaryEntryPoint(int index); @@ -2339,7 +2339,7 @@ class MethodDescChunk PTR_MethodDescChunk m_next; #ifdef HAS_COMPACT_ENTRYPOINTS - TADDR m_pCompactEntryPoints; + TADDR m_pTemporaryEntryPoints; #endif BYTE m_size; // The size of this chunk minus 1 (in multiples of MethodDesc::ALIGNMENT) diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 92270e1ca6dae..b891f87ac5d3a 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10904,7 +10904,9 @@ MethodTableBuilder::SetupMethodTable2( if ((pMD->GetSlot() == iCurSlot) && (GetParentMethodTable() == NULL || iCurSlot >= GetParentMethodTable()->GetNumVirtuals())) continue; // For cases where the method is defining the method desc slot, we don't need to fill it in yet +#ifndef HAS_COMPACT_ENTRYPOINTS pMD->EnsureTemporaryEntryPointCore(GetLoaderAllocator(), GetMemTracker()); +#endif // HAS_COMPACT_ENTRYPOINTS PCODE addr = pMD->GetTemporaryEntryPoint(); _ASSERTE(addr != NULL); From bfde6def21552185a1656669fefba574e1c44e70 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 1 May 2024 16:05:23 -0700 Subject: [PATCH 06/51] Attempt to use a consistent api to force slots to be set --- src/coreclr/vm/arm/stubs.cpp | 2 +- src/coreclr/vm/i386/stublinkerx86.cpp | 8 +++---- 
src/coreclr/vm/jitinterface.cpp | 4 ++-- src/coreclr/vm/loongarch64/stubs.cpp | 2 +- src/coreclr/vm/method.cpp | 11 +++++++++- src/coreclr/vm/method.hpp | 30 +++++++++++++++++++++++++++ src/coreclr/vm/methodtablebuilder.cpp | 15 -------------- src/coreclr/vm/prestub.cpp | 3 +++ src/coreclr/vm/riscv64/stubs.cpp | 2 +- 9 files changed, 52 insertions(+), 25 deletions(-) diff --git a/src/coreclr/vm/arm/stubs.cpp b/src/coreclr/vm/arm/stubs.cpp index 75f1b79a976b4..e02c10a59ce7d 100644 --- a/src/coreclr/vm/arm/stubs.cpp +++ b/src/coreclr/vm/arm/stubs.cpp @@ -1381,7 +1381,7 @@ VOID StubLinkerCPU::EmitShuffleThunk(ShuffleEntry *pShuffleEntryArray) void StubLinkerCPU::ThumbEmitTailCallManagedMethod(MethodDesc *pMD) { - pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); + pMD->EnsureSlotFilled(); // Use direct call if possible. if (pMD->HasStableEntryPoint()) { diff --git a/src/coreclr/vm/i386/stublinkerx86.cpp b/src/coreclr/vm/i386/stublinkerx86.cpp index dd750f56490a7..89671d901eb7c 100644 --- a/src/coreclr/vm/i386/stublinkerx86.cpp +++ b/src/coreclr/vm/i386/stublinkerx86.cpp @@ -3072,7 +3072,7 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) { #ifndef HAS_COMPACT_ENTRYPOINTS - pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); + pMD->EnsureSlotFilled(); #endif if (pMD->HasStableEntryPoint()) @@ -3094,6 +3094,9 @@ VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) EmitLoadMethodAddressIntoAX(pMD); Emit16(X86_INSTR_JMP_EAX); #else +#ifndef HAS_COMPACT_ENTRYPOINTS + pMD->EnsureSlotFilled(); +#endif // Use direct call if possible if (pMD->HasStableEntryPoint()) { @@ -3101,9 +3104,6 @@ VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) } else { -#ifndef HAS_COMPACT_ENTRYPOINTS - pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); -#endif // jmp [slot] Emit16(0x25ff); Emit32((DWORD)(size_t)pMD->GetAddrOfSlot()); diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index f2c7d8a6ba727..2c9a8eb2edda5 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -8696,7 +8696,7 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd, MethodDesc* method = GetMethod(methodHnd); #ifndef HAS_COMPACT_ENTRYPOINTS - method->EnsureTemporaryEntryPoint(method->GetLoaderAllocator()); + method->EnsureSlotFilled(); #endif //@GENERICS: shouldn't be doing this for instantiated methods as they live elsewhere @@ -9157,7 +9157,7 @@ void CEEInfo::getFunctionEntryPoint(CORINFO_METHOD_HANDLE ftnHnd, _ASSERTE(!ftn->InEnCEnabledModule()); #ifndef HAS_COMPACT_ENTRYPOINTS - ftn->EnsureTemporaryEntryPoint(ftn->GetLoaderAllocator()); + ftn->EnsureSlotFilled(); #endif ret = (void *)ftn->GetAddrOfSlot(); diff --git a/src/coreclr/vm/loongarch64/stubs.cpp b/src/coreclr/vm/loongarch64/stubs.cpp index 9709314f80c02..eb2e8e0fcc9b5 100644 --- a/src/coreclr/vm/loongarch64/stubs.cpp +++ b/src/coreclr/vm/loongarch64/stubs.cpp @@ -1476,7 +1476,7 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { - pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); // THIS And CODE LIKE THIS SHOULD USE A FUNCTION which checks RequiresStableEntryPoint and fills that in as needed + pMD->EnsureSlotFilled(); // Use direct call if possible. 
if (pMD->HasStableEntryPoint()) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 5fcf9cf6e437a..43e7ca8223feb 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -506,7 +506,7 @@ PCODE MethodDesc::GetMethodEntryPoint() #if !defined(HAS_COMPACT_ENTRYPOINTS) && !defined(DACCESS_COMPILE) if (*PTR_PCODE(pSlot) == NULL) { - EnsureTemporaryEntryPoint(GetLoaderAllocator()); + EnsureSlotFilled(); _ASSERTE(*PTR_PCODE(pSlot) != NULL); } #endif @@ -2026,6 +2026,11 @@ PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags _ASSERTE((accessFlags & ~CORINFO_ACCESS_LDFTN) == 0); } +#ifndef HAS_COMPACT_ENTRYPOINTS + if (RequiresStableEntryPoint() && !HasStableEntryPoint()) + EnsureSlotFilled(); +#endif + // We create stable entrypoints for these upfront if (IsWrapperStub() || IsEnCAddedMethod()) return GetStableEntryPoint(); @@ -3086,6 +3091,10 @@ Precode* MethodDesc::GetOrCreatePrecode() if (InterlockedCompareExchangeT(pSlot, pPrecode->GetEntryPoint(), tempEntry) == tempEntry) amt.SuppressRelease(); } + else if (*pSlot == NULL) + { + InterlockedCompareExchangeT(pSlot, tempEntry, (PCODE)NULL); + } // Set the flags atomically InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint | enum_flag3_HasPrecode, TRUE); diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index bd112f62cdce7..483037b19e1ff 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -1460,6 +1460,36 @@ class MethodDesc #ifndef HAS_COMPACT_ENTRYPOINTS void EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator); void EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); + +#ifndef DACCESS_COMPILE + void EnsureSlotFilled() + { + WRAPPER_NO_CONTRACT; + EnsureTemporaryEntryPoint(GetLoaderAllocator()); + PCODE *pSlot = GetAddrOfSlot(); + if (*pSlot == NULL) + { + if (RequiresStableEntryPoint()) + { + GetOrCreatePrecode(); + } + else + { + *pSlot = GetTemporaryEntryPoint(); + } + } + else + { + if (RequiresStableEntryPoint() && !HasStableEntryPoint()) + { + _ASSERTE(*pSlot == GetTemporaryEntryPoint()); + // We may be in a race with another thread that will be setting HasStableEntryPoint + // Just set it now along with HasPrecode + InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint | enum_flag3_HasPrecode, FALSE); + } + } + } +#endif // DACCESS_COMPILE #endif //******************************************************************************* diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index b891f87ac5d3a..6c39b78eb8f4b 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10839,21 +10839,6 @@ MethodTableBuilder::SetupMethodTable2( pChunk->EnsureTemporaryEntryPointsCreated(GetLoaderAllocator(), GetMemTracker()); } } -#else -#if MAYBE_WE_DONT_NEED_THIS - for (bmtVtable::Iterator slotIt = bmtVT->IterateSlots(); !slotIt.AtEnd(); ++slotIt) - { - SLOT_INDEX iCurSlot = static_cast(slotIt.CurrentIndex()); - - // We need to ensure that all vtable slots have temporary entrypoints created for them. 
- MethodDesc * pMD = NULL; - if (iCurSlot < bmtVT->cVtableSlots) - { - pMD = slotIt->Impl().GetMethodDesc(); - pMD->EnsureTemporaryEntryPointCore(GetLoaderAllocator(), GetMemTracker()); - } - } -#endif #endif { // copy onto the real vtable (methods only) diff --git a/src/coreclr/vm/prestub.cpp b/src/coreclr/vm/prestub.cpp index 0b99ef797c312..6b10eb79d2066 100644 --- a/src/coreclr/vm/prestub.cpp +++ b/src/coreclr/vm/prestub.cpp @@ -2799,6 +2799,9 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT, CallerGCMode callerGCMo { pCode = GetStubForInteropMethod(this); +#ifndef HAS_COMPACT_ENTRYPOINTS + EnsureSlotFilled(); +#endif GetPrecode()->SetTargetInterlocked(pCode); RETURN GetStableEntryPoint(); diff --git a/src/coreclr/vm/riscv64/stubs.cpp b/src/coreclr/vm/riscv64/stubs.cpp index 1e3ffa386ec06..1501c29ef74f8 100644 --- a/src/coreclr/vm/riscv64/stubs.cpp +++ b/src/coreclr/vm/riscv64/stubs.cpp @@ -1521,7 +1521,7 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { - pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); + pMD->EnsureSlotFilled(); // Use direct call if possible. if (pMD->HasStableEntryPoint()) { From 54b8ab603008ce7f2e7beec848592217eaf65776 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 1 May 2024 16:21:46 -0700 Subject: [PATCH 07/51] Put cache around RequiresStableEntryPoint --- src/coreclr/vm/method.cpp | 23 +++++++++++++++++++++++ src/coreclr/vm/method.hpp | 14 ++++++++++++++ src/coreclr/vm/methodtablebuilder.cpp | 4 ++++ 3 files changed, 41 insertions(+) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 43e7ca8223feb..fa6d1c7a61737 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -1760,7 +1760,9 @@ MethodDescChunk *MethodDescChunk::CreateChunk(LoaderHeap *pHeap, DWORD methodDes for (DWORD i = 0; i < count; i++) { pMD->SetChunkIndex(pChunk); +#ifdef HAS_COMPACT_ENTRYPOINTS pMD->SetMethodDescIndex(i); +#endif pMD->SetClassification(classification); if (fNonVtableSlot) @@ -2322,6 +2324,27 @@ BOOL MethodDesc::RequiresMethodDescCallingConvention(BOOL fEstimateForChunk /*=F //******************************************************************************* BOOL MethodDesc::RequiresStableEntryPoint(BOOL fEstimateForChunk /*=FALSE*/) +{ +#ifdef HAS_COMPACT_ENTRYPOINTS + return RequiresStableEntryPointCore(fEstimateForChunk); +#else + BYTE bFlags4 = VolatileLoadWithoutBarrier(&m_bFlags4); + if (bFlags4 & enum_flag4_ComputedRequiresStableEntryPoint) + { + return (bFlags4 & enum_flag4_RequiresStableEntryPoint) != 0; + } + else + { + if (fEstimateForChunk) + return RequiresStableEntryPointCore(fEstimateForChunk); + BOOL fRequiresStableEntryPoint = RequiresStableEntryPointCore(FALSE); + VolatileStore(&m_bFlags4, (BYTE)(enum_flag4_ComputedRequiresStableEntryPoint | (fRequiresStableEntryPoint ? enum_flag4_RequiresStableEntryPoint : 0))); + return fRequiresStableEntryPoint; + } +#endif +} + +BOOL MethodDesc::RequiresStableEntryPointCore(BOOL fEstimateForChunk) { LIMITED_METHOD_CONTRACT; diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 483037b19e1ff..5446f5e842c35 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -1636,6 +1636,9 @@ class MethodDesc // Returns true if the method has to have stable entrypoint always. 
BOOL RequiresStableEntryPoint(BOOL fEstimateForChunk = FALSE); +private: + BOOL RequiresStableEntryPointCore(BOOL fEstimateForChunk); +public: // // Backpatch method slots @@ -1693,7 +1696,16 @@ class MethodDesc UINT16 m_wFlags3AndTokenRemainder; BYTE m_chunkIndex; + +#ifndef HAS_COMPACT_ENTRYPOINTS + enum { + enum_flag4_ComputedRequiresStableEntryPoint = 0x01, + enum_flag4_RequiresStableEntryPoint = 0x02, + }; + BYTE m_bFlags4; // Used to hold more flags +#else BYTE m_methodIndex; // Used to hold the index into the chunk of this MethodDesc. Currently all 8 bits are used, but we could likely work with only 7 bits +#endif // The slot number of this MethodDesc in the vtable array. WORD m_wSlotNumber; @@ -1707,6 +1719,7 @@ class MethodDesc void EnumMemoryRegions(CLRDataEnumMemoryFlags flags); #endif +#ifdef HAS_COMPACT_ENTRYPOINTS BYTE GetMethodDescIndex() { return m_methodIndex; @@ -1717,6 +1730,7 @@ class MethodDesc _ASSERTE(index <= 255); m_methodIndex = (BYTE)index; } +#endif public: inline DWORD GetClassification() const diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 6c39b78eb8f4b..6fdf4239b116e 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -7064,7 +7064,9 @@ VOID MethodTableBuilder::AllocAndInitMethodDescChunk(COUNT_T startIndex, COUNT_T MethodDesc * pMD = (MethodDesc *)((BYTE *)pChunk + offset); pMD->SetChunkIndex(pChunk); +#ifdef HAS_COMPACT_ENTRYPOINTS pMD->SetMethodDescIndex(methodDescCount); +#endif InitNewMethodDesc(pMDMethod, pMD); @@ -7108,7 +7110,9 @@ VOID MethodTableBuilder::AllocAndInitMethodDescChunk(COUNT_T startIndex, COUNT_T // Reset the chunk index pUnboxedMD->SetChunkIndex(pChunk); +#ifdef HAS_COMPACT_ENTRYPOINTS pUnboxedMD->SetMethodDescIndex(methodDescCount); +#endif if (bmtGenerics->GetNumGenericArgs() == 0) { pUnboxedMD->SetHasNonVtableSlot(); From 66f2b391b70db91282759f4f0642d700a7589809 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 3 May 2024 15:17:22 -0700 Subject: [PATCH 08/51] Fix typo --- src/coreclr/vm/method.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 40335ae7022e9..f99660a577fb9 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -1459,7 +1459,7 @@ class MethodDesc _ASSERTE(*pSlot == GetTemporaryEntryPoint()); // We may be in a race with another thread that will be setting HasStableEntryPoint // Just set it now along with HasPrecode - InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint | enum_flag3_HasPrecode, FALSE); + InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint | enum_flag3_HasPrecode, TRUE); } } } From 951a655ad7944f1c6bb19b99ec84d82634192ac5 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Mon, 6 May 2024 15:25:09 -0700 Subject: [PATCH 09/51] Fix interop identified issue where we sometime set a non Precode into an interface --- src/coreclr/vm/method.cpp | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index cfb631d15e062..eecc1ee1cf040 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -3376,7 +3376,14 @@ void MethodDesc::SetCodeEntryPoint(PCODE entryPoint) } else if (!HasStableEntryPoint()) { - SetStableEntryPointInterlocked(entryPoint); + if (RequiresStableEntryPoint()) + { + GetOrCreatePrecode()->SetTargetInterlocked(entryPoint); + } + else + { + SetStableEntryPointInterlocked(entryPoint); + } } } @@ -3485,6 
+3492,7 @@ BOOL MethodDesc::SetStableEntryPointInterlocked(PCODE addr) BOOL fResult = InterlockedCompareExchangeT(pSlot, addr, pExpected) == pExpected; InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint, TRUE); + _ASSERTE(!RequiresStableEntryPoint()); // The RequiresStableEntryPoint scenarios should all result in a stable entry point which is a PreCode, so that it can be replaced and adjusted over time. return fResult; } From 45a0b3b3c9a09be315e26a9021c0052618f7f011 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Mon, 6 May 2024 16:44:37 -0700 Subject: [PATCH 10/51] Move ARM and X86 to disable compact entry points --- src/coreclr/vm/amd64/asmconstants.h | 4 ---- src/coreclr/vm/arm/cgencpu.h | 2 -- src/coreclr/vm/i386/asmconstants.h | 2 +- src/coreclr/vm/i386/cgencpu.h | 2 -- 4 files changed, 1 insertion(+), 9 deletions(-) diff --git a/src/coreclr/vm/amd64/asmconstants.h b/src/coreclr/vm/amd64/asmconstants.h index de5c423ccea1b..5c6e86c580fd9 100644 --- a/src/coreclr/vm/amd64/asmconstants.h +++ b/src/coreclr/vm/amd64/asmconstants.h @@ -98,10 +98,6 @@ ASMCONSTANTS_C_ASSERT(SIZEOF__ComPrestubMethodFrame ASMCONSTANTS_C_ASSERT(SIZEOF__ComMethodFrame == sizeof(ComMethodFrame)); -#define OFFSETOF__ComPlusCallMethodDesc__m_pComPlusCallInfo DBG_FRE(0x38, 0x10) -ASMCONSTANTS_C_ASSERT(OFFSETOF__ComPlusCallMethodDesc__m_pComPlusCallInfo - == offsetof(ComPlusCallMethodDesc, m_pComPlusCallInfo)); - #define OFFSETOF__ComPlusCallInfo__m_pILStub 0x0 ASMCONSTANTS_C_ASSERT(OFFSETOF__ComPlusCallInfo__m_pILStub == offsetof(ComPlusCallInfo, m_pILStub)); diff --git a/src/coreclr/vm/arm/cgencpu.h b/src/coreclr/vm/arm/cgencpu.h index d800551a5170a..b9894aaa4787c 100644 --- a/src/coreclr/vm/arm/cgencpu.h +++ b/src/coreclr/vm/arm/cgencpu.h @@ -71,8 +71,6 @@ EXTERN_C void checkStack(void); #define JUMP_ALLOCATE_SIZE 8 // # bytes to allocate for a jump instruction #define BACK_TO_BACK_JUMP_ALLOCATE_SIZE 8 // # bytes to allocate for a back to back jump instruction -#define HAS_COMPACT_ENTRYPOINTS 1 - #define HAS_NDIRECT_IMPORT_PRECODE 1 EXTERN_C void getFPReturn(int fpSize, INT64 *pRetVal); diff --git a/src/coreclr/vm/i386/asmconstants.h b/src/coreclr/vm/i386/asmconstants.h index 475a0f857ebd9..c1a577caa0f86 100644 --- a/src/coreclr/vm/i386/asmconstants.h +++ b/src/coreclr/vm/i386/asmconstants.h @@ -233,7 +233,7 @@ ASMCONSTANTS_C_ASSERT(OFFSETOF__FrameHandlerExRecord__m_pEntryFrame == offsetof( #endif -#define ComPlusCallMethodDesc__m_pComPlusCallInfo DBG_FRE(0x1C, 0x8) +#define ComPlusCallMethodDesc__m_pComPlusCallInfo DBG_FRE(0x20, 0xC) ASMCONSTANTS_C_ASSERT(ComPlusCallMethodDesc__m_pComPlusCallInfo == offsetof(ComPlusCallMethodDesc, m_pComPlusCallInfo)) #define ComPlusCallInfo__m_pRetThunk 0x10 diff --git a/src/coreclr/vm/i386/cgencpu.h b/src/coreclr/vm/i386/cgencpu.h index e99b8f542b590..05013a5018512 100644 --- a/src/coreclr/vm/i386/cgencpu.h +++ b/src/coreclr/vm/i386/cgencpu.h @@ -51,8 +51,6 @@ EXTERN_C void SinglecastDelegateInvokeStub(); #define JUMP_ALLOCATE_SIZE 8 // # bytes to allocate for a jump instruction #define BACK_TO_BACK_JUMP_ALLOCATE_SIZE 8 // # bytes to allocate for a back to back jump instruction -#define HAS_COMPACT_ENTRYPOINTS 1 - // Needed for PInvoke inlining in ngened images #define HAS_NDIRECT_IMPORT_PRECODE 1 From 4e7f41dfaea06f3d5c195cac4959c711ca2d9d49 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Tue, 7 May 2024 13:09:13 -0700 Subject: [PATCH 11/51] Attempt to fix build breaks --- src/coreclr/inc/gfunc_list.h | 2 +- src/coreclr/vm/arm/asmhelpers.S | 3 +++ 
src/coreclr/vm/methodtable.inl | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/coreclr/inc/gfunc_list.h b/src/coreclr/inc/gfunc_list.h index d5c5b67d9633e..44261568c5b26 100644 --- a/src/coreclr/inc/gfunc_list.h +++ b/src/coreclr/inc/gfunc_list.h @@ -13,7 +13,7 @@ DEFINE_DACGFN(DACNotifyCompilationFinished) DEFINE_DACGFN(ThePreStub) -#ifdef TARGET_ARM +#if defined(HAS_COMPACT_ENTRYPOINTS) && definded(TARGET_ARM) DEFINE_DACGFN(ThePreStubCompactARM) #endif diff --git a/src/coreclr/vm/arm/asmhelpers.S b/src/coreclr/vm/arm/asmhelpers.S index 27a44b62c119b..f868e549333b4 100644 --- a/src/coreclr/vm/arm/asmhelpers.S +++ b/src/coreclr/vm/arm/asmhelpers.S @@ -210,6 +210,7 @@ LOCAL_LABEL(LNullThis): NESTED_END ThePreStub, _TEXT +#ifdef HAS_COMPACT_ENTRYPOINTS // ------------------------------------------------------------------ NESTED_ENTRY ThePreStubCompactARM, _TEXT, NoHandler @@ -228,6 +229,8 @@ LOCAL_LABEL(LNullThis): b C_FUNC(ThePreStub) NESTED_END ThePreStubCompactARM, _TEXT +#endif + // ------------------------------------------------------------------ // This method does nothing. It's just a fixed function for the debugger to put a breakpoint on. LEAF_ENTRY ThePreStubPatch, _TEXT diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index ba9e0e6a4f21f..4865d46e9aad3 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -427,7 +427,7 @@ inline MethodDesc* MethodTable::GetMethodDescForSlot_NoThrow(DWORD slot) PCODE pCode = GetRestoredSlotIfExists(slot); - if (pCode == NULL) + if (pCode == (PCODE)NULL) { // This code path should only be hit for methods which have not been overriden MethodTable *pMTToSearchForMethodDesc = this->GetCanonicalMethodTable(); From f6e2fedb6628971d21b2c806471161ded5e8b5d5 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 8 May 2024 14:51:23 -0700 Subject: [PATCH 12/51] fix typo --- src/coreclr/inc/gfunc_list.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/inc/gfunc_list.h b/src/coreclr/inc/gfunc_list.h index 44261568c5b26..336d6a958b6f6 100644 --- a/src/coreclr/inc/gfunc_list.h +++ b/src/coreclr/inc/gfunc_list.h @@ -13,7 +13,7 @@ DEFINE_DACGFN(DACNotifyCompilationFinished) DEFINE_DACGFN(ThePreStub) -#if defined(HAS_COMPACT_ENTRYPOINTS) && definded(TARGET_ARM) +#if defined(HAS_COMPACT_ENTRYPOINTS) && defined(TARGET_ARM) DEFINE_DACGFN(ThePreStubCompactARM) #endif From f9de7776fbc4368300cd9b5f4e2af10434264a26 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 9 May 2024 13:09:14 -0700 Subject: [PATCH 13/51] Fix another Musl validation issue --- src/coreclr/vm/method.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index f99660a577fb9..dd6714cd3af1c 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -1441,7 +1441,7 @@ class MethodDesc WRAPPER_NO_CONTRACT; EnsureTemporaryEntryPoint(GetLoaderAllocator()); PCODE *pSlot = GetAddrOfSlot(); - if (*pSlot == NULL) + if (*pSlot == (PCODE)NULL) { if (RequiresStableEntryPoint()) { From 730fd7c969dcc1850ea8ead8d9565f53c8843912 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 10 May 2024 11:00:43 -0700 Subject: [PATCH 14/51] More tweaks around NULL handling --- src/coreclr/vm/method.cpp | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index eecc1ee1cf040..a18b7812918db 100644 --- a/src/coreclr/vm/method.cpp +++ 
b/src/coreclr/vm/method.cpp @@ -504,10 +504,10 @@ PCODE MethodDesc::GetMethodEntryPoint() TADDR pSlot = dac_cast(this) + size; #if !defined(HAS_COMPACT_ENTRYPOINTS) && !defined(DACCESS_COMPILE) - if (*PTR_PCODE(pSlot) == NULL) + if (*PTR_PCODE(pSlot) == (PCODE)NULL) { EnsureSlotFilled(); - _ASSERTE(*PTR_PCODE(pSlot) != NULL); + _ASSERTE(*PTR_PCODE(pSlot) != (PCODE)NULL); } #endif return *PTR_PCODE(pSlot); @@ -2217,7 +2217,7 @@ BOOL MethodDesc::IsPointingToPrestub() if (IsVersionableWithVtableSlotBackpatch()) { PCODE methodEntrypoint = GetMethodEntryPoint_NoAlloc(); - return methodEntrypoint == GetTemporaryEntryPoint_NoAlloc() && methodEntrypoint != NULL; + return methodEntrypoint == GetTemporaryEntryPoint_NoAlloc() && methodEntrypoint != (PCODE)NULL; } return TRUE; } @@ -2960,14 +2960,14 @@ PCODE MethodDesc::GetTemporaryEntryPoint() } CONTRACTL_END; - TADDR pEntryPoint = GetTemporaryEntryPoint_NoAlloc(); - if (pEntryPoint != NULL) + PCODE pEntryPoint = GetTemporaryEntryPoint_NoAlloc(); + if (pEntryPoint != (PCODE)NULL) return pEntryPoint; #ifndef DACCESS_COMPILE EnsureTemporaryEntryPoint(GetLoaderAllocator()); pEntryPoint = GetTemporaryEntryPoint_NoAlloc(); - _ASSERTE(pEntryPoint != NULL); + _ASSERTE(pEntryPoint != (PCODE)NULL); #ifdef _DEBUG MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint); @@ -3016,7 +3016,7 @@ void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) } CONTRACTL_END; - if (GetTemporaryEntryPoint_NoAlloc() == NULL) + if (GetTemporaryEntryPoint_NoAlloc() == (PCODE)NULL) { AllocMemTracker amt; EnsureTemporaryEntryPointCore(pLoaderAllocator, &amt); @@ -3033,18 +3033,18 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator } CONTRACTL_END; - if (GetTemporaryEntryPoint_NoAlloc() == NULL) + if (GetTemporaryEntryPoint_NoAlloc() == (PCODE)NULL) { PTR_PCODE pSlot = GetAddrOfSlot(); AllocMemTracker amt; Precode* pPrecode = Precode::Allocate(GetPrecodeType(), this, GetLoaderAllocator(), &amt); - if (InterlockedCompareExchangeT(&m_pTemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == NULL) + if (InterlockedCompareExchangeT(&m_pTemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL) amt.SuppressRelease(); PCODE tempEntryPoint = GetTemporaryEntryPoint_NoAlloc(); - _ASSERTE(tempEntryPoint != NULL); + _ASSERTE(tempEntryPoint != (PCODE)NULL); if (*pSlot == NULL) { @@ -3096,7 +3096,7 @@ Precode* MethodDesc::GetOrCreatePrecode() PrecodeType requiredType = GetPrecodeType(); PrecodeType availableType = PRECODE_INVALID; - if (!GetMethodDescChunk()->HasCompactEntryPoints() && tempEntry != NULL) + if (!GetMethodDescChunk()->HasCompactEntryPoints() && tempEntry != (PCODE)NULL) { availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); } @@ -3114,7 +3114,7 @@ Precode* MethodDesc::GetOrCreatePrecode() if (InterlockedCompareExchangeT(pSlot, pPrecode->GetEntryPoint(), tempEntry) == tempEntry) amt.SuppressRelease(); } - else if (*pSlot == NULL) + else if (*pSlot == (PCODE)NULL) { InterlockedCompareExchangeT(pSlot, tempEntry, (PCODE)NULL); } From fb335e796357c8ff5660b58a305ae86b8737fd42 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 10 May 2024 16:32:12 -0700 Subject: [PATCH 15/51] Hopefully the last NULL issue --- src/coreclr/vm/method.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index a18b7812918db..e6875ce97e7ca 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -3046,7 
+3046,7 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator PCODE tempEntryPoint = GetTemporaryEntryPoint_NoAlloc(); _ASSERTE(tempEntryPoint != (PCODE)NULL); - if (*pSlot == NULL) + if (*pSlot == (PCODE)NULL) { InterlockedCompareExchangeT(pSlot, tempEntryPoint, (PCODE)NULL); } From d31ebbbc8868dd9e7d7ccfc73ae066018314cf46 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Mon, 13 May 2024 14:24:34 -0700 Subject: [PATCH 16/51] Fix more NULL issues --- src/coreclr/vm/methodtable.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 95ca6d89ef28c..71f06900aa616 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -8380,20 +8380,20 @@ PCODE MethodTable::GetRestoredSlot(DWORD slotNumber) PCODE slot = GetCanonicalMethodTable()->GetSlot(slotNumber); #ifndef DACCESS_COMPILE - if (slot == NULL) + if (slot == (PCODE)NULL) { // This is a slot that has not been filled in yet. This can happen if we are // looking at a slot which has not yet been given a temporary entry point. MethodDesc *pMD = GetCanonicalMethodTable()->GetMethodDescForSlot_NoThrow(slotNumber); PCODE temporaryEntryPoint = pMD->GetTemporaryEntryPoint(); slot = GetCanonicalMethodTable()->GetSlot(slotNumber); - if (slot == NULL) + if (slot == (PCODE)NULL) { InterlockedCompareExchangeT(GetCanonicalMethodTable()->GetSlotPtrRaw(slotNumber), temporaryEntryPoint, (PCODE)NULL); slot = GetCanonicalMethodTable()->GetSlot(slotNumber); } } - _ASSERTE(slot != NULL); + _ASSERTE(slot != (PCODE)NULL); #endif // DACCESS_COMPILE return slot; } From 466cabc1fbab3280ddac2d7691be03b648eefa98 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Tue, 4 Jun 2024 14:24:20 -0700 Subject: [PATCH 17/51] Fixup obvious issues --- src/coreclr/vm/method.cpp | 18 ++++++++++++------ src/coreclr/vm/method.hpp | 9 ++++++--- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 1e98b2e0c4a3f..3d8a1a8ade4e0 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -209,7 +209,7 @@ LoaderAllocator * MethodDesc::GetDomainSpecificLoaderAllocator() } -HRESULT MethodDesc::EnsureCodeDataExists() +HRESULT MethodDesc::EnsureCodeDataExists(AllocMemTracker *pamTracker) { CONTRACTL { @@ -224,7 +224,10 @@ HRESULT MethodDesc::EnsureCodeDataExists() LoaderHeap* heap = GetLoaderAllocator()->GetHighFrequencyHeap(); AllocMemTracker amTracker; - MethodDescCodeData* alloc = (MethodDescCodeData*)amTracker.Track_NoThrow(heap->AllocMem_NoThrow(S_SIZE_T(sizeof(MethodDescCodeData)))); + if (pamTracker == NULL) + pamTracker = &amTracker; + + MethodDescCodeData* alloc = (MethodDescCodeData*)pamTracker->Track_NoThrow(heap->AllocMem_NoThrow(S_SIZE_T(sizeof(MethodDescCodeData)))); if (alloc == NULL) return E_OUTOFMEMORY; @@ -240,7 +243,7 @@ HRESULT MethodDesc::SetMethodDescVersionState(PTR_MethodDescVersioningState stat WRAPPER_NO_CONTRACT; HRESULT hr; - IfFailRet(EnsureCodeDataExists()); + IfFailRet(EnsureCodeDataExists(NULL)); _ASSERTE(m_codeData != NULL); if (InterlockedCompareExchangeT(&m_codeData->VersioningState, state, NULL) != NULL) @@ -254,9 +257,10 @@ HRESULT MethodDesc::SetMethodDescVersionState(PTR_MethodDescVersioningState stat PTR_MethodDescVersioningState MethodDesc::GetMethodDescVersionState() { WRAPPER_NO_CONTRACT; - if (m_codeData == NULL) + PTR_MethodDescCodeData codeData = VolatileLoadWithoutBarrier(&m_codeData); + if (codeData == NULL) return 
NULL; - return m_codeData->VersioningState; + return VolatileLoadWithoutBarrier(&codeData->VersioningState); } //******************************************************************************* @@ -3087,7 +3091,9 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator AllocMemTracker amt; Precode* pPrecode = Precode::Allocate(GetPrecodeType(), this, GetLoaderAllocator(), &amt); - if (InterlockedCompareExchangeT(&m_pTemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL) + IfFailThrow(EnsureCodeDataExists(pamTracker)); + + if (InterlockedCompareExchangeT(&m_codeData->m_pTemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL) amt.SuppressRelease(); PCODE tempEntryPoint = GetTemporaryEntryPoint_NoAlloc(); diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 56bf256c6ba55..1965fc776b7a0 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -223,9 +223,12 @@ class MethodDesc #ifdef HAS_COMPACT_ENTRYPOINTS return GetTemporaryEntryPoint(); #else - return VolatileLoadWithoutBarrier(&m_pTemporaryEntryPoint); + PTR_MethodDescCodeData codeData = VolatileLoadWithoutBarrier(&m_codeData); + if (codeData == NULL) + return NULL; + return VolatileLoadWithoutBarrier(&codeData->m_pTemporaryEntryPoint); #endif - } + } void SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); @@ -1717,7 +1720,7 @@ class MethodDesc #endif #ifndef DACCESS_COMPILE - HRESULT EnsureCodeDataExists(); + HRESULT EnsureCodeDataExists(AllocMemTracker *pamTracker); HRESULT SetMethodDescVersionState(PTR_MethodDescVersioningState state); #endif //!DACCESS_COMPILE From 44ccb9d46ca895c047195470230cefec810e67ec Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 5 Jun 2024 11:52:59 -0700 Subject: [PATCH 18/51] Fix allocation behavior so we don't free the data too early or too late --- src/coreclr/vm/method.cpp | 8 ++++---- src/coreclr/vm/method.hpp | 5 +++++ 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 3d8a1a8ade4e0..f6a5269a65de3 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -3069,8 +3069,7 @@ void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) if (GetTemporaryEntryPoint_NoAlloc() == (PCODE)NULL) { - AllocMemTracker amt; - EnsureTemporaryEntryPointCore(pLoaderAllocator, &amt); + EnsureTemporaryEntryPointCore(pLoaderAllocator, NULL); } } @@ -3089,12 +3088,13 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator PTR_PCODE pSlot = GetAddrOfSlot(); AllocMemTracker amt; - Precode* pPrecode = Precode::Allocate(GetPrecodeType(), this, GetLoaderAllocator(), &amt); + AllocMemTracker *pamTrackerPrecode = pamTracker != NULL ? 
pamTracker : &amt; + Precode* pPrecode = Precode::Allocate(GetPrecodeType(), this, GetLoaderAllocator(), pamTrackerPrecode); IfFailThrow(EnsureCodeDataExists(pamTracker)); if (InterlockedCompareExchangeT(&m_codeData->m_pTemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL) - amt.SuppressRelease(); + amt.SuppressRelease(); // We only need to suppress the release if we are working with a MethodDesc which is not newly allocated PCODE tempEntryPoint = GetTemporaryEntryPoint_NoAlloc(); _ASSERTE(tempEntryPoint != (PCODE)NULL); diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 1965fc776b7a0..05a1d4f7387a1 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -1433,6 +1433,9 @@ class MethodDesc #ifndef HAS_COMPACT_ENTRYPOINTS void EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator); + + // pamTracker must be NULL for a MethodDesc which cannot be freed by an external AllocMemTracker + // OR must be set to point to the same AllocMemTracker that controls allocation of the MethodDesc void EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); #ifndef DACCESS_COMPILE @@ -1720,6 +1723,8 @@ class MethodDesc #endif #ifndef DACCESS_COMPILE + // pamTracker must be NULL for a MethodDesc which cannot be freed by an external AllocMemTracker + // OR must be set to point to the same AllocMemTracker that controls allocation of the MethodDesc HRESULT EnsureCodeDataExists(AllocMemTracker *pamTracker); HRESULT SetMethodDescVersionState(PTR_MethodDescVersioningState state); From 37625006d5b4bc8f80209632394a19ba479cd14a Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 5 Jun 2024 14:12:30 -0700 Subject: [PATCH 19/51] Fix musl validation issue --- src/coreclr/vm/method.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 05a1d4f7387a1..941de3eaa7071 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -225,7 +225,7 @@ class MethodDesc #else PTR_MethodDescCodeData codeData = VolatileLoadWithoutBarrier(&m_codeData); if (codeData == NULL) - return NULL; + return (PCODE)NULL; return VolatileLoadWithoutBarrier(&codeData->m_pTemporaryEntryPoint); #endif } From a7b68c3fecb9d0ca4d085e2df1cf5168f2bd76a5 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Mon, 24 Jun 2024 17:02:46 -0700 Subject: [PATCH 20/51] Fix tiered compilation --- src/coreclr/vm/method.cpp | 109 +++++++++++++++++++++----- src/coreclr/vm/method.hpp | 32 ++++---- src/coreclr/vm/method.inl | 24 ++++++ src/coreclr/vm/methodtablebuilder.cpp | 7 +- 4 files changed, 129 insertions(+), 43 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index f6a5269a65de3..83650cd669032 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -964,6 +964,44 @@ WORD MethodDesc::InterlockedUpdateFlags3(WORD wMask, BOOL fSet) return wOldState; } +WORD MethodDescChunk::InterlockedUpdateFlags(WORD wMask, BOOL fSet) +{ + LIMITED_METHOD_CONTRACT; + + WORD wOldState = m_flagsAndTokenRange; + DWORD dwMask = wMask; + + // We need to make this operation atomic (multiple threads can play with the flags field at the same time). But the flags field + // is a word and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned + // dword (along with the intended bitfield mask). 
Note that we make the assumption that the flags word is aligned itself, so we + // only have two possibilities: the field already lies on a dword boundary or it's precisely one word out. + LONG* pdwFlags = (LONG*)((ULONG_PTR)&m_flagsAndTokenRange - (offsetof(MethodDescChunk, m_flagsAndTokenRange) & 0x3)); + +#ifdef _PREFAST_ +#pragma warning(push) +#pragma warning(disable:6326) // "Suppress PREFast warning about comparing two constants" +#endif // _PREFAST_ + +#if BIGENDIAN + if ((offsetof(MethodDescChunk, m_flagsAndTokenRange) & 0x3) == 0) { +#else // !BIGENDIAN + if ((offsetof(MethodDescChunk, m_flagsAndTokenRange) & 0x3) != 0) { +#endif // !BIGENDIAN + static_assert_no_msg(sizeof(m_flagsAndTokenRange) == 2); + dwMask <<= 16; + } +#ifdef _PREFAST_ +#pragma warning(pop) +#endif + + if (fSet) + InterlockedOr(pdwFlags, dwMask); + else + InterlockedAnd(pdwFlags, ~dwMask); + + return wOldState; +} + #endif // !DACCESS_COMPILE //******************************************************************************* @@ -3036,11 +3074,7 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc { WRAPPER_NO_CONTRACT; -#ifdef HAS_COMPACT_ENTRYPOINTS - GetMethodDescChunk()->EnsureTemporaryEntryPointsCreated(pLoaderAllocator, pamTracker); -#else EnsureTemporaryEntryPointCore(pLoaderAllocator, pamTracker); -#endif PTR_PCODE pSlot = GetAddrOfSlot(); #ifdef HAS_COMPACT_ENTRYPOINTS @@ -3085,6 +3119,7 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator if (GetTemporaryEntryPoint_NoAlloc() == (PCODE)NULL) { + GetMethodDescChunk()->DetermineAndSetIsEligibleForTieredCompilation(); PTR_PCODE pSlot = GetAddrOfSlot(); AllocMemTracker amt; @@ -3108,28 +3143,63 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator #endif //******************************************************************************* -#ifdef HAS_COMPACT_ENTRYPOINTS -void MethodDescChunk::CreateTemporaryEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) +void MethodDescChunk::DetermineAndSetIsEligibleForTieredCompilation() { WRAPPER_NO_CONTRACT; - _ASSERTE(GetTemporaryEntryPoints() == NULL); + if (!DeterminedIfMethodsAreEligibleForTieredCompilation()) + { + int count = GetCount(); - TADDR temporaryEntryPoints = Precode::AllocateTemporaryEntryPoints(this, pLoaderAllocator, pamTracker); + // Determine eligibility for tiered compilation + { + MethodDesc *pMD = GetFirstMethodDesc(); + bool chunkContainsEligibleMethods = pMD->DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk(); -#ifdef HAS_COMPACT_ENTRYPOINTS - // Precodes allocated only if they provide more compact representation or if it is required - if (temporaryEntryPoints == NULL) - { - temporaryEntryPoints = AllocateCompactEntryPoints(pLoaderAllocator, pamTracker); - } -#endif // HAS_COMPACT_ENTRYPOINTS + #ifdef _DEBUG + // Validate every MethodDesc has the same result for DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk + MethodDesc *pMDDebug = GetFirstMethodDesc(); + for (int i = 0; i < count; ++i) + { + _ASSERTE(chunkContainsEligibleMethods == pMDDebug->DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk()); + pMDDebug = (MethodDesc *)(dac_cast(pMDDebug) + pMDDebug->SizeOf()); + } + #endif + if (chunkContainsEligibleMethods) + { + for (int i = 0; i < count; ++i) + { + if (pMD->DetermineAndSetIsEligibleForTieredCompilation()) + { + _ASSERTE(pMD->IsEligibleForTieredCompilation_NoCheckMethodDescChunk()); + } + else + { + 
_ASSERTE(!pMD->IsEligibleForTieredCompilation_NoCheckMethodDescChunk()); + } + + pMD = (MethodDesc *)(dac_cast(pMD) + pMD->SizeOf()); + } + } + } - m_pTemporaryEntryPoints = temporaryEntryPoints; + InterlockedUpdateFlags(enum_flag_DeterminedIsEligibleForTieredCompilation, TRUE); - _ASSERTE(GetTemporaryEntryPoints() != NULL); +#ifdef _DEBUG + { + MethodDesc *pMD = GetFirstMethodDesc(); + for (int i = 0; i < count; ++i) + { + _ASSERTE(pMD->IsEligibleForTieredCompilation() == pMD->IsEligibleForTieredCompilation_NoCheckMethodDescChunk()); + if (pMD->IsEligibleForTieredCompilation()) + { + _ASSERTE(!pMD->IsVersionableWithPrecode() || pMD->RequiresStableEntryPoint()); + } + } + } +#endif + } } -#endif // HAS_COMPACT_ENTRYPOINTS //******************************************************************************* @@ -3231,8 +3301,7 @@ bool MethodDesc::DetermineAndSetIsEligibleForTieredCompilation() // Functions with NoOptimization or AggressiveOptimization don't participate in tiering !IsJitOptimizationLevelRequested()) { - m_wFlags3AndTokenRemainder |= enum_flag3_IsEligibleForTieredCompilation; - _ASSERTE(IsVersionable()); + InterlockedUpdateFlags3(enum_flag3_IsEligibleForTieredCompilation, TRUE); return true; } #endif diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 941de3eaa7071..0f1cff5ae2b39 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -1090,16 +1090,8 @@ class MethodDesc public: - bool IsEligibleForTieredCompilation() - { - LIMITED_METHOD_DAC_CONTRACT; - -#ifdef FEATURE_TIERED_COMPILATION - return (m_wFlags3AndTokenRemainder & enum_flag3_IsEligibleForTieredCompilation) != 0; -#else - return false; -#endif - } + bool IsEligibleForTieredCompilation(); + bool IsEligibleForTieredCompilation_NoCheckMethodDescChunk(); // This method must return the same value for all methods in one MethodDescChunk bool DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk(); @@ -2205,10 +2197,14 @@ class MethodDescChunk // These are separate to allow the flags space available and used to be obvious here // and for the logic that splits the token to be algorithmically generated based on the // #define - enum_flag_HasCompactEntrypoints = 0x4000, // Compact temporary entry points + enum_flag_DeterminedIsEligibleForTieredCompilation = 0x4000, // Has this chunk had its methods been determined eligible for tiered compilation or not // unused = 0x8000, }; +#ifndef DACCESS_COMPILE + WORD InterlockedUpdateFlags(WORD wMask, BOOL fSet); +#endif + public: // // Allocates methodDescCount identical MethodDescs in smallest possible number of chunks. 
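    // Illustrative aside (not part of the change): the new chunk-level flag above acts as a
    // "determined" marker so eligibility is computed for every MethodDesc in a chunk exactly
    // once and then published. A minimal sketch of the pattern, with hypothetical Container/Item
    // names and a plain LONG flags word standing in for the chunk's flag field:
    void Container::EnsurePropertyDetermined()
    {
        if (VolatileLoadWithoutBarrier(&m_flags) & FLAG_DETERMINED)
            return;                                   // already computed and published
        for (Item* it = FirstItem(); it != nullptr; it = it->Next())
            it->DetermineAndSetProperty();            // idempotent per item, so racing threads agree
        InterlockedOr(&m_flags, FLAG_DETERMINED);     // publish only after every item is set
    }
    // Readers that assert the flag (as the _ASSERTE in IsEligibleForTieredCompilation does) can
    // then rely on the per-item results being in place once the chunk-level bit is observed.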
@@ -2221,6 +2217,12 @@ class MethodDescChunk MethodTable *initialMT, class AllocMemTracker *pamTracker); + bool DeterminedIfMethodsAreEligibleForTieredCompilation() + { + LIMITED_METHOD_DAC_CONTRACT; + return (VolatileLoadWithoutBarrier(&m_flagsAndTokenRange) & enum_flag_DeterminedIsEligibleForTieredCompilation) != 0; + } + #ifdef HAS_COMPACT_ENTRYPOINTS TADDR GetTemporaryEntryPoints() { @@ -2244,8 +2246,8 @@ class MethodDescChunk CreateTemporaryEntryPoints(pLoaderAllocator, pamTracker); } - void CreateTemporaryEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); #endif + void DetermineAndSetIsEligibleForTieredCompilation(); #ifdef HAS_COMPACT_ENTRYPOINTS // @@ -2363,12 +2365,6 @@ class MethodDescChunk #endif private: - void SetHasCompactEntryPoints() - { - LIMITED_METHOD_CONTRACT; - m_flagsAndTokenRange |= enum_flag_HasCompactEntrypoints; - } - void SetTokenRange(UINT16 tokenRange) { LIMITED_METHOD_CONTRACT; diff --git a/src/coreclr/vm/method.inl b/src/coreclr/vm/method.inl index c348cd5c7a0da..7ccd33e4e1282 100644 --- a/src/coreclr/vm/method.inl +++ b/src/coreclr/vm/method.inl @@ -6,6 +6,30 @@ #ifndef _METHOD_INL_ #define _METHOD_INL_ +inline bool MethodDesc::IsEligibleForTieredCompilation() +{ + LIMITED_METHOD_DAC_CONTRACT; + +#ifdef FEATURE_TIERED_COMPILATION + _ASSERTE(GetMethodDescChunk()->DeterminedIfMethodsAreEligibleForTieredCompilation()); + return (VolatileLoadWithoutBarrier(&m_wFlags3AndTokenRemainder) & enum_flag3_IsEligibleForTieredCompilation) != 0; +#else + return false; +#endif +} + +inline bool MethodDesc::IsEligibleForTieredCompilation_NoCheckMethodDescChunk() +{ + LIMITED_METHOD_CONTRACT; + + // Just like above, but without the assert. This is used in the path which initializes the flag. +#ifdef FEATURE_TIERED_COMPILATION + return (VolatileLoadWithoutBarrier(&m_wFlags3AndTokenRemainder) & enum_flag3_IsEligibleForTieredCompilation) != 0; +#else + return false; +#endif +} + inline InstantiatedMethodDesc* MethodDesc::AsInstantiatedMethodDesc() const { WRAPPER_NO_CONTRACT; diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 630bcd224f344..34eefce914bf3 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10841,16 +10841,13 @@ MethodTableBuilder::SetupMethodTable2( // Keep bmtInterface data around since we no longer write the flags (IsDeclaredOnType and // IsImplementedByParent) into the interface map (these flags are only required during type loading). -#ifdef HAS_COMPACT_ENTRYPOINTS { for (MethodDescChunk *pChunk = GetHalfBakedClass()->GetChunks(); pChunk != NULL; pChunk = pChunk->GetNextChunk()) { - // Make sure that temporary entrypoints are create for methods. NGEN uses temporary - // entrypoints as surrogate keys for precodes. 
- pChunk->EnsureTemporaryEntryPointsCreated(GetLoaderAllocator(), GetMemTracker()); + // Make sure that eligibility for versionability is computed + pChunk->DetermineAndSetIsEligibleForTieredCompilation(); } } -#endif { // copy onto the real vtable (methods only) //@GENERICS: Because we sometimes load an inexact parent (see ClassLoader::GetParent) the inherited slots might From 6a772d9c04f6900df15492b73ddc37f2bed9ef56 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Tue, 25 Jun 2024 10:15:53 -0700 Subject: [PATCH 21/51] Remove Compact Entrypoint logic --- src/coreclr/inc/gfunc_list.h | 4 - src/coreclr/vm/arm/asmhelpers.S | 21 -- src/coreclr/vm/class.h | 11 - src/coreclr/vm/i386/stublinkerx86.cpp | 4 - src/coreclr/vm/jitinterface.cpp | 4 - src/coreclr/vm/method.cpp | 502 +------------------------- src/coreclr/vm/method.hpp | 102 +----- src/coreclr/vm/methodtablebuilder.cpp | 15 +- src/coreclr/vm/precode.cpp | 171 --------- src/coreclr/vm/precode.h | 22 -- src/coreclr/vm/prestub.cpp | 19 - src/coreclr/vm/stubmgr.cpp | 7 - 12 files changed, 4 insertions(+), 878 deletions(-) diff --git a/src/coreclr/inc/gfunc_list.h b/src/coreclr/inc/gfunc_list.h index 336d6a958b6f6..b7bfa5dc6a5eb 100644 --- a/src/coreclr/inc/gfunc_list.h +++ b/src/coreclr/inc/gfunc_list.h @@ -13,10 +13,6 @@ DEFINE_DACGFN(DACNotifyCompilationFinished) DEFINE_DACGFN(ThePreStub) -#if defined(HAS_COMPACT_ENTRYPOINTS) && defined(TARGET_ARM) -DEFINE_DACGFN(ThePreStubCompactARM) -#endif - DEFINE_DACGFN(ThePreStubPatchLabel) #ifdef FEATURE_COMINTEROP DEFINE_DACGFN(Unknown_AddRef) diff --git a/src/coreclr/vm/arm/asmhelpers.S b/src/coreclr/vm/arm/asmhelpers.S index f868e549333b4..68ba006a560a3 100644 --- a/src/coreclr/vm/arm/asmhelpers.S +++ b/src/coreclr/vm/arm/asmhelpers.S @@ -210,27 +210,6 @@ LOCAL_LABEL(LNullThis): NESTED_END ThePreStub, _TEXT -#ifdef HAS_COMPACT_ENTRYPOINTS -// ------------------------------------------------------------------ - NESTED_ENTRY ThePreStubCompactARM, _TEXT, NoHandler - - // r12 - address of compact entry point + PC_REG_RELATIVE_OFFSET - - PROLOG_WITH_TRANSITION_BLOCK - - mov r0, r12 - - bl C_FUNC(PreStubGetMethodDescForCompactEntryPoint) - - mov r12, r0 // pMethodDesc - - EPILOG_WITH_TRANSITION_BLOCK_TAILCALL - - b C_FUNC(ThePreStub) - - NESTED_END ThePreStubCompactARM, _TEXT -#endif - // ------------------------------------------------------------------ // This method does nothing. It's just a fixed function for the debugger to put a breakpoint on. 
LEAF_ENTRY ThePreStubPatch, _TEXT diff --git a/src/coreclr/vm/class.h b/src/coreclr/vm/class.h index 75003bae383d5..6622828245984 100644 --- a/src/coreclr/vm/class.h +++ b/src/coreclr/vm/class.h @@ -2077,17 +2077,6 @@ inline PCODE GetPreStubEntryPoint() return GetEEFuncEntryPoint(ThePreStub); } -#if defined(HAS_COMPACT_ENTRYPOINTS) && defined(TARGET_ARM) - -EXTERN_C void STDCALL ThePreStubCompactARM(); - -inline PCODE GetPreStubCompactARMEntryPoint() -{ - return GetEEFuncEntryPoint(ThePreStubCompactARM); -} - -#endif // defined(HAS_COMPACT_ENTRYPOINTS) && defined(TARGET_ARM) - PCODE TheUMThunkPreStub(); PCODE TheVarargNDirectStub(BOOL hasRetBuffArg); diff --git a/src/coreclr/vm/i386/stublinkerx86.cpp b/src/coreclr/vm/i386/stublinkerx86.cpp index 11b2119b9e978..b5e2afab3be75 100644 --- a/src/coreclr/vm/i386/stublinkerx86.cpp +++ b/src/coreclr/vm/i386/stublinkerx86.cpp @@ -3060,9 +3060,7 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s #ifdef TARGET_AMD64 VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) { -#ifndef HAS_COMPACT_ENTRYPOINTS pMD->EnsureSlotFilled(); -#endif if (pMD->HasStableEntryPoint()) { @@ -3083,9 +3081,7 @@ VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) EmitLoadMethodAddressIntoAX(pMD); Emit16(X86_INSTR_JMP_EAX); #else -#ifndef HAS_COMPACT_ENTRYPOINTS pMD->EnsureSlotFilled(); -#endif // Use direct call if possible if (pMD->HasStableEntryPoint()) { diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index fd63cebbc6fe0..1cc377c14240e 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -8695,9 +8695,7 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd, JIT_TO_EE_TRANSITION(); MethodDesc* method = GetMethod(methodHnd); -#ifndef HAS_COMPACT_ENTRYPOINTS method->EnsureSlotFilled(); -#endif //@GENERICS: shouldn't be doing this for instantiated methods as they live elsewhere _ASSERTE(!method->HasMethodInstantiation()); @@ -9156,9 +9154,7 @@ void CEEInfo::getFunctionEntryPoint(CORINFO_METHOD_HANDLE ftnHnd, // should never get here for EnC methods or if interception via remoting stub is required _ASSERTE(!ftn->InEnCEnabledModule()); -#ifndef HAS_COMPACT_ENTRYPOINTS ftn->EnsureSlotFilled(); -#endif ret = (void *)ftn->GetAddrOfSlot(); accessType = IAT_PVALUE; diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 83650cd669032..00e20bcea3258 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -497,7 +497,6 @@ Signature MethodDesc::GetSignature() return Signature(pSig, cSig); } -#ifndef HAS_COMPACT_ENTRYPOINTS PCODE MethodDesc::GetMethodEntryPoint_NoAlloc() { CONTRACTL @@ -526,17 +525,12 @@ PCODE MethodDesc::GetMethodEntryPoint_NoAlloc() _ASSERTE(GetMethodTable()->IsCanonicalMethodTable()); return GetMethodTable()->GetSlot(GetSlot()); } -#endif PCODE MethodDesc::GetMethodEntryPoint() { CONTRACTL { -#ifdef HAS_COMPACT_ENTRYPOINTS - NOTHROW; -#else THROWS; -#endif GC_NOTRIGGER; MODE_ANY; SUPPORTS_DAC; @@ -554,7 +548,7 @@ PCODE MethodDesc::GetMethodEntryPoint() TADDR pSlot = dac_cast(this) + size; -#if !defined(HAS_COMPACT_ENTRYPOINTS) && !defined(DACCESS_COMPILE) +#if !defined(DACCESS_COMPILE) if (*PTR_PCODE(pSlot) == (PCODE)NULL) { EnsureSlotFilled(); @@ -1820,13 +1814,6 @@ MethodDescChunk *MethodDescChunk::CreateChunk(LoaderHeap *pHeap, DWORD methodDes DWORD maxMethodDescsPerChunk = (DWORD)(MethodDescChunk::MaxSizeOfMethodDescs / oneSize); - // Limit the maximum MethodDescs per chunk by the number of 
precodes that can fit to a single memory page, - // since we allocate consecutive temporary entry points for all MethodDescs in the whole chunk. - DWORD maxPrecodesPerPage = Precode::GetMaxTemporaryEntryPointsCount(); - - if (maxPrecodesPerPage < maxMethodDescsPerChunk) - maxMethodDescsPerChunk = maxPrecodesPerPage; - if (methodDescCount == 0) methodDescCount = maxMethodDescsPerChunk; @@ -1849,9 +1836,6 @@ MethodDescChunk *MethodDescChunk::CreateChunk(LoaderHeap *pHeap, DWORD methodDes for (DWORD i = 0; i < count; i++) { pMD->SetChunkIndex(pChunk); -#ifdef HAS_COMPACT_ENTRYPOINTS - pMD->SetMethodDescIndex(i); -#endif pMD->SetClassification(classification); if (fNonVtableSlot) @@ -2117,10 +2101,8 @@ PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags _ASSERTE((accessFlags & ~CORINFO_ACCESS_LDFTN) == 0); } -#ifndef HAS_COMPACT_ENTRYPOINTS if (RequiresStableEntryPoint() && !HasStableEntryPoint()) EnsureSlotFilled(); -#endif // We create stable entrypoints for these upfront if (IsWrapperStub() || IsEnCAddedMethod()) @@ -2168,16 +2150,11 @@ PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags if (MayHavePrecode()) return GetOrCreatePrecode()->GetEntryPoint(); -#ifdef HAS_COMPACT_ENTRYPOINTS - // Caller has to call via slot or allocate funcptr stub - return NULL; -#else // HAS_COMPACT_ENTRYPOINTS // // Embed call to the temporary entrypoint into the code. It will be patched // to point to the actual code later. // return GetTemporaryEntryPoint(); -#endif // HAS_COMPACT_ENTRYPOINTS } //******************************************************************************* @@ -2414,9 +2391,6 @@ BOOL MethodDesc::RequiresMethodDescCallingConvention(BOOL fEstimateForChunk /*=F //******************************************************************************* BOOL MethodDesc::RequiresStableEntryPoint(BOOL fEstimateForChunk /*=FALSE*/) { -#ifdef HAS_COMPACT_ENTRYPOINTS - return RequiresStableEntryPointCore(fEstimateForChunk); -#else BYTE bFlags4 = VolatileLoadWithoutBarrier(&m_bFlags4); if (bFlags4 & enum_flag4_ComputedRequiresStableEntryPoint) { @@ -2430,7 +2404,6 @@ BOOL MethodDesc::RequiresStableEntryPoint(BOOL fEstimateForChunk /*=FALSE*/) VolatileStore(&m_bFlags4, (BYTE)(enum_flag4_ComputedRequiresStableEntryPoint | (fRequiresStableEntryPoint ? 
enum_flag4_RequiresStableEntryPoint : 0))); return fRequiresStableEntryPoint; } -#endif } BOOL MethodDesc::RequiresStableEntryPointCore(BOOL fEstimateForChunk) @@ -2577,14 +2550,6 @@ MethodDesc* MethodDesc::GetMethodDescFromStubAddr(PCODE addr, BOOL fSpeculative MethodDesc * pMD = NULL; -#ifdef HAS_COMPACT_ENTRYPOINTS - if (MethodDescChunk::IsCompactEntryPointAtAddress(addr)) - { - pMD = MethodDescChunk::GetMethodDescFromCompactEntryPoint(addr, fSpeculative); - RETURN(pMD); - } -#endif // HAS_COMPACT_ENTRYPOINTS - // Otherwise this must be some kind of precode // PTR_Precode pPrecode = Precode::GetPrecodeFromEntryPoint(addr, fSpeculative); @@ -2598,447 +2563,7 @@ MethodDesc* MethodDesc::GetMethodDescFromStubAddr(PCODE addr, BOOL fSpeculative RETURN(NULL); // Not found } -#ifdef HAS_COMPACT_ENTRYPOINTS - -#if defined(TARGET_X86) - -#include -static const struct CentralJumpCode { - BYTE m_movzxEAX[3]; - BYTE m_shlEAX[3]; - BYTE m_addEAX[1]; - MethodDesc* m_pBaseMD; - BYTE m_jmp[1]; - INT32 m_rel32; - - inline void Setup(CentralJumpCode* pCodeRX, MethodDesc* pMD, PCODE target, LoaderAllocator *pLoaderAllocator) { - WRAPPER_NO_CONTRACT; - m_pBaseMD = pMD; - m_rel32 = rel32UsingJumpStub(&pCodeRX->m_rel32, target, pMD, pLoaderAllocator); - } - - inline BOOL CheckTarget(TADDR target) { - LIMITED_METHOD_CONTRACT; - TADDR addr = rel32Decode(PTR_HOST_MEMBER_TADDR(CentralJumpCode, this, m_rel32)); - return (addr == target); - } -} -c_CentralJumpCode = { - { 0x0F, 0xB6, 0xC0 }, // movzx eax,al - { 0xC1, 0xE0, MethodDesc::ALIGNMENT_SHIFT }, // shl eax, MethodDesc::ALIGNMENT_SHIFT - { 0x05 }, NULL, // add eax, pBaseMD - { 0xE9 }, 0 // jmp PreStub -}; -#include - -#elif defined(TARGET_ARM) - -#include -struct CentralJumpCode { - BYTE m_ldrPC[4]; - BYTE m_short[2]; - MethodDescChunk *m_pChunk; - PCODE m_target; - - inline void Setup(PCODE target, MethodDescChunk *pChunk) { - WRAPPER_NO_CONTRACT; - - m_target = target; - m_pChunk = pChunk; - } - - inline BOOL CheckTarget(TADDR target) { - WRAPPER_NO_CONTRACT; - return ((TADDR)m_target == target); - } -} -c_CentralJumpCode = { - { 0xDF, 0xF8, 0x08, 0xF0 }, // ldr pc, =pTarget - { 0x00, 0x00 }, // short offset for alignment - 0, // pChunk - 0 // pTarget -}; -#include - -#else -#error Unsupported platform -#endif - -typedef DPTR(struct CentralJumpCode) PTR_CentralJumpCode; -#define TEP_CENTRAL_JUMP_SIZE sizeof(c_CentralJumpCode) -static_assert_no_msg((TEP_CENTRAL_JUMP_SIZE & 1) == 0); - -#define TEP_ENTRY_SIZE 4 - -#ifdef TARGET_ARM - -#define TEP_HALF_ENTRY_SIZE (TEP_ENTRY_SIZE / 2) - -// Compact entry point on arm consists of two thumb instructions: -// mov r12, pc -// b CentralJumpCode - -// First instruction 0x46fc -#define TEP_ENTRY_INSTR1_BYTE1 0xFC -#define TEP_ENTRY_INSTR1_BYTE2 0x46 - -// Mask for unconditional branch opcode -#define TEP_ENTRY_INSTR2_MASK1 0xE0 - -// Mask for opcode -#define TEP_ENTRY_INSTR2_MASK2 0xF8 - -// Bit used for ARM to identify compact entry points -#define COMPACT_ENTRY_ARM_CODE 0x2 - -/* static */ int MethodDescChunk::GetCompactEntryPointMaxCount () -{ - LIMITED_METHOD_DAC_CONTRACT; - - return MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB / TEP_ENTRY_SIZE; -} - -// Get offset from the start of current compact entry point to the CentralJumpCode -static uint16_t DecodeOffsetFromBranchToCentralJump (uint16_t instr) -{ - int16_t offset = decodeUnconditionalBranchThumb ((LPBYTE) &instr); - - offset += PC_REG_RELATIVE_OFFSET + TEP_HALF_ENTRY_SIZE; - - _ASSERTE (offset >= TEP_ENTRY_SIZE && (offset % TEP_ENTRY_SIZE == 0)); - - return 
(uint16_t) offset; -} - -#ifndef DACCESS_COMPILE - -// Encode branch instruction to central jump for current compact entry point -static uint16_t EncodeBranchToCentralJump (int16_t offset) -{ - _ASSERTE (offset >= 0 && (offset % TEP_ENTRY_SIZE == 0)); - - offset += TEP_HALF_ENTRY_SIZE - PC_REG_RELATIVE_OFFSET; - - uint16_t instr; - emitUnconditionalBranchThumb ((LPBYTE) &instr, offset); - - return instr; -} - -#endif // DACCESS_COMPILE - -#else // TARGET_ARM - -#define TEP_MAX_BEFORE_INDEX (1 + (127 / TEP_ENTRY_SIZE)) -#define TEP_MAX_BLOCK_INDEX (TEP_MAX_BEFORE_INDEX + (128 - TEP_CENTRAL_JUMP_SIZE) / TEP_ENTRY_SIZE) -#define TEP_FULL_BLOCK_SIZE (TEP_MAX_BLOCK_INDEX * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE) - -#endif // TARGET_ARM - -BOOL MethodDescChunk::IsCompactEntryPointAtAddress(PCODE addr) -{ - LIMITED_METHOD_DAC_CONTRACT; - -#if defined(TARGET_X86) || defined(TARGET_AMD64) - - // Compact entrypoints start at odd addresses - return (addr & 1) != 0; - -#elif defined(TARGET_ARM) - - // Compact entrypoints start at odd addresses (thumb) with second bit set to 1 - uint8_t compactEntryPointMask = THUMB_CODE | COMPACT_ENTRY_ARM_CODE; - return (addr & compactEntryPointMask) == compactEntryPointMask; - -#else - #error Unsupported platform -#endif -} - -//******************************************************************************* -/* static */ MethodDesc* MethodDescChunk::GetMethodDescFromCompactEntryPoint(PCODE addr, BOOL fSpeculative /*=FALSE*/) -{ - LIMITED_METHOD_CONTRACT; - -#ifdef DACCESS_COMPILE - // Always use speculative checks with DAC - fSpeculative = TRUE; -#endif - - // Always do consistency check in debug - if (fSpeculative INDEBUG(|| TRUE)) - { -#ifdef TARGET_ARM - TADDR instrCodeAddr = PCODEToPINSTR(addr); - if (!IsCompactEntryPointAtAddress(addr) || - *PTR_BYTE(instrCodeAddr) != TEP_ENTRY_INSTR1_BYTE1 || - *PTR_BYTE(instrCodeAddr+1) != TEP_ENTRY_INSTR1_BYTE2) -#else // TARGET_ARM - if ((addr & 3) != 1 || - *PTR_BYTE(addr) != X86_INSTR_MOV_AL || - *PTR_BYTE(addr+2) != X86_INSTR_JMP_REL8) -#endif // TARGET_ARM - { - if (fSpeculative) return NULL; - _ASSERTE(!"Unexpected code in temporary entrypoint"); - } - } - -#ifdef TARGET_ARM - - // On ARM compact entry points are thumb - _ASSERTE ((addr & THUMB_CODE) != 0); - addr = addr - THUMB_CODE; - - // Get offset for CentralJumpCode from current compact entry point - PTR_UINT16 pBranchInstr = (PTR_UINT16(addr)) + 1; - uint16_t offset = DecodeOffsetFromBranchToCentralJump (*pBranchInstr); - - TADDR centralJump = addr + offset; - int index = (centralJump - addr - TEP_ENTRY_SIZE) / TEP_ENTRY_SIZE; - -#else // TARGET_ARM - - int index = *PTR_BYTE(addr+1); - TADDR centralJump = addr + 4 + *PTR_SBYTE(addr+3); - -#endif // TARGET_ARM - - CentralJumpCode* pCentralJumpCode = PTR_CentralJumpCode(centralJump); - - // Always do consistency check in debug - if (fSpeculative INDEBUG(|| TRUE)) - { - SIZE_T i; - for (i = 0; i < TEP_CENTRAL_JUMP_SIZE; i++) - { - BYTE b = ((BYTE*)&c_CentralJumpCode)[i]; - if (b != 0 && b != *PTR_BYTE(centralJump+i)) - { - if (fSpeculative) return NULL; - _ASSERTE(!"Unexpected code in temporary entrypoint"); - } - } - -#ifdef TARGET_ARM - - _ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubCompactARMEntryPoint())); - -#else // TARGET_ARM - - _ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubEntryPoint())); - -#endif // TARGET_ARM - } - -#ifdef TARGET_ARM - // Go through all MethodDesc in MethodDescChunk and find the one with the required index - PTR_MethodDescChunk pChunk = 
*((DPTR(PTR_MethodDescChunk))(centralJump + offsetof(CentralJumpCode, m_pChunk))); - TADDR pMD = PTR_HOST_TO_TADDR (pChunk->GetFirstMethodDesc ()); - - _ASSERTE (index >= 0 && index < ((int) pChunk->GetCount ())); - - index = ((int) pChunk->GetCount ()) - 1 - index; - - SIZE_T totalSize = 0; - int curIndex = 0; - - while (index != curIndex) - { - SIZE_T sizeCur = (PTR_MethodDesc (pMD))->SizeOf (); - totalSize += sizeCur; - - pMD += sizeCur; - ++curIndex; - } - - return PTR_MethodDesc (pMD); -#else // TARGET_ARM - return PTR_MethodDesc((TADDR)pCentralJumpCode->m_pBaseMD + index * MethodDesc::ALIGNMENT); -#endif // TARGET_ARM -} - //******************************************************************************* -SIZE_T MethodDescChunk::SizeOfCompactEntryPoints(int count) -{ - LIMITED_METHOD_DAC_CONTRACT; - -#ifdef TARGET_ARM - - return COMPACT_ENTRY_ARM_CODE + count * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE; - -#else // TARGET_ARM - - int fullBlocks = count / TEP_MAX_BLOCK_INDEX; - int remainder = count % TEP_MAX_BLOCK_INDEX; - - return 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) + - (remainder * TEP_ENTRY_SIZE) + ((remainder != 0) ? TEP_CENTRAL_JUMP_SIZE : 0); - -#endif // TARGET_ARM -} - -#ifndef DACCESS_COMPILE -TADDR MethodDescChunk::AllocateCompactEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) -{ - CONTRACTL { - THROWS; - GC_NOTRIGGER; - } CONTRACTL_END; - - int count = GetCount(); - - SIZE_T size = SizeOfCompactEntryPoints(count); - - TADDR temporaryEntryPoints = (TADDR)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(size, sizeof(TADDR))); - ExecutableWriterHolder temporaryEntryPointsWriterHolder((void *)temporaryEntryPoints, size); - size_t rxOffset = temporaryEntryPoints - (TADDR)temporaryEntryPointsWriterHolder.GetRW(); - -#ifdef TARGET_ARM - BYTE* p = (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + COMPACT_ENTRY_ARM_CODE; - int relOffset = count * TEP_ENTRY_SIZE - TEP_ENTRY_SIZE; // relative offset for the short jump - - _ASSERTE (relOffset < MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB); -#else // TARGET_ARM - // make the temporary entrypoints unaligned, so they are easy to identify - BYTE* p = (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + 1; - int indexInBlock = TEP_MAX_BLOCK_INDEX; // recompute relOffset in first iteration - int relOffset = 0; // relative offset for the short jump -#endif // TARGET_ARM - - MethodDesc * pBaseMD = 0; // index of the start of the block - - MethodDesc * pMD = GetFirstMethodDesc(); - for (int index = 0; index < count; index++) - { -#ifdef TARGET_ARM - - uint8_t *pMovInstrByte1 = (uint8_t *)p; - uint8_t *pMovInstrByte2 = (uint8_t *)p+1; - uint16_t *pBranchInstr = ((uint16_t *)p)+1; - - *pMovInstrByte1 = TEP_ENTRY_INSTR1_BYTE1; - *pMovInstrByte2 = TEP_ENTRY_INSTR1_BYTE2; - *pBranchInstr = EncodeBranchToCentralJump ((int16_t) relOffset); - - p += TEP_ENTRY_SIZE; - -#else // TARGET_ARM - - if (indexInBlock == TEP_MAX_BLOCK_INDEX) - { - relOffset = (min(count - index, TEP_MAX_BEFORE_INDEX) - 1) * TEP_ENTRY_SIZE; - indexInBlock = 0; - pBaseMD = pMD; - } - - *(p+0) = X86_INSTR_MOV_AL; - int methodDescIndex = pMD->GetMethodDescChunkIndex() - pBaseMD->GetMethodDescChunkIndex(); - _ASSERTE(FitsInU1(methodDescIndex)); - *(p+1) = (BYTE)methodDescIndex; - - *(p+2) = X86_INSTR_JMP_REL8; - _ASSERTE(FitsInI1(relOffset)); - *(p+3) = (BYTE)relOffset; - - p += TEP_ENTRY_SIZE; static_assert_no_msg(TEP_ENTRY_SIZE == 4); - - if (relOffset == 0) - { - CentralJumpCode* pCode = (CentralJumpCode*)p; - CentralJumpCode* pCodeRX = 
(CentralJumpCode*)(p + rxOffset); - - memcpy(pCode, &c_CentralJumpCode, TEP_CENTRAL_JUMP_SIZE); - - pCode->Setup(pCodeRX, pBaseMD, GetPreStubEntryPoint(), pLoaderAllocator); - - p += TEP_CENTRAL_JUMP_SIZE; - - relOffset -= TEP_CENTRAL_JUMP_SIZE; - } - - indexInBlock++; - -#endif // TARGET_ARM - - relOffset -= TEP_ENTRY_SIZE; - pMD = (MethodDesc *)((BYTE *)pMD + pMD->SizeOf()); - } - -#ifdef TARGET_ARM - - CentralJumpCode* pCode = (CentralJumpCode*)p; - memcpy(pCode, &c_CentralJumpCode, TEP_CENTRAL_JUMP_SIZE); - pCode->Setup (GetPreStubCompactARMEntryPoint(), this); - - _ASSERTE(p + TEP_CENTRAL_JUMP_SIZE == (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + size); - -#else // TARGET_ARM - - _ASSERTE(p == (BYTE*)temporaryEntryPointsWriterHolder.GetRW() + size); - -#endif // TARGET_ARM - - ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, size); - - SetHasCompactEntryPoints(); - return temporaryEntryPoints; -} -#endif // !DACCESS_COMPILE - -#endif // HAS_COMPACT_ENTRYPOINTS - -//******************************************************************************* -#ifdef HAS_COMPACT_ENTRYPOINTS -PCODE MethodDescChunk::GetTemporaryEntryPoint(int index) -{ - LIMITED_METHOD_CONTRACT; - -#ifdef HAS_COMPACT_ENTRYPOINTS - if (HasCompactEntryPoints()) - { -#ifdef TARGET_ARM - - return GetTemporaryEntryPoints() + COMPACT_ENTRY_ARM_CODE + THUMB_CODE + index * TEP_ENTRY_SIZE; - -#else // TARGET_ARM - - int fullBlocks = index / TEP_MAX_BLOCK_INDEX; - int remainder = index % TEP_MAX_BLOCK_INDEX; - - return GetTemporaryEntryPoints() + 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) + - (remainder * TEP_ENTRY_SIZE) + ((remainder >= TEP_MAX_BEFORE_INDEX) ? TEP_CENTRAL_JUMP_SIZE : 0); - -#endif // TARGET_ARM - } -#endif // HAS_COMPACT_ENTRYPOINTS - - return Precode::GetPrecodeForTemporaryEntryPoint(GetTemporaryEntryPoints(), index)->GetEntryPoint(); -} -#endif // HAS_COMPACT_ENTRYPOINTS - -#ifdef HAS_COMPACT_ENTRYPOINTS -PCODE MethodDesc::GetTemporaryEntryPoint() -{ - CONTRACTL - { - NOTHROW; - GC_NOTRIGGER; - MODE_ANY; - } - CONTRACTL_END; - - MethodDescChunk* pChunk = GetMethodDescChunk(); - TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(GetMethodDescIndex()); - -#ifdef _DEBUG - MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint); - _ASSERTE(PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD)); -#endif - - return pEntryPoint; -} -#else PCODE MethodDesc::GetTemporaryEntryPoint() { CONTRACTL @@ -3066,7 +2591,6 @@ PCODE MethodDesc::GetTemporaryEntryPoint() #endif return pEntryPoint; } -#endif // HAS_COMPACT_ENTRYPOINTS #ifndef DACCESS_COMPILE //******************************************************************************* @@ -3077,9 +2601,6 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc EnsureTemporaryEntryPointCore(pLoaderAllocator, pamTracker); PTR_PCODE pSlot = GetAddrOfSlot(); -#ifdef HAS_COMPACT_ENTRYPOINTS - _ASSERTE(*pSlot == NULL); -#endif *pSlot = GetTemporaryEntryPoint(); if (RequiresStableEntryPoint()) @@ -3090,7 +2611,6 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc } } -#ifndef HAS_COMPACT_ENTRYPOINTS void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) { CONTRACTL @@ -3140,7 +2660,6 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator } } } -#endif //******************************************************************************* void MethodDescChunk::DetermineAndSetIsEligibleForTieredCompilation() @@ -3219,7 +2738,7 @@ Precode* 
MethodDesc::GetOrCreatePrecode() PrecodeType requiredType = GetPrecodeType(); PrecodeType availableType = PRECODE_INVALID; - if (!GetMethodDescChunk()->HasCompactEntryPoints() && tempEntry != (PCODE)NULL) + if (tempEntry != (PCODE)NULL) { availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); } @@ -3227,7 +2746,6 @@ Precode* MethodDesc::GetOrCreatePrecode() // Allocate the precode if necessary if (requiredType != availableType) { - // code:Precode::AllocateTemporaryEntryPoints should always create precode of the right type for dynamic methods. // If we took this path for dynamic methods, the precode may leak since we may allocate it in domain-neutral loader heap. _ASSERTE(!IsLCGMethod()); @@ -4105,22 +3623,6 @@ MethodDescChunk::EnumMemoryRegions(CLRDataEnumMemoryFlags flags) { pMT->EnumMemoryRegions(flags); } -#ifdef HAS_COMPACT_ENTRYPOINTS - SIZE_T size; - -#ifdef HAS_COMPACT_ENTRYPOINTS - if (HasCompactEntryPoints()) - { - size = SizeOfCompactEntryPoints(GetCount()); - } - else -#endif // HAS_COMPACT_ENTRYPOINTS - { - size = Precode::SizeOfTemporaryEntryPoints(GetTemporaryEntryPoints(), GetCount()); - } - - DacEnumMemoryRegion(GetTemporaryEntryPoints(), size); -#endif // HAS_COMPACT_ENTRYPOINTS MethodDesc * pMD = GetFirstMethodDesc(); MethodDesc * pOldMD = NULL; diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 0f1cff5ae2b39..7b8d23ff1ca6b 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -162,9 +162,7 @@ enum MethodDescFlags struct MethodDescCodeData final { PTR_MethodDescVersioningState VersioningState; -#ifndef HAS_COMPACT_ENTRYPOINTS PCODE m_pTemporaryEntryPoint; -#endif }; using PTR_MethodDescCodeData = DPTR(MethodDescCodeData); @@ -220,14 +218,10 @@ class MethodDesc PCODE GetTemporaryEntryPoint_NoAlloc() { LIMITED_METHOD_CONTRACT; -#ifdef HAS_COMPACT_ENTRYPOINTS - return GetTemporaryEntryPoint(); -#else PTR_MethodDescCodeData codeData = VolatileLoadWithoutBarrier(&m_codeData); if (codeData == NULL) return (PCODE)NULL; return VolatileLoadWithoutBarrier(&codeData->m_pTemporaryEntryPoint); -#endif } void SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); @@ -1413,17 +1407,8 @@ class MethodDesc // PCODE GetMethodEntryPoint(); - PCODE GetMethodEntryPoint_NoAlloc() -#ifdef HAS_COMPACT_ENTRYPOINTS - { - WRAPPER_NO_CONTRACT; - return GetMethodEntryPoint(); - } -#else - ; -#endif + PCODE GetMethodEntryPoint_NoAlloc(); -#ifndef HAS_COMPACT_ENTRYPOINTS void EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator); // pamTracker must be NULL for a MethodDesc which cannot be freed by an external AllocMemTracker @@ -1459,7 +1444,6 @@ class MethodDesc } } #endif // DACCESS_COMPILE -#endif //******************************************************************************* // Returns the address of the native code. @@ -1680,15 +1664,11 @@ class MethodDesc BYTE m_chunkIndex; -#ifndef HAS_COMPACT_ENTRYPOINTS enum { enum_flag4_ComputedRequiresStableEntryPoint = 0x01, enum_flag4_RequiresStableEntryPoint = 0x02, }; BYTE m_bFlags4; // Used to hold more flags -#else - BYTE m_methodIndex; // Used to hold the index into the chunk of this MethodDesc. Currently all 8 bits are used, but we could likely work with only 7 bits -#endif WORD m_wSlotNumber; // The slot number of this MethodDesc in the vtable array. 
WORD m_wFlags; // See MethodDescFlags @@ -1699,21 +1679,6 @@ class MethodDesc void EnumMemoryRegions(CLRDataEnumMemoryFlags flags); #endif -#ifdef HAS_COMPACT_ENTRYPOINTS - BYTE GetMethodDescIndex() - { - LIMITED_METHOD_CONTRACT; - return m_methodIndex; - } - - void SetMethodDescIndex(COUNT_T index) - { - LIMITED_METHOD_CONTRACT; - _ASSERTE(index <= 255); - m_methodIndex = (BYTE)index; - } -#endif - #ifndef DACCESS_COMPILE // pamTracker must be NULL for a MethodDesc which cannot be freed by an external AllocMemTracker // OR must be set to point to the same AllocMemTracker that controls allocation of the MethodDesc @@ -2223,58 +2188,8 @@ class MethodDescChunk return (VolatileLoadWithoutBarrier(&m_flagsAndTokenRange) & enum_flag_DeterminedIsEligibleForTieredCompilation) != 0; } -#ifdef HAS_COMPACT_ENTRYPOINTS - TADDR GetTemporaryEntryPoints() - { - LIMITED_METHOD_CONTRACT; - return m_pTemporaryEntryPoints; - } - - PCODE GetTemporaryEntryPoint(int index); - - void EnsureTemporaryEntryPointsCreated(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) - { - CONTRACTL - { - THROWS; - GC_NOTRIGGER; - MODE_ANY; - } - CONTRACTL_END; - - if (GetTemporaryEntryPoints() == (TADDR)0) - CreateTemporaryEntryPoints(pLoaderAllocator, pamTracker); - } - -#endif void DetermineAndSetIsEligibleForTieredCompilation(); -#ifdef HAS_COMPACT_ENTRYPOINTS - // - // There two implementation options for temporary entrypoints: - // - // (1) Compact entrypoints. They provide as dense entrypoints as possible, but can't be patched - // to point to the final code. The call to unjitted method is indirect call via slot. - // - // (2) Precodes. The precode will be patched to point to the final code eventually, thus - // the temporary entrypoint can be embedded in the code. The call to unjitted method is - // direct call to direct jump. - // - // We use (1) for x86 and (2) for 64-bit to get the best performance on each platform. - // For ARM (1) is used. 
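    // Illustrative aside (not part of the removal): with option (1) deleted, every platform now
    // follows option (2). Earlier patches in this series lazily allocate a per-method precode and
    // publish it through MethodDescCodeData::m_pTemporaryEntryPoint. A condensed sketch of that
    // publish step, using the names from those patches but omitting the AllocMemTracker plumbing:
    //
    //   Precode* pPrecode = Precode::Allocate(GetPrecodeType(), this, GetLoaderAllocator(), &amt);
    //   if (InterlockedCompareExchangeT(&m_codeData->m_pTemporaryEntryPoint,
    //                                   pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL)
    //       amt.SuppressRelease();   // this thread's precode won the race and is kept
    //
    // The losing thread's allocation is released by its tracker, so all callers observe the same
    // temporary entry point.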
- - TADDR AllocateCompactEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); - - static MethodDesc* GetMethodDescFromCompactEntryPoint(PCODE addr, BOOL fSpeculative = FALSE); - static SIZE_T SizeOfCompactEntryPoints(int count); - - static BOOL IsCompactEntryPointAtAddress(PCODE addr); - -#ifdef TARGET_ARM - static int GetCompactEntryPointMaxCount (); -#endif // TARGET_ARM -#endif // HAS_COMPACT_ENTRYPOINTS - FORCEINLINE PTR_MethodTable GetMethodTable() { LIMITED_METHOD_DAC_CONTRACT; @@ -2328,17 +2243,6 @@ class MethodDescChunk return m_count + 1; } - inline BOOL HasCompactEntryPoints() - { - LIMITED_METHOD_DAC_CONTRACT; - -#ifdef HAS_COMPACT_ENTRYPOINTS - return (m_flagsAndTokenRange & enum_flag_HasCompactEntrypoints) != 0; -#else - return FALSE; -#endif - } - inline UINT16 GetTokRange() { LIMITED_METHOD_DAC_CONTRACT; @@ -2377,10 +2281,6 @@ class MethodDescChunk PTR_MethodDescChunk m_next; -#ifdef HAS_COMPACT_ENTRYPOINTS - TADDR m_pTemporaryEntryPoints; -#endif - BYTE m_size; // The size of this chunk minus 1 (in multiples of MethodDesc::ALIGNMENT) BYTE m_count; // The number of MethodDescs in this chunk minus 1 UINT16 m_flagsAndTokenRange; diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 34eefce914bf3..4aa3cbb4677a4 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -6955,9 +6955,6 @@ VOID MethodTableBuilder::AllocAndInitMethodDescs() SIZE_T sizeOfMethodDescs = 0; // current running size of methodDesc chunk int startIndex = 0; // start of the current chunk (index into bmtMethod array) - // Limit the maximum MethodDescs per chunk by the number of precodes that can fit to a single memory page, - // since we allocate consecutive temporary entry points for all MethodDescs in the whole chunk. 
- DWORD maxPrecodesPerPage = Precode::GetMaxTemporaryEntryPointsCount(); DWORD methodDescCount = 0; DeclaredMethodIterator it(*this); @@ -6998,8 +6995,7 @@ VOID MethodTableBuilder::AllocAndInitMethodDescs() } if (tokenRange != currentTokenRange || - sizeOfMethodDescs + size > MethodDescChunk::MaxSizeOfMethodDescs || - methodDescCount + currentSlotMethodDescCount > maxPrecodesPerPage) + sizeOfMethodDescs + size > MethodDescChunk::MaxSizeOfMethodDescs) { if (sizeOfMethodDescs != 0) { @@ -7065,10 +7061,6 @@ VOID MethodTableBuilder::AllocAndInitMethodDescChunk(COUNT_T startIndex, COUNT_T MethodDesc * pMD = (MethodDesc *)((BYTE *)pChunk + offset); pMD->SetChunkIndex(pChunk); -#ifdef HAS_COMPACT_ENTRYPOINTS - pMD->SetMethodDescIndex(methodDescCount); -#endif - InitNewMethodDesc(pMDMethod, pMD); #ifdef _PREFAST_ @@ -7111,9 +7103,6 @@ VOID MethodTableBuilder::AllocAndInitMethodDescChunk(COUNT_T startIndex, COUNT_T // Reset the chunk index pUnboxedMD->SetChunkIndex(pChunk); -#ifdef HAS_COMPACT_ENTRYPOINTS - pUnboxedMD->SetMethodDescIndex(methodDescCount); -#endif if (bmtGenerics->GetNumGenericArgs() == 0) { pUnboxedMD->SetHasNonVtableSlot(); @@ -10897,9 +10886,7 @@ MethodTableBuilder::SetupMethodTable2( if ((pMD->GetSlot() == iCurSlot) && (GetParentMethodTable() == NULL || iCurSlot >= GetParentMethodTable()->GetNumVirtuals())) continue; // For cases where the method is defining the method desc slot, we don't need to fill it in yet -#ifndef HAS_COMPACT_ENTRYPOINTS pMD->EnsureTemporaryEntryPointCore(GetLoaderAllocator(), GetMemTracker()); -#endif // HAS_COMPACT_ENTRYPOINTS PCODE addr = pMD->GetTemporaryEntryPoint(); _ASSERTE(addr != NULL); diff --git a/src/coreclr/vm/precode.cpp b/src/coreclr/vm/precode.cpp index 9e9b36ede97b9..4dbc3e4394834 100644 --- a/src/coreclr/vm/precode.cpp +++ b/src/coreclr/vm/precode.cpp @@ -199,32 +199,6 @@ PCODE Precode::TryToSkipFixupPrecode(PCODE addr) return 0; } -Precode* Precode::GetPrecodeForTemporaryEntryPoint(TADDR temporaryEntryPoints, int index) -{ - WRAPPER_NO_CONTRACT; - PrecodeType t = PTR_Precode(temporaryEntryPoints)->GetType(); - SIZE_T oneSize = SizeOfTemporaryEntryPoint(t); - return PTR_Precode(temporaryEntryPoints + index * oneSize); -} - -SIZE_T Precode::SizeOfTemporaryEntryPoints(PrecodeType t, int count) -{ - WRAPPER_NO_CONTRACT; - SUPPORTS_DAC; - - SIZE_T oneSize = SizeOfTemporaryEntryPoint(t); - return count * oneSize; -} - -SIZE_T Precode::SizeOfTemporaryEntryPoints(TADDR temporaryEntryPoints, int count) -{ - WRAPPER_NO_CONTRACT; - SUPPORTS_DAC; - - PrecodeType precodeType = PTR_Precode(temporaryEntryPoints)->GetType(); - return SizeOfTemporaryEntryPoints(precodeType, count); -} - #ifndef DACCESS_COMPILE Precode* Precode::Allocate(PrecodeType t, MethodDesc* pMD, @@ -384,144 +358,6 @@ void Precode::Reset() } } -/* static */ -TADDR Precode::AllocateTemporaryEntryPoints(MethodDescChunk * pChunk, - LoaderAllocator * pLoaderAllocator, - AllocMemTracker * pamTracker) -{ - WRAPPER_NO_CONTRACT; - - MethodDesc* pFirstMD = pChunk->GetFirstMethodDesc(); - - int count = pChunk->GetCount(); - - // Determine eligibility for tiered compilation -#ifdef HAS_COMPACT_ENTRYPOINTS - bool hasMethodDescVersionableWithPrecode = false; -#endif - { - MethodDesc *pMD = pChunk->GetFirstMethodDesc(); - bool chunkContainsEligibleMethods = pMD->DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk(); - -#ifdef _DEBUG - // Validate every MethodDesc has the same result for DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk - MethodDesc *pMDDebug = 
pChunk->GetFirstMethodDesc(); - for (int i = 0; i < count; ++i) - { - _ASSERTE(chunkContainsEligibleMethods == pMDDebug->DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk()); - pMDDebug = (MethodDesc *)(dac_cast(pMDDebug) + pMDDebug->SizeOf()); - } -#endif -#ifndef HAS_COMPACT_ENTRYPOINTS - if (chunkContainsEligibleMethods) -#endif - { - for (int i = 0; i < count; ++i) - { - if (chunkContainsEligibleMethods && pMD->DetermineAndSetIsEligibleForTieredCompilation()) - { - _ASSERTE(pMD->IsEligibleForTieredCompilation()); - _ASSERTE(!pMD->IsVersionableWithPrecode() || pMD->RequiresStableEntryPoint()); - } - -#ifdef HAS_COMPACT_ENTRYPOINTS - if (pMD->IsVersionableWithPrecode()) - { - _ASSERTE(pMD->RequiresStableEntryPoint()); - hasMethodDescVersionableWithPrecode = true; - } -#endif - - pMD = (MethodDesc *)(dac_cast(pMD) + pMD->SizeOf()); - } - } - } - - PrecodeType t = PRECODE_STUB; - bool preallocateJumpStubs = false; - -#ifdef HAS_FIXUP_PRECODE - // Default to faster fixup precode if possible - t = PRECODE_FIXUP; -#endif // HAS_FIXUP_PRECODE - - SIZE_T totalSize = SizeOfTemporaryEntryPoints(t, count); - -#ifdef HAS_COMPACT_ENTRYPOINTS - // Note that these are just best guesses to save memory. If we guessed wrong, - // we will allocate a new exact type of precode in GetOrCreatePrecode. - BOOL fForcedPrecode = hasMethodDescVersionableWithPrecode || pFirstMD->RequiresStableEntryPoint(count > 1); - -#ifdef TARGET_ARM - if (pFirstMD->RequiresMethodDescCallingConvention(count > 1) - || count >= MethodDescChunk::GetCompactEntryPointMaxCount ()) - { - // We do not pass method desc on scratch register - fForcedPrecode = TRUE; - } -#endif // TARGET_ARM - - if (!fForcedPrecode && (totalSize > MethodDescChunk::SizeOfCompactEntryPoints(count))) - return NULL; -#endif - - TADDR temporaryEntryPoints; - SIZE_T oneSize = SizeOfTemporaryEntryPoint(t); - MethodDesc * pMD = pChunk->GetFirstMethodDesc(); - - if (t == PRECODE_FIXUP || t == PRECODE_STUB) - { - LoaderHeap *pStubHeap; - if (t == PRECODE_FIXUP) - { - pStubHeap = pLoaderAllocator->GetFixupPrecodeHeap(); - } - else - { - pStubHeap = pLoaderAllocator->GetNewStubPrecodeHeap(); - } - - temporaryEntryPoints = (TADDR)pamTracker->Track(pStubHeap->AllocAlignedMem(totalSize, 1)); - TADDR entryPoint = temporaryEntryPoints; - for (int i = 0; i < count; i++) - { - ((Precode *)entryPoint)->Init((Precode *)entryPoint, t, pMD, pLoaderAllocator); - - _ASSERTE((Precode *)entryPoint == GetPrecodeForTemporaryEntryPoint(temporaryEntryPoints, i)); - entryPoint += oneSize; - - pMD = (MethodDesc *)(dac_cast(pMD) + pMD->SizeOf()); - } - } - else - { - _ASSERTE(FALSE); - temporaryEntryPoints = (TADDR)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(totalSize, AlignOf(t))); - ExecutableWriterHolder entryPointsWriterHolder((void*)temporaryEntryPoints, totalSize); - - TADDR entryPoint = temporaryEntryPoints; - TADDR entryPointRW = (TADDR)entryPointsWriterHolder.GetRW(); - for (int i = 0; i < count; i++) - { - ((Precode *)entryPointRW)->Init((Precode *)entryPoint, t, pMD, pLoaderAllocator); - - _ASSERTE((Precode *)entryPoint == GetPrecodeForTemporaryEntryPoint(temporaryEntryPoints, i)); - entryPoint += oneSize; - entryPointRW += oneSize; - - pMD = (MethodDesc *)(dac_cast(pMD) + pMD->SizeOf()); - } - } - -#ifdef FEATURE_PERFMAP - PerfMap::LogStubs(__FUNCTION__, "PRECODE_STUB", (PCODE)temporaryEntryPoints, count * oneSize); -#endif - - ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, count * oneSize); - - return temporaryEntryPoints; -} 
- #endif // !DACCESS_COMPILE #ifdef DACCESS_COMPILE @@ -801,13 +637,6 @@ BOOL DoesSlotCallPrestub(PCODE pCode) TADDR pInstr = dac_cast(PCODEToPINSTR(pCode)); -#ifdef HAS_COMPACT_ENTRYPOINTS - if (MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, TRUE) != NULL) - { - return TRUE; - } -#endif - if (!IS_ALIGNED(pInstr, PRECODE_ALIGNMENT)) { return FALSE; diff --git a/src/coreclr/vm/precode.h b/src/coreclr/vm/precode.h index fac2433c7428a..7203911cc13db 100644 --- a/src/coreclr/vm/precode.h +++ b/src/coreclr/vm/precode.h @@ -467,12 +467,6 @@ class Precode { { SUPPORTS_DAC; unsigned int align = PRECODE_ALIGNMENT; - -#if defined(TARGET_ARM) && defined(HAS_COMPACT_ENTRYPOINTS) - // Precodes have to be aligned to allow fast compact entry points check - _ASSERTE (align >= sizeof(void*)); -#endif // TARGET_ARM && HAS_COMPACT_ENTRYPOINTS - return align; } @@ -585,22 +579,6 @@ class Precode { return ALIGN_UP(SizeOf(t), AlignOf(t)); } - static Precode * GetPrecodeForTemporaryEntryPoint(TADDR temporaryEntryPoints, int index); - - static SIZE_T SizeOfTemporaryEntryPoints(PrecodeType t, int count); - static SIZE_T SizeOfTemporaryEntryPoints(TADDR temporaryEntryPoints, int count); - - static TADDR AllocateTemporaryEntryPoints(MethodDescChunk* pChunk, - LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); - - static DWORD GetMaxTemporaryEntryPointsCount() - { - SIZE_T maxPrecodeCodeSize = Max(FixupPrecode::CodeSize, StubPrecode::CodeSize); - SIZE_T count = GetStubCodePageSize() / maxPrecodeCodeSize; - _ASSERTE(count < MAXDWORD); - return (DWORD)count; - } - #ifdef DACCESS_COMPILE void EnumMemoryRegions(CLRDataEnumMemoryFlags flags); #endif diff --git a/src/coreclr/vm/prestub.cpp b/src/coreclr/vm/prestub.cpp index a36f4c4679b9b..01c77de70fcf0 100644 --- a/src/coreclr/vm/prestub.cpp +++ b/src/coreclr/vm/prestub.cpp @@ -145,10 +145,8 @@ PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BO } } -#ifndef HAS_COMPACT_ENTRYPOINTS // Patch the fake entrypoint if necessary Precode::GetPrecodeFromEntryPoint(pExpected)->SetTargetInterlocked(pTarget); -#endif // HAS_COMPACT_ENTRYPOINTS } if (HasNonVtableSlot()) @@ -2553,21 +2551,6 @@ Stub * MakeInstantiatingStubWorker(MethodDesc *pMD) } #endif // defined(FEATURE_SHARE_GENERIC_CODE) -#if defined (HAS_COMPACT_ENTRYPOINTS) && defined (TARGET_ARM) - -extern "C" MethodDesc * STDCALL PreStubGetMethodDescForCompactEntryPoint (PCODE pCode) -{ - _ASSERTE (pCode >= PC_REG_RELATIVE_OFFSET); - - pCode = (PCODE) (pCode - PC_REG_RELATIVE_OFFSET + THUMB_CODE); - - _ASSERTE (MethodDescChunk::IsCompactEntryPointAtAddress (pCode)); - - return MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, FALSE); -} - -#endif // defined (HAS_COMPACT_ENTRYPOINTS) && defined (TARGET_ARM) - //============================================================================= // This function generates the real code when from Preemptive mode. // It is specifically designed to work with the UnmanagedCallersOnlyAttribute. 
@@ -2859,9 +2842,7 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT, CallerGCMode callerGCMo { pCode = GetStubForInteropMethod(this); -#ifndef HAS_COMPACT_ENTRYPOINTS EnsureSlotFilled(); -#endif GetPrecode()->SetTargetInterlocked(pCode); RETURN GetStableEntryPoint(); diff --git a/src/coreclr/vm/stubmgr.cpp b/src/coreclr/vm/stubmgr.cpp index 84ae3e1fd0d7d..3b03cdecf983e 100644 --- a/src/coreclr/vm/stubmgr.cpp +++ b/src/coreclr/vm/stubmgr.cpp @@ -1009,13 +1009,6 @@ BOOL PrecodeStubManager::DoTraceStub(PCODE stubStartAddress, MethodDesc* pMD = NULL; -#ifdef HAS_COMPACT_ENTRYPOINTS - if (MethodDescChunk::IsCompactEntryPointAtAddress(stubStartAddress)) - { - pMD = MethodDescChunk::GetMethodDescFromCompactEntryPoint(stubStartAddress); - } - else -#endif // HAS_COMPACT_ENTRYPOINTS { // When the target slot points to the fixup part of the fixup precode, we need to compensate // for that to get the actual stub address From b2360adab931519892c5051fa24522b80b213ab1 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 26 Jun 2024 16:30:02 -0700 Subject: [PATCH 22/51] Add new ISOSDacInterface15 api --- src/coreclr/debug/daccess/dacimpl.h | 3 + src/coreclr/debug/daccess/request.cpp | 49 ++++++++++ src/coreclr/inc/sospriv.idl | 10 ++ src/coreclr/pal/prebuilt/idl/sospriv_i.cpp | 3 + src/coreclr/pal/prebuilt/inc/sospriv.h | 105 +++++++++++++++++++++ src/coreclr/vm/frames.cpp | 2 +- src/coreclr/vm/method.cpp | 2 + src/coreclr/vm/method.hpp | 17 ++++ 8 files changed, 190 insertions(+), 1 deletion(-) diff --git a/src/coreclr/debug/daccess/dacimpl.h b/src/coreclr/debug/daccess/dacimpl.h index 4b05e401a06b1..1110d9258dcb9 100644 --- a/src/coreclr/debug/daccess/dacimpl.h +++ b/src/coreclr/debug/daccess/dacimpl.h @@ -1223,6 +1223,9 @@ class ClrDataAccess virtual HRESULT STDMETHODCALLTYPE GetThreadStaticBaseAddress(CLRDATA_ADDRESS methodTable, CLRDATA_ADDRESS thread, CLRDATA_ADDRESS *nonGCStaticsAddress, CLRDATA_ADDRESS *GCStaticsAddress); virtual HRESULT STDMETHODCALLTYPE GetMethodTableInitializationFlags(CLRDATA_ADDRESS methodTable, MethodTableInitializationFlags *initializationStatus); + // ISOSDacInterface15 + virtual HRESULT STDMETHODCALLTYPE GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA_ADDRESS *value); + // // ClrDataAccess. 
// diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index 8269a67d56db5..463bbe4c2fc6c 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -425,6 +425,10 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA { // Now get the slot: *value = mTable->GetRestoredSlotIfExists(slot); + if (*value == 0) + { + hr = E_NOT_VALID_STATE; + } } else { @@ -436,6 +440,51 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA if (pMD->GetSlot() == slot) { *value = pMD->GetMethodEntryPoint_NoAlloc(); + if (*value == 0) + { + hr = E_NOT_VALID_STATE; + } + else + { + hr = S_OK; + } + } + } + } + + SOSDacLeave(); + return hr; +} + +HRESULT +ClrDataAccess::GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA_ADDRESS *value) +{ + if (mt == 0 || value == NULL) + return E_INVALIDARG; + + SOSDacEnter(); + + PTR_MethodTable mTable = PTR_MethodTable(TO_TADDR(mt)); + BOOL bIsFree = FALSE; + if (!DacValidateMethodTable(mTable, bIsFree)) + { + hr = E_INVALIDARG; + } + else if (slot < mTable->GetNumVtableSlots()) + { + *value = HOST_CDADDR(mTable->GetMethodDescForSlot_NoThrow(slot)); + hr = S_OK; + } + else + { + hr = E_INVALIDARG; + MethodTable::IntroducedMethodIterator it(mTable); + for (; it.IsValid() && FAILED(hr); it.Next()) + { + MethodDesc* pMD = it.GetMethodDesc(); + if (pMD->GetSlot() == slot) + { + *value = HOST_CDADDR(pMD); hr = S_OK; } } diff --git a/src/coreclr/inc/sospriv.idl b/src/coreclr/inc/sospriv.idl index c377df57a1530..67a805dd2b367 100644 --- a/src/coreclr/inc/sospriv.idl +++ b/src/coreclr/inc/sospriv.idl @@ -519,3 +519,13 @@ interface ISOSDacInterface14 : IUnknown HRESULT GetThreadStaticBaseAddress(CLRDATA_ADDRESS methodTable, CLRDATA_ADDRESS thread, CLRDATA_ADDRESS *nonGCStaticsAddress, CLRDATA_ADDRESS *GCStaticsAddress); HRESULT GetMethodTableInitializationFlags(CLRDATA_ADDRESS methodTable, MethodTableInitializationFlags *initializationStatus); } + +[ + object, + local, + uuid(7ed81261-52a9-4a23-a358-c3313dea30a8) +] +interface ISOSDacInterface15 : IUnknown +{ + HRESULT GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA_ADDRESS *value); +} diff --git a/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp b/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp index f070ae5816a8a..e2a88e3f38ed0 100644 --- a/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp +++ b/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp @@ -121,6 +121,9 @@ MIDL_DEFINE_GUID(IID, IID_ISOSDacInterface13,0x3176a8ed,0x597b,0x4f54,0xa7,0x1f, MIDL_DEFINE_GUID(IID, IID_ISOSDacInterface14,0x9aa22aca,0x6dc6,0x4a0c,0xb4,0xe0,0x70,0xd2,0x41,0x6b,0x98,0x37); + +MIDL_DEFINE_GUID(IID, IID_ISOSDacInterface15,0x7ed81261,0x52a9,0x4a23,0xa3,0x58,0xc3,0x31,0x3d,0xea,0x30,0xa8); + #undef MIDL_DEFINE_GUID #ifdef __cplusplus diff --git a/src/coreclr/pal/prebuilt/inc/sospriv.h b/src/coreclr/pal/prebuilt/inc/sospriv.h index 855696ef0ce4e..1aa2c83009f1e 100644 --- a/src/coreclr/pal/prebuilt/inc/sospriv.h +++ b/src/coreclr/pal/prebuilt/inc/sospriv.h @@ -3333,6 +3333,27 @@ EXTERN_C const IID IID_ISOSDacInterface13; #define ISOSDacInterface13_TraverseLoaderHeap(This,loaderHeapAddr,kind,pCallback) \ ( (This)->lpVtbl -> TraverseLoaderHeap(This,loaderHeapAddr,kind,pCallback) ) +#define ISOSDacInterface13_GetDomainLoaderAllocator(This,domainAddress,pLoaderAllocator) \ + ( (This)->lpVtbl -> GetDomainLoaderAllocator(This,domainAddress,pLoaderAllocator) ) + +#define 
ISOSDacInterface13_GetLoaderAllocatorHeapNames(This,count,ppNames,pNeeded) \ + ( (This)->lpVtbl -> GetLoaderAllocatorHeapNames(This,count,ppNames,pNeeded) ) + +#define ISOSDacInterface13_GetLoaderAllocatorHeaps(This,loaderAllocator,count,pLoaderHeaps,pKinds,pNeeded) \ + ( (This)->lpVtbl -> GetLoaderAllocatorHeaps(This,loaderAllocator,count,pLoaderHeaps,pKinds,pNeeded) ) + +#define ISOSDacInterface13_GetHandleTableMemoryRegions(This,ppEnum) \ + ( (This)->lpVtbl -> GetHandleTableMemoryRegions(This,ppEnum) ) + +#define ISOSDacInterface13_GetGCBookkeepingMemoryRegions(This,ppEnum) \ + ( (This)->lpVtbl -> GetGCBookkeepingMemoryRegions(This,ppEnum) ) + +#define ISOSDacInterface13_GetGCFreeRegions(This,ppEnum) \ + ( (This)->lpVtbl -> GetGCFreeRegions(This,ppEnum) ) + +#define ISOSDacInterface13_LockedFlush(This) \ + ( (This)->lpVtbl -> LockedFlush(This) ) + #endif /* COBJMACROS */ @@ -3456,6 +3477,90 @@ EXTERN_C const IID IID_ISOSDacInterface14; #endif /* __ISOSDacInterface14_INTERFACE_DEFINED__ */ +#ifndef __ISOSDacInterface15_INTERFACE_DEFINED__ +#define __ISOSDacInterface15_INTERFACE_DEFINED__ + +/* interface ISOSDacInterface15 */ +/* [uuid][local][object] */ + + +EXTERN_C const IID IID_ISOSDacInterface15; + +#if defined(__cplusplus) && !defined(CINTERFACE) + + MIDL_INTERFACE("7ed81261-52a9-4a23-a358-c3313dea30a8") + ISOSDacInterface15 : public IUnknown + { + public: + virtual HRESULT STDMETHODCALLTYPE GetMethodTableSlotMethodDesc( + CLRDATA_ADDRESS mt, + unsigned int slot, + CLRDATA_ADDRESS *value) = 0; + + }; + + +#else /* C style interface */ + + typedef struct ISOSDacInterface15Vtbl + { + BEGIN_INTERFACE + + HRESULT ( STDMETHODCALLTYPE *QueryInterface )( + ISOSDacInterface15 * This, + /* [in] */ REFIID riid, + /* [annotation][iid_is][out] */ + _COM_Outptr_ void **ppvObject); + + ULONG ( STDMETHODCALLTYPE *AddRef )( + ISOSDacInterface15 * This); + + ULONG ( STDMETHODCALLTYPE *Release )( + ISOSDacInterface15 * This); + + HRESULT ( STDMETHODCALLTYPE *GetMethodTableSlotMethodDesc )( + ISOSDacInterface15 * This, + CLRDATA_ADDRESS mt, + unsigned int slot, + CLRDATA_ADDRESS *value); + + END_INTERFACE + } ISOSDacInterface15Vtbl; + + interface ISOSDacInterface15 + { + CONST_VTBL struct ISOSDacInterface15Vtbl *lpVtbl; + }; + + + +#ifdef COBJMACROS + + +#define ISOSDacInterface15_QueryInterface(This,riid,ppvObject) \ + ( (This)->lpVtbl -> QueryInterface(This,riid,ppvObject) ) + +#define ISOSDacInterface15_AddRef(This) \ + ( (This)->lpVtbl -> AddRef(This) ) + +#define ISOSDacInterface15_Release(This) \ + ( (This)->lpVtbl -> Release(This) ) + + +#define ISOSDacInterface15_GetMethodTableSlotMethodDesc(This,mt,slot,value) \ + ( (This)->lpVtbl -> GetMethodTableSlotMethodDesc(This,mt,slot,value) ) + +#endif /* COBJMACROS */ + + +#endif /* C style interface */ + + + + +#endif /* __ISOSDacInterface15_INTERFACE_DEFINED__ */ + + /* Additional Prototypes for ALL interfaces */ /* end of Additional Prototypes */ diff --git a/src/coreclr/vm/frames.cpp b/src/coreclr/vm/frames.cpp index 6d6af83ed5635..fc2076ac4449c 100644 --- a/src/coreclr/vm/frames.cpp +++ b/src/coreclr/vm/frames.cpp @@ -566,7 +566,7 @@ BOOL PrestubMethodFrame::TraceFrame(Thread *thread, BOOL fromPatch, // native code versions, even if they aren't the one that was reported by this trace, see // DebuggerController::PatchTrace() under case TRACE_MANAGED. This alleviates the StubManager from having to prevent the // race that occurs here. 
- trace->InitForStub(GetFunction()->GetMethodEntryPoint()); + trace->InitForStub(GetFunction()->GetMethodEntryPoint_NoAlloc()); } else { diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 00e20bcea3258..7353a59834c79 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -526,6 +526,7 @@ PCODE MethodDesc::GetMethodEntryPoint_NoAlloc() return GetMethodTable()->GetSlot(GetSlot()); } +#ifndef DACCESS_COMPILE PCODE MethodDesc::GetMethodEntryPoint() { CONTRACTL @@ -561,6 +562,7 @@ PCODE MethodDesc::GetMethodEntryPoint() _ASSERTE(GetMethodTable()->IsCanonicalMethodTable()); return GetMethodTable()->GetRestoredSlot(GetSlot()); } +#endif // DACCESS_COMPILE PTR_PCODE MethodDesc::GetAddrOfSlot() { diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 7b8d23ff1ca6b..63d7236110410 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -226,6 +226,7 @@ class MethodDesc void SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); +#ifndef DACCESS_COMPILE PCODE GetInitialEntryPointForCopiedSlot() { CONTRACTL @@ -242,6 +243,7 @@ class MethodDesc } return GetMethodEntryPoint(); } +#endif inline BOOL HasPrecode() { @@ -1206,6 +1208,7 @@ class MethodDesc return GetTemporaryEntryPoint(); } +#ifndef DACCESS_COMPILE // Gets the entry point stored in the primary storage location for backpatching. Entry point slot backpatch uses this entry // point as an oracle to determine if the entry point actually changed and warrants backpatching. PCODE GetEntryPointToBackpatch_Locked() @@ -1218,6 +1221,7 @@ class MethodDesc _ASSERTE(IsVersionableWithVtableSlotBackpatch()); return GetMethodEntryPoint(); } +#endif // DACCESS_COMPILE // Sets the entry point stored in the primary storage location for backpatching. Entry point slot backpatch uses this entry // point as an oracle to determine if the entry point actually changed and warrants backpatching. @@ -1364,6 +1368,7 @@ class MethodDesc ULONG GetRVA(); public: +#ifndef DACCESS_COMPILE // Returns address of code to call. The address is good for one immediate invocation only. // Use GetMultiCallableAddrOfCode() to get address that can be invoked multiple times. // @@ -1377,6 +1382,7 @@ class MethodDesc _ASSERTE(!IsGenericMethodDefinition()); return GetMethodEntryPoint(); } +#endif // This one is used to implement "ldftn". PCODE GetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags = CORINFO_ACCESS_LDFTN); @@ -1398,6 +1404,7 @@ class MethodDesc PCODE GetSingleCallableAddrOfVirtualizedCode(OBJECTREF *orThis, TypeHandle staticTH); PCODE GetMultiCallableAddrOfVirtualizedCode(OBJECTREF *orThis, TypeHandle staticTH); +#ifndef DACCESS_COMPILE // The current method entrypoint. It is simply the value of the current method slot. // GetMethodEntryPoint() should be used to get an opaque method entrypoint, for instance // when copying or searching vtables. It should not be used to get address to call. @@ -1405,8 +1412,18 @@ class MethodDesc // GetSingleCallableAddrOfCode() and GetStableEntryPoint() are aliases with stricter preconditions. // Use of these aliases is as appropriate. // + // Calling this function will allocate an Entrypoint and associate it with the MethodDesc if it + // doesn't already exist. PCODE GetMethodEntryPoint(); +#endif + // The current method entrypoint. It is simply the value of the current method slot. + // GetMethodEntryPoint() should be used to get an opaque method entrypoint, for instance + // when copying or searching vtables. 
It should not be used to get address to call. + // + // GetSingleCallableAddrOfCode() and GetStableEntryPoint() are aliases with stricter preconditions. + // Use of these aliases is as appropriate. + // PCODE GetMethodEntryPoint_NoAlloc(); void EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator); From 837dc0bf702ad566296494d7250de7a829c264f3 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 26 Jun 2024 16:58:36 -0700 Subject: [PATCH 23/51] Fix some naming of NoAlloc to a more clear IfExists suffix --- src/coreclr/debug/daccess/request.cpp | 2 +- src/coreclr/vm/frames.cpp | 2 +- src/coreclr/vm/method.cpp | 28 +++++++++++++-------------- src/coreclr/vm/method.hpp | 10 +++++----- src/coreclr/vm/methodtable.cpp | 4 ++-- 5 files changed, 22 insertions(+), 24 deletions(-) diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index 463bbe4c2fc6c..c0b84a15ffeab 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -439,7 +439,7 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA MethodDesc * pMD = it.GetMethodDesc(); if (pMD->GetSlot() == slot) { - *value = pMD->GetMethodEntryPoint_NoAlloc(); + *value = pMD->GetMethodEntryPointIfExists(); if (*value == 0) { hr = E_NOT_VALID_STATE; diff --git a/src/coreclr/vm/frames.cpp b/src/coreclr/vm/frames.cpp index fc2076ac4449c..78f480a9e4ef1 100644 --- a/src/coreclr/vm/frames.cpp +++ b/src/coreclr/vm/frames.cpp @@ -566,7 +566,7 @@ BOOL PrestubMethodFrame::TraceFrame(Thread *thread, BOOL fromPatch, // native code versions, even if they aren't the one that was reported by this trace, see // DebuggerController::PatchTrace() under case TRACE_MANAGED. This alleviates the StubManager from having to prevent the // race that occurs here. - trace->InitForStub(GetFunction()->GetMethodEntryPoint_NoAlloc()); + trace->InitForStub(GetFunction()->GetMethodEntryPointIfExists()); } else { diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 7353a59834c79..3f5ea596140be 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -497,7 +497,7 @@ Signature MethodDesc::GetSignature() return Signature(pSig, cSig); } -PCODE MethodDesc::GetMethodEntryPoint_NoAlloc() +PCODE MethodDesc::GetMethodEntryPointIfExists() { CONTRACTL { @@ -511,7 +511,7 @@ PCODE MethodDesc::GetMethodEntryPoint_NoAlloc() // Similarly to SetMethodEntryPoint(), it is up to the caller to ensure that calls to this function are appropriately // synchronized - // Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync! + // Keep implementations of MethodDesc::GetMethodEntryPoint, MethodDesc::GetMethodEntryPointIfExists, and MethodDesc::GetAddrOfSlot in sync! if (HasNonVtableSlot()) { @@ -541,7 +541,7 @@ PCODE MethodDesc::GetMethodEntryPoint() // Similarly to SetMethodEntryPoint(), it is up to the caller to ensure that calls to this function are appropriately // synchronized - // Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync! + // Keep implementations of MethodDesc::GetMethodEntryPoint, MethodDesc::GetMethodEntryPointIfExists, and MethodDesc::GetAddrOfSlot in sync! 
if (HasNonVtableSlot()) { @@ -549,13 +549,11 @@ PCODE MethodDesc::GetMethodEntryPoint() TADDR pSlot = dac_cast(this) + size; -#if !defined(DACCESS_COMPILE) if (*PTR_PCODE(pSlot) == (PCODE)NULL) { EnsureSlotFilled(); _ASSERTE(*PTR_PCODE(pSlot) != (PCODE)NULL); } -#endif return *PTR_PCODE(pSlot); } @@ -575,7 +573,7 @@ PTR_PCODE MethodDesc::GetAddrOfSlot() } CONTRACTL_END; - // Keep implementations of MethodDesc::GetMethodEntryPoint and MethodDesc::GetAddrOfSlot in sync! + // Keep implementations of MethodDesc::GetMethodEntryPoint, MethodDesc::GetMethodEntryPointIfExists, and MethodDesc::GetAddrOfSlot in sync! if (HasNonVtableSlot()) { SIZE_T size = GetBaseSize(); @@ -2284,8 +2282,8 @@ BOOL MethodDesc::IsPointingToPrestub() { if (IsVersionableWithVtableSlotBackpatch()) { - PCODE methodEntrypoint = GetMethodEntryPoint_NoAlloc(); - return methodEntrypoint == GetTemporaryEntryPoint_NoAlloc() && methodEntrypoint != (PCODE)NULL; + PCODE methodEntrypoint = GetMethodEntryPointIfExists(); + return methodEntrypoint == GetTemporaryEntryPointIfExists() && methodEntrypoint != (PCODE)NULL; } return TRUE; } @@ -2576,13 +2574,13 @@ PCODE MethodDesc::GetTemporaryEntryPoint() } CONTRACTL_END; - PCODE pEntryPoint = GetTemporaryEntryPoint_NoAlloc(); + PCODE pEntryPoint = GetTemporaryEntryPointIfExists(); if (pEntryPoint != (PCODE)NULL) return pEntryPoint; #ifndef DACCESS_COMPILE EnsureTemporaryEntryPoint(GetLoaderAllocator()); - pEntryPoint = GetTemporaryEntryPoint_NoAlloc(); + pEntryPoint = GetTemporaryEntryPointIfExists(); _ASSERTE(pEntryPoint != (PCODE)NULL); #ifdef _DEBUG @@ -2623,7 +2621,7 @@ void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) } CONTRACTL_END; - if (GetTemporaryEntryPoint_NoAlloc() == (PCODE)NULL) + if (GetTemporaryEntryPointIfExists() == (PCODE)NULL) { EnsureTemporaryEntryPointCore(pLoaderAllocator, NULL); } @@ -2639,7 +2637,7 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator } CONTRACTL_END; - if (GetTemporaryEntryPoint_NoAlloc() == (PCODE)NULL) + if (GetTemporaryEntryPointIfExists() == (PCODE)NULL) { GetMethodDescChunk()->DetermineAndSetIsEligibleForTieredCompilation(); PTR_PCODE pSlot = GetAddrOfSlot(); @@ -2650,10 +2648,10 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator IfFailThrow(EnsureCodeDataExists(pamTracker)); - if (InterlockedCompareExchangeT(&m_codeData->m_pTemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL) + if (InterlockedCompareExchangeT(&m_codeData->TemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL) amt.SuppressRelease(); // We only need to suppress the release if we are working with a MethodDesc which is not newly allocated - PCODE tempEntryPoint = GetTemporaryEntryPoint_NoAlloc(); + PCODE tempEntryPoint = GetTemporaryEntryPointIfExists(); _ASSERTE(tempEntryPoint != (PCODE)NULL); if (*pSlot == (PCODE)NULL) @@ -2735,7 +2733,7 @@ Precode* MethodDesc::GetOrCreatePrecode() } PTR_PCODE pSlot = GetAddrOfSlot(); - PCODE tempEntry = GetTemporaryEntryPoint_NoAlloc(); + PCODE tempEntry = GetTemporaryEntryPointIfExists(); PrecodeType requiredType = GetPrecodeType(); PrecodeType availableType = PRECODE_INVALID; diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 63d7236110410..860ee969a335f 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -162,7 +162,7 @@ enum MethodDescFlags struct MethodDescCodeData final { PTR_MethodDescVersioningState VersioningState; - PCODE m_pTemporaryEntryPoint; + 
PCODE TemporaryEntryPoint; }; using PTR_MethodDescCodeData = DPTR(MethodDescCodeData); @@ -207,7 +207,7 @@ class MethodDesc _ASSERTE(HasStableEntryPoint()); _ASSERTE(!IsVersionableWithVtableSlotBackpatch()); - return GetMethodEntryPoint_NoAlloc(); + return GetMethodEntryPointIfExists(); } void SetMethodEntryPoint(PCODE addr); @@ -215,13 +215,13 @@ class MethodDesc PCODE GetTemporaryEntryPoint(); - PCODE GetTemporaryEntryPoint_NoAlloc() + PCODE GetTemporaryEntryPointIfExists() { LIMITED_METHOD_CONTRACT; PTR_MethodDescCodeData codeData = VolatileLoadWithoutBarrier(&m_codeData); if (codeData == NULL) return (PCODE)NULL; - return VolatileLoadWithoutBarrier(&codeData->m_pTemporaryEntryPoint); + return VolatileLoadWithoutBarrier(&codeData->TemporaryEntryPoint); } void SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); @@ -1424,7 +1424,7 @@ class MethodDesc // GetSingleCallableAddrOfCode() and GetStableEntryPoint() are aliases with stricter preconditions. // Use of these aliases is as appropriate. // - PCODE GetMethodEntryPoint_NoAlloc(); + PCODE GetMethodEntryPointIfExists(); void EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator); diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 56872914f2a8a..26fcf58994b11 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -1706,7 +1706,7 @@ MethodTable::DebugDumpVtable(LPCUTF8 szClassName, BOOL fDebug) name, pszName, IsMdFinal(dwAttrs) ? " (final)" : "", - (VOID *)pMD->GetMethodEntryPoint_NoAlloc(), + (VOID *)pMD->GetMethodEntryPointIfExists(), pMD->GetSlot() ); OutputDebugStringUtf8(buff); @@ -1720,7 +1720,7 @@ MethodTable::DebugDumpVtable(LPCUTF8 szClassName, BOOL fDebug) pMD->GetClass()->GetDebugClassName(), pszName, IsMdFinal(dwAttrs) ? 
" (final)" : "", - (VOID *)pMD->GetMethodEntryPoint_NoAlloc(), + (VOID *)pMD->GetMethodEntryPointIfExists(), pMD->GetSlot() )); } From cb70e1d8c8954bc9f10e99569bc55ec32c9938ea Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 26 Jun 2024 17:10:03 -0700 Subject: [PATCH 24/51] Remove way in which GetTemporaryEntryPoint behaves differently for DAC builds, and then remove GetTemporaryEntrypoint usage from DAC entirely in favor of GetTemporaryEntryPointIfExists --- src/coreclr/debug/daccess/request.cpp | 12 ++++++++---- src/coreclr/vm/method.cpp | 4 ++-- src/coreclr/vm/method.hpp | 6 ++++++ 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index c0b84a15ffeab..71568ebba253e 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -219,11 +219,15 @@ BOOL DacValidateMD(PTR_MethodDesc pMD) if (retval) { - MethodDesc *pMDCheck = MethodDesc::GetMethodDescFromStubAddr(pMD->GetTemporaryEntryPoint(), TRUE); - - if (PTR_HOST_TO_TADDR(pMD) != PTR_HOST_TO_TADDR(pMDCheck)) + PCODE tempEntryPoint = pMD->GetTemporaryEntryPointIfExists(); + if (tempEntryPoint != (PCODE)NULL) { - retval = FALSE; + MethodDesc *pMDCheck = MethodDesc::GetMethodDescFromStubAddr(tempEntryPoint, TRUE); + + if (PTR_HOST_TO_TADDR(pMD) != PTR_HOST_TO_TADDR(pMDCheck)) + { + retval = FALSE; + } } } diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 3f5ea596140be..c9b7a503f146a 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2564,6 +2564,7 @@ MethodDesc* MethodDesc::GetMethodDescFromStubAddr(PCODE addr, BOOL fSpeculative } //******************************************************************************* +#ifndef DACCESS_COMPILE PCODE MethodDesc::GetTemporaryEntryPoint() { CONTRACTL @@ -2578,7 +2579,6 @@ PCODE MethodDesc::GetTemporaryEntryPoint() if (pEntryPoint != (PCODE)NULL) return pEntryPoint; -#ifndef DACCESS_COMPILE EnsureTemporaryEntryPoint(GetLoaderAllocator()); pEntryPoint = GetTemporaryEntryPointIfExists(); _ASSERTE(pEntryPoint != (PCODE)NULL); @@ -2588,9 +2588,9 @@ PCODE MethodDesc::GetTemporaryEntryPoint() _ASSERTE(PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD)); #endif -#endif return pEntryPoint; } +#endif #ifndef DACCESS_COMPILE //******************************************************************************* diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 860ee969a335f..dcddc63bae922 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -213,7 +213,9 @@ class MethodDesc void SetMethodEntryPoint(PCODE addr); BOOL SetStableEntryPointInterlocked(PCODE addr); +#ifndef DACCESS_COMPILE PCODE GetTemporaryEntryPoint(); +#endif PCODE GetTemporaryEntryPointIfExists() { @@ -1196,6 +1198,7 @@ class MethodDesc private: +#ifndef DACCESS_COMPILE // Gets the prestub entry point to use for backpatching. Entry point slot backpatch uses this entry point as an oracle to // determine if the entry point actually changed and warrants backpatching. PCODE GetPrestubEntryPointToBackpatch() @@ -1207,6 +1210,7 @@ class MethodDesc _ASSERTE(IsVersionableWithVtableSlotBackpatch()); return GetTemporaryEntryPoint(); } +#endif // DACCESS_COMPILE #ifndef DACCESS_COMPILE // Gets the entry point stored in the primary storage location for backpatching. 
Entry point slot backpatch uses this entry @@ -1256,11 +1260,13 @@ class MethodDesc BackpatchEntryPointSlots(entryPoint, false /* isPrestubEntryPoint */); } +#ifndef DACCESS_COMPILE void BackpatchToResetEntryPointSlots() { WRAPPER_NO_CONTRACT; BackpatchEntryPointSlots(GetPrestubEntryPointToBackpatch(), true /* isPrestubEntryPoint */); } +#endif // DACCESS_COMPILE private: void BackpatchEntryPointSlots(PCODE entryPoint, bool isPrestubEntryPoint) From 7f0f6140f2d99ef955b06327e03c983f762bd983 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 26 Jun 2024 19:49:21 -0700 Subject: [PATCH 25/51] Attempt to reduce most of the use of EnsureSlotFilled. Untested, but its late. --- src/coreclr/inc/corinfo.h | 2 +- .../tools/Common/JitInterface/CorInfoTypes.cs | 2 +- src/coreclr/vm/arm/stubs.cpp | 7 +- src/coreclr/vm/arm64/stubs.cpp | 7 +- src/coreclr/vm/i386/stublinkerx86.cpp | 15 ++- src/coreclr/vm/jitinterface.cpp | 1 - src/coreclr/vm/loongarch64/stubs.cpp | 7 +- src/coreclr/vm/method.cpp | 121 +++++++++++++----- src/coreclr/vm/method.hpp | 48 +++---- src/coreclr/vm/prestub.cpp | 3 +- src/coreclr/vm/riscv64/stubs.cpp | 8 +- 11 files changed, 140 insertions(+), 81 deletions(-) diff --git a/src/coreclr/inc/corinfo.h b/src/coreclr/inc/corinfo.h index db5499b81fd33..77a90f0725870 100644 --- a/src/coreclr/inc/corinfo.h +++ b/src/coreclr/inc/corinfo.h @@ -890,7 +890,7 @@ enum CORINFO_ACCESS_FLAGS { CORINFO_ACCESS_ANY = 0x0000, // Normal access CORINFO_ACCESS_THIS = 0x0001, // Accessed via the this reference - // UNUSED = 0x0002, + CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT = 0x0002, // Prefer access to a method via slot over using the temporary entrypoint CORINFO_ACCESS_NONNULL = 0x0004, // Instance is guaranteed non-null diff --git a/src/coreclr/tools/Common/JitInterface/CorInfoTypes.cs b/src/coreclr/tools/Common/JitInterface/CorInfoTypes.cs index 3eba98954c1c5..b88f6a60cd26b 100644 --- a/src/coreclr/tools/Common/JitInterface/CorInfoTypes.cs +++ b/src/coreclr/tools/Common/JitInterface/CorInfoTypes.cs @@ -546,7 +546,7 @@ public enum CORINFO_ACCESS_FLAGS { CORINFO_ACCESS_ANY = 0x0000, // Normal access CORINFO_ACCESS_THIS = 0x0001, // Accessed via the this reference - // CORINFO_ACCESS_UNUSED = 0x0002, + CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT = 0x0002, // Prefer access to a method via slot over using the temporary entrypoint CORINFO_ACCESS_NONNULL = 0x0004, // Instance is guaranteed non-null diff --git a/src/coreclr/vm/arm/stubs.cpp b/src/coreclr/vm/arm/stubs.cpp index 6642a688649b7..37cdadbad6373 100644 --- a/src/coreclr/vm/arm/stubs.cpp +++ b/src/coreclr/vm/arm/stubs.cpp @@ -1381,15 +1381,16 @@ VOID StubLinkerCPU::EmitShuffleThunk(ShuffleEntry *pShuffleEntryArray) void StubLinkerCPU::ThumbEmitTailCallManagedMethod(MethodDesc *pMD) { - pMD->EnsureSlotFilled(); + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible. 
- if (pMD->HasStableEntryPoint()) + if (multiCallableAddr != (PCODE)NULL) { // mov r12, #entry_point - ThumbEmitMovConstant(ThumbReg(12), (TADDR)pMD->GetStableEntryPoint()); + ThumbEmitMovConstant(ThumbReg(12), (TADDR)multiCallableAddr); } else { + _ASSERTE(!pMD->HasStableEntryPoint()); // mov r12, #slotaddress ThumbEmitMovConstant(ThumbReg(12), (TADDR)pMD->GetAddrOfSlot()); diff --git a/src/coreclr/vm/arm64/stubs.cpp b/src/coreclr/vm/arm64/stubs.cpp index 134f9735ab384..aca5d2f95e6c6 100644 --- a/src/coreclr/vm/arm64/stubs.cpp +++ b/src/coreclr/vm/arm64/stubs.cpp @@ -1625,15 +1625,16 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { - pMD->EnsureTemporaryEntryPoint(pMD->GetLoaderAllocator()); + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible. - if (pMD->HasStableEntryPoint()) + if (multiCallableAddr != (PCODE)NULL) { - EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetStableEntryPoint()), fTailCall, FALSE); + EmitCallLabel(NewExternalCodeLabel((LPVOID)multiCallableAddr), fTailCall, FALSE); } else { + _ASSERTE(!pMD->HasStableEntryPoint()); EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE); } } diff --git a/src/coreclr/vm/i386/stublinkerx86.cpp b/src/coreclr/vm/i386/stublinkerx86.cpp index b5e2afab3be75..2c2a012923dc7 100644 --- a/src/coreclr/vm/i386/stublinkerx86.cpp +++ b/src/coreclr/vm/i386/stublinkerx86.cpp @@ -3060,14 +3060,15 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s #ifdef TARGET_AMD64 VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) { - pMD->EnsureSlotFilled(); + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); - if (pMD->HasStableEntryPoint()) + if (multiCallableAddr != (PCODE)NULL) { - X86EmitRegLoad(kRAX, pMD->GetStableEntryPoint());// MOV RAX, DWORD + X86EmitRegLoad(kRAX, multiCallableAddr);// MOV RAX, DWORD } else { + _ASSERTE(!pMD->HasStableEntryPoint()); X86EmitRegLoad(kRAX, (UINT_PTR)pMD->GetAddrOfSlot()); // MOV RAX, DWORD X86EmitIndexRegLoad(kRAX, kRAX); // MOV RAX, [RAX] @@ -3081,14 +3082,16 @@ VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) EmitLoadMethodAddressIntoAX(pMD); Emit16(X86_INSTR_JMP_EAX); #else - pMD->EnsureSlotFilled(); + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible - if (pMD->HasStableEntryPoint()) + if (multiCallableAddr != (PCODE)NULL) { - X86EmitNearJump(NewExternalCodeLabel((LPVOID) pMD->GetStableEntryPoint())); + X86EmitNearJump(NewExternalCodeLabel((LPVOID)multiCallableAddr)); } else { + _ASSERTE(!pMD->HasStableEntryPoint()); + // jmp [slot] Emit16(0x25ff); Emit32((DWORD)(size_t)pMD->GetAddrOfSlot()); diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index 1cc377c14240e..19254f711d9c3 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -9154,7 +9154,6 @@ void CEEInfo::getFunctionEntryPoint(CORINFO_METHOD_HANDLE ftnHnd, // should never get here for EnC methods or if interception via remoting stub is required _ASSERTE(!ftn->InEnCEnabledModule()); - ftn->EnsureSlotFilled(); ret = (void *)ftn->GetAddrOfSlot(); accessType = IAT_PVALUE; diff --git a/src/coreclr/vm/loongarch64/stubs.cpp b/src/coreclr/vm/loongarch64/stubs.cpp 
index 2f1975a22d8bc..6b771ba22a8c1 100644 --- a/src/coreclr/vm/loongarch64/stubs.cpp +++ b/src/coreclr/vm/loongarch64/stubs.cpp @@ -1476,15 +1476,16 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { - pMD->EnsureSlotFilled(); + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible. - if (pMD->HasStableEntryPoint()) + if (multiCallableAddr != (PCODE)NULL) { - EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetStableEntryPoint()), fTailCall, FALSE); + EmitCallLabel(NewExternalCodeLabel((LPVOID)multiCallableAddr), fTailCall, FALSE); } else { + _ASSERTE(!pMD->HasStableEntryPoint()); EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE); } } diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index c9b7a503f146a..d02d16f4c1f32 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -958,6 +958,57 @@ WORD MethodDesc::InterlockedUpdateFlags3(WORD wMask, BOOL fSet) return wOldState; } +BYTE MethodDesc::InterlockedUpdateFlags4(BYTE bMask, BOOL fSet) +{ + LIMITED_METHOD_CONTRACT; + + BYTE bOldState = m_bFlags4; + DWORD dwMask = bMask; + + // We need to make this operation atomic (multiple threads can play with the flags field at the same time). But the flags field + // is a word and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned + // dword (along with the intended bitfield mask). Note that we make the assumption that the flags word is aligned itself, so we + // only have four possibilities: the field already lies on a dword boundary or it's 1, 2 or 3 bytes out + LONG* pdwFlags = (LONG*)((ULONG_PTR)&m_bFlags4 - (offsetof(MethodDesc, m_bFlags4) & 0x3)); + +#ifdef _PREFAST_ +#pragma warning(push) +#pragma warning(disable:6326) // "Suppress PREFast warning about comparing two constants" +#endif // _PREFAST_ + +#if BIGENDIAN + if ((offsetof(MethodDesc, m_bFlags4) & 0x3) == 0) { +#else // !BIGENDIAN + if ((offsetof(MethodDesc, m_bFlags4) & 0x3) == 3) { +#endif // !BIGENDIAN + dwMask <<= 24; + } +#if BIGENDIAN + else if ((offsetof(MethodDesc, m_bFlags4) & 0x3) == 1) { +#else // !BIGENDIAN + else if ((offsetof(MethodDesc, m_bFlags4) & 0x3) == 2) { +#endif // !BIGENDIAN + dwMask <<= 16; + } +#if BIGENDIAN + else if ((offsetof(MethodDesc, m_bFlags4) & 0x3) == 2) { +#else // !BIGENDIAN + else if ((offsetof(MethodDesc, m_bFlags4) & 0x3) == 1) { +#endif // !BIGENDIAN + dwMask <<= 8; + } +#ifdef _PREFAST_ +#pragma warning(pop) +#endif + + if (fSet) + InterlockedOr(pdwFlags, dwMask); + else + InterlockedAnd(pdwFlags, ~dwMask); + + return bOldState; +} + WORD MethodDescChunk::InterlockedUpdateFlags(WORD wMask, BOOL fSet) { LIMITED_METHOD_CONTRACT; @@ -2102,7 +2153,7 @@ PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags } if (RequiresStableEntryPoint() && !HasStableEntryPoint()) - EnsureSlotFilled(); + GetOrCreatePrecode(); // We create stable entrypoints for these upfront if (IsWrapperStub() || IsEnCAddedMethod()) @@ -2143,6 +2194,10 @@ PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags if (IsVersionableWithVtableSlotBackpatch()) { // Caller has to call via slot or allocate funcptr stub + + // But we need to ensure that some entrypoint is allocated and present in the slot, so that + // it can be used. 
+ EnsureTemporaryEntryPoint(); return (PCODE)NULL; } @@ -2150,11 +2205,24 @@ PCODE MethodDesc::TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_FLAGS accessFlags if (MayHavePrecode()) return GetOrCreatePrecode()->GetEntryPoint(); - // - // Embed call to the temporary entrypoint into the code. It will be patched - // to point to the actual code later. - // - return GetTemporaryEntryPoint(); + _ASSERTE(!RequiresStableEntryPoint()); + + if (accessFlags & CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT) + { + // If this access flag is set, prefer returning NULL over returning the temporary entrypoint + // But we need to ensure that some entrypoint is allocated and present in the slot, so that + // it can be used. + EnsureTemporaryEntryPoint(); + return (PCODE)NULL; + } + else + { + // + // Embed call to the temporary entrypoint into the code. It will be patched + // to point to the actual code later. + // + return GetTemporaryEntryPoint(); + } } //******************************************************************************* @@ -2401,7 +2469,8 @@ BOOL MethodDesc::RequiresStableEntryPoint(BOOL fEstimateForChunk /*=FALSE*/) if (fEstimateForChunk) return RequiresStableEntryPointCore(fEstimateForChunk); BOOL fRequiresStableEntryPoint = RequiresStableEntryPointCore(FALSE); - VolatileStore(&m_bFlags4, (BYTE)(enum_flag4_ComputedRequiresStableEntryPoint | (fRequiresStableEntryPoint ? enum_flag4_RequiresStableEntryPoint : 0))); + BYTE requiresStableEntrypointFlags = (BYTE)(enum_flag4_ComputedRequiresStableEntryPoint | (fRequiresStableEntryPoint ? enum_flag4_RequiresStableEntryPoint : 0)); + InterlockedUpdateFlags4(requiresStableEntrypointFlags, TRUE); return fRequiresStableEntryPoint; } } @@ -2651,13 +2720,14 @@ void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator if (InterlockedCompareExchangeT(&m_codeData->TemporaryEntryPoint, pPrecode->GetEntryPoint(), (PCODE)NULL) == (PCODE)NULL) amt.SuppressRelease(); // We only need to suppress the release if we are working with a MethodDesc which is not newly allocated - PCODE tempEntryPoint = GetTemporaryEntryPointIfExists(); + PCODE tempEntryPoint = m_codeData->TemporaryEntryPoint; _ASSERTE(tempEntryPoint != (PCODE)NULL); if (*pSlot == (PCODE)NULL) { InterlockedCompareExchangeT(pSlot, tempEntryPoint, (PCODE)NULL); } + InterlockedUpdateFlags4(enum_flag4_TemporaryEntryPointAssigned, TRUE); } } @@ -2732,38 +2802,21 @@ Precode* MethodDesc::GetOrCreatePrecode() return GetPrecode(); } - PTR_PCODE pSlot = GetAddrOfSlot(); - PCODE tempEntry = GetTemporaryEntryPointIfExists(); + PCODE tempEntry = GetTemporaryEntryPoint(); +#ifdef _DEBUG + PTR_PCODE pSlot = GetAddrOfSlot(); PrecodeType requiredType = GetPrecodeType(); - PrecodeType availableType = PRECODE_INVALID; - - if (tempEntry != (PCODE)NULL) - { - availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); - } - - // Allocate the precode if necessary - if (requiredType != availableType) - { - // If we took this path for dynamic methods, the precode may leak since we may allocate it in domain-neutral loader heap. 
- _ASSERTE(!IsLCGMethod()); - - AllocMemTracker amt; - Precode* pPrecode = Precode::Allocate(requiredType, this, GetLoaderAllocator(), &amt); - - if (InterlockedCompareExchangeT(pSlot, pPrecode->GetEntryPoint(), tempEntry) == tempEntry) - amt.SuppressRelease(); - } - else if (*pSlot == (PCODE)NULL) - { - InterlockedCompareExchangeT(pSlot, tempEntry, (PCODE)NULL); - } + PrecodeType availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); + _ASSERTE(requiredType == availableType); + _ASSERTE(*pSlot != NULL); + _ASSERTE(*pSlot == tempEntry); +#endif // Set the flags atomically InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint | enum_flag3_HasPrecode, TRUE); - return Precode::GetPrecodeFromEntryPoint(*pSlot); + return Precode::GetPrecodeFromEntryPoint(tempEntry); } bool MethodDesc::DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk() diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index dcddc63bae922..da71e5e9afe7b 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -220,10 +220,19 @@ class MethodDesc PCODE GetTemporaryEntryPointIfExists() { LIMITED_METHOD_CONTRACT; - PTR_MethodDescCodeData codeData = VolatileLoadWithoutBarrier(&m_codeData); - if (codeData == NULL) + BYTE flags4 = VolatileLoad(&m_bFlags4); + if (flags4 & enum_flag4_TemporaryEntryPointAssigned) + { + PTR_MethodDescCodeData codeData = VolatileLoadWithoutBarrier(&m_codeData); + _ASSERTE(codeData != NULL); + PCODE temporaryEntryPoint = codeData->TemporaryEntryPoint; + _ASSERTE(temporaryEntryPoint != (PCODE)NULL); + return temporaryEntryPoint; + } + else + { return (PCODE)NULL; - return VolatileLoadWithoutBarrier(&codeData->TemporaryEntryPoint); + } } void SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); @@ -1443,27 +1452,15 @@ class MethodDesc { WRAPPER_NO_CONTRACT; EnsureTemporaryEntryPoint(GetLoaderAllocator()); + +#ifdef _DEBUG PCODE *pSlot = GetAddrOfSlot(); - if (*pSlot == (PCODE)NULL) - { - if (RequiresStableEntryPoint()) - { - GetOrCreatePrecode(); - } - else - { - *pSlot = GetTemporaryEntryPoint(); - } - } - else + _ASSERTE(*pSlot != (PCODE)NULL); +#endif + + if (RequiresStableEntryPoint() && !HasStableEntryPoint()) { - if (RequiresStableEntryPoint() && !HasStableEntryPoint()) - { - _ASSERTE(*pSlot == GetTemporaryEntryPoint()); - // We may be in a race with another thread that will be setting HasStableEntryPoint - // Just set it now along with HasPrecode - InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint | enum_flag3_HasPrecode, TRUE); - } + GetOrCreatePrecode(); } } #endif // DACCESS_COMPILE @@ -1690,7 +1687,10 @@ class MethodDesc enum { enum_flag4_ComputedRequiresStableEntryPoint = 0x01, enum_flag4_RequiresStableEntryPoint = 0x02, + enum_flag4_TemporaryEntryPointAssigned = 0x04, }; + + void InterlockedSetFlags4(BYTE mask, BYTE newValue); BYTE m_bFlags4; // Used to hold more flags WORD m_wSlotNumber; // The slot number of this MethodDesc in the vtable array. 
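// A minimal, self-contained sketch of the idea behind the InterlockedUpdateFlags4
// helper added earlier in this patch: the byte-sized m_bFlags4 field is updated
// atomically by applying a shifted mask to the aligned 32-bit word that contains it.
// Assumptions: little-endian byte order; PackedFlags and UpdateByte are illustrative
// names only, not runtime APIs, and this is not the runtime's actual implementation.
#include <atomic>
#include <cstdint>
#include <cstdio>

struct PackedFlags
{
    std::atomic<uint32_t> word{0};

    // Set or clear 'mask' within the logical byte at 'byteIndex' using a word-wide
    // atomic OR/AND of the shifted mask; returns that byte's previous value.
    uint8_t UpdateByte(unsigned byteIndex, uint8_t mask, bool set)
    {
        uint32_t shifted = uint32_t(mask) << (byteIndex * 8);
        uint32_t prev = set ? word.fetch_or(shifted) : word.fetch_and(~shifted);
        return uint8_t(prev >> (byteIndex * 8));
    }
};

int main()
{
    PackedFlags f;
    // Byte 3 stands in for m_bFlags4; 0x04 stands in for enum_flag4_TemporaryEntryPointAssigned.
    f.UpdateByte(3, 0x04, true);
    std::printf("word = 0x%08x\n", (unsigned)f.word.load());
    return 0;
}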
@@ -1801,8 +1801,6 @@ class MethodDesc SIZE_T SizeOf(); - WORD InterlockedUpdateFlags3(WORD wMask, BOOL fSet); - inline BOOL HaveValueTypeParametersBeenWalked() { LIMITED_METHOD_DAC_CONTRACT; @@ -2191,6 +2189,8 @@ class MethodDescChunk #ifndef DACCESS_COMPILE WORD InterlockedUpdateFlags(WORD wMask, BOOL fSet); + WORD InterlockedUpdateFlags3(WORD wMask, BOOL fSet); + BYTE InterlockedUpdateFlags4(BYTE bMask, BOOL fSet); #endif public: diff --git a/src/coreclr/vm/prestub.cpp b/src/coreclr/vm/prestub.cpp index 01c77de70fcf0..ad518f984f624 100644 --- a/src/coreclr/vm/prestub.cpp +++ b/src/coreclr/vm/prestub.cpp @@ -2842,8 +2842,7 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT, CallerGCMode callerGCMo { pCode = GetStubForInteropMethod(this); - EnsureSlotFilled(); - GetPrecode()->SetTargetInterlocked(pCode); + GetOrCreatePrecode()->SetTargetInterlocked(pCode); RETURN GetStableEntryPoint(); } diff --git a/src/coreclr/vm/riscv64/stubs.cpp b/src/coreclr/vm/riscv64/stubs.cpp index dac2189c43274..dafe58b725f37 100644 --- a/src/coreclr/vm/riscv64/stubs.cpp +++ b/src/coreclr/vm/riscv64/stubs.cpp @@ -1521,14 +1521,16 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { - pMD->EnsureSlotFilled(); + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); + // Use direct call if possible. - if (pMD->HasStableEntryPoint()) + if (multiCallableAddr != (PCODE)NULL) { - EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetStableEntryPoint()), fTailCall, FALSE); + EmitCallLabel(NewExternalCodeLabel((LPVOID)multiCallableAddr), fTailCall, FALSE); } else { + _ASSERTE(!pMD->HasStableEntryPoint()); EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE); } } From f6a82603d7a8a872ce1045412601f34e8bd18be1 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 26 Jun 2024 19:58:34 -0700 Subject: [PATCH 26/51] Fix the build before sending to github --- src/coreclr/vm/method.cpp | 6 +++--- src/coreclr/vm/method.hpp | 20 ++++++++++++++++---- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index d02d16f4c1f32..6706cab670a4f 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2648,7 +2648,7 @@ PCODE MethodDesc::GetTemporaryEntryPoint() if (pEntryPoint != (PCODE)NULL) return pEntryPoint; - EnsureTemporaryEntryPoint(GetLoaderAllocator()); + EnsureTemporaryEntryPoint(); pEntryPoint = GetTemporaryEntryPointIfExists(); _ASSERTE(pEntryPoint != (PCODE)NULL); @@ -2680,7 +2680,7 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc } } -void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) +void MethodDesc::EnsureTemporaryEntryPoint() { CONTRACTL { @@ -2692,7 +2692,7 @@ void MethodDesc::EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator) if (GetTemporaryEntryPointIfExists() == (PCODE)NULL) { - EnsureTemporaryEntryPointCore(pLoaderAllocator, NULL); + EnsureTemporaryEntryPointCore(GetLoaderAllocator(), NULL); } } diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index da71e5e9afe7b..6bbecb8853739 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -643,7 +643,11 @@ class MethodDesc #endif // !FEATURE_COMINTEROP // Update flags in a thread safe manner. 
+#ifndef DACCESS_COMPILE WORD InterlockedUpdateFlags(WORD wMask, BOOL fSet); + WORD InterlockedUpdateFlags3(WORD wMask, BOOL fSet); + BYTE InterlockedUpdateFlags4(BYTE bMask, BOOL fSet); +#endif // If the method is in an Edit and Continue (EnC) module, then // we DON'T want to backpatch this, ever. We MUST always call @@ -662,11 +666,13 @@ class MethodDesc return (m_wFlags & mdfNotInline); } +#ifndef DACCESS_COMPILE inline void SetNotInline(BOOL set) { WRAPPER_NO_CONTRACT; InterlockedUpdateFlags(mdfNotInline, set); } +#endif // DACCESS_COMPILE #ifndef DACCESS_COMPILE VOID EnsureActive(); @@ -686,11 +692,13 @@ class MethodDesc //================================================================ // +#ifndef DACCESS_COMPILE inline void ClearFlagsOnUpdate() { WRAPPER_NO_CONTRACT; SetNotInline(FALSE); } +#endif // DACCESS_COMPILE // Restore the MethodDesc to it's initial, pristine state, so that // it can be reused for new code (eg. for EnC, method rental, etc.) @@ -1441,7 +1449,7 @@ class MethodDesc // PCODE GetMethodEntryPointIfExists(); - void EnsureTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator); + void EnsureTemporaryEntryPoint(); // pamTracker must be NULL for a MethodDesc which cannot be freed by an external AllocMemTracker // OR must be set to point to the same AllocMemTracker that controls allocation of the MethodDesc @@ -1451,7 +1459,7 @@ class MethodDesc void EnsureSlotFilled() { WRAPPER_NO_CONTRACT; - EnsureTemporaryEntryPoint(GetLoaderAllocator()); + EnsureTemporaryEntryPoint(); #ifdef _DEBUG PCODE *pSlot = GetAddrOfSlot(); @@ -1807,11 +1815,13 @@ class MethodDesc return (m_wFlags & mdfValueTypeParametersWalked) != 0; } +#ifndef DACCESS_COMPILE inline void SetValueTypeParametersWalked() { LIMITED_METHOD_CONTRACT; InterlockedUpdateFlags(mdfValueTypeParametersWalked, TRUE); } +#endif // DACCESS_COMPILE inline BOOL HaveValueTypeParametersBeenLoaded() { @@ -1819,11 +1829,13 @@ class MethodDesc return (m_wFlags & mdfValueTypeParametersLoaded) != 0; } +#ifndef DACCESS_COMPILE inline void SetValueTypeParametersLoaded() { LIMITED_METHOD_CONTRACT; InterlockedUpdateFlags(mdfValueTypeParametersLoaded, TRUE); } +#endif // DACCESS_COMPILE #ifdef FEATURE_TYPEEQUIVALENCE inline BOOL DoesNotHaveEquivalentValuetypeParameters() @@ -1832,11 +1844,13 @@ class MethodDesc return (m_wFlags & mdfDoesNotHaveEquivalentValuetypeParameters) != 0; } +#ifndef DACCESS_COMPILE inline void SetDoesNotHaveEquivalentValuetypeParameters() { LIMITED_METHOD_CONTRACT; InterlockedUpdateFlags(mdfDoesNotHaveEquivalentValuetypeParameters, TRUE); } +#endif // DACCESS_COMPILE #endif // FEATURE_TYPEEQUIVALENCE // @@ -2189,8 +2203,6 @@ class MethodDescChunk #ifndef DACCESS_COMPILE WORD InterlockedUpdateFlags(WORD wMask, BOOL fSet); - WORD InterlockedUpdateFlags3(WORD wMask, BOOL fSet); - BYTE InterlockedUpdateFlags4(BYTE bMask, BOOL fSet); #endif public: From 97e8f7e140348840d38acf4f2eba3fe4c96922f1 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 27 Jun 2024 07:26:27 -0700 Subject: [PATCH 27/51] Fix unix build break, and invalid assert --- src/coreclr/debug/daccess/request.cpp | 4 ++-- src/coreclr/vm/method.cpp | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index c54bbd49bf676..888bb6b2185a2 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -431,7 +431,7 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA *value = 
mTable->GetRestoredSlotIfExists(slot); if (*value == 0) { - hr = E_NOT_VALID_STATE; + hr = S_FALSE; } } else @@ -446,7 +446,7 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA *value = pMD->GetMethodEntryPointIfExists(); if (*value == 0) { - hr = E_NOT_VALID_STATE; + hr = S_FALSE; } else { diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index ced1635cd225e..dfa8b4ccbb8a9 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2671,7 +2671,7 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc #ifdef _DEBUG PTR_PCODE pSlot = GetAddrOfSlot(); - _ASSERTE(*pSlot == (PCODE)NULL); + _ASSERTE(*pSlot != (PCODE)NULL); #endif if (RequiresStableEntryPoint()) From abae47448286d706665007f033940913975f80d2 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 27 Jun 2024 10:27:34 -0700 Subject: [PATCH 28/51] Improve assertion checks to validate that we don't allocate temporary entrypoints that will be orphaned if the type doesn't actually end up published. --- src/coreclr/vm/clsload.cpp | 6 ++++++ src/coreclr/vm/method.cpp | 6 ++++++ src/coreclr/vm/methodtable.cpp | 4 ++++ src/coreclr/vm/methodtable.h | 25 ++++++++++++++++++++++++- 4 files changed, 40 insertions(+), 1 deletion(-) diff --git a/src/coreclr/vm/clsload.cpp b/src/coreclr/vm/clsload.cpp index a85db9e3b96d1..6f3eebf105f2e 100644 --- a/src/coreclr/vm/clsload.cpp +++ b/src/coreclr/vm/clsload.cpp @@ -2783,6 +2783,12 @@ TypeHandle ClassLoader::PublishType(const TypeKey *pTypeKey, TypeHandle typeHnd) } CONTRACTL_END; +#ifdef _DEBUG + if (!typeHnd.IsTypeDesc()) + { + typeHnd.AsMethodTable()->GetAuxiliaryData()->SetIsPublished(); + } +#endif if (pTypeKey->IsConstructed()) { diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index dfa8b4ccbb8a9..441bc400f1788 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2644,6 +2644,8 @@ PCODE MethodDesc::GetTemporaryEntryPoint() } CONTRACTL_END; + _ASSERTE(GetMethodTable()->GetAuxiliaryData()->IsPublished()); + PCODE pEntryPoint = GetTemporaryEntryPointIfExists(); if (pEntryPoint != (PCODE)NULL) return pEntryPoint; @@ -2692,6 +2694,10 @@ void MethodDesc::EnsureTemporaryEntryPoint() } CONTRACTL_END; + // Since this can allocate memory that won't be freed, we need to make sure that the associated MethodTable + // is fully allocated and permanent. + _ASSERTE(GetMethodTable()->GetAuxiliaryData()->IsPublished()); + if (GetTemporaryEntryPointIfExists() == (PCODE)NULL) { EnsureTemporaryEntryPointCore(GetLoaderAllocator(), NULL); diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 0cd31752072f8..783d7f0200194 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -7585,6 +7585,10 @@ PCODE MethodTable::GetRestoredSlot(DWORD slotNumber) SUPPORTS_DAC; } CONTRACTL_END; + // Since this can allocate memory that won't be freed until the LoaderAllocator is release, we need + // to make sure that the associated MethodTable is fully allocated and permanent. 
+ _ASSERTE(GetAuxiliaryData()->IsPublished()); + // // Keep in sync with code:MethodTable::GetRestoredSlotMT // diff --git a/src/coreclr/vm/methodtable.h b/src/coreclr/vm/methodtable.h index 89952a43a9620..5b2536cfef904 100644 --- a/src/coreclr/vm/methodtable.h +++ b/src/coreclr/vm/methodtable.h @@ -332,7 +332,9 @@ struct MethodTableAuxiliaryData enum_flag_CanCompareBitsOrUseFastGetHashCode = 0x0004, // Is any field type or sub field type overridden Equals or GetHashCode enum_flag_HasApproxParent = 0x0010, - // enum_unused = 0x0020, +#ifdef _DEBUG + enum_flag_IsPublished = 0x0020, +#endif enum_flag_IsNotFullyLoaded = 0x0040, enum_flag_DependenciesLoaded = 0x0080, // class and all dependencies loaded up to CLASS_LOADED_BUT_NOT_VERIFIED @@ -496,6 +498,24 @@ struct MethodTableAuxiliaryData } +#ifdef _DEBUG +#ifndef DACCESS_COMPILE + void SetIsPublished() + { + LIMITED_METHOD_CONTRACT; + + // Array's parent is always precise + m_dwFlags |= (MethodTableAuxiliaryData::enum_flag_IsPublished); + } +#endif + + bool IsPublished() const + { + LIMITED_METHOD_CONTRACT; + return (VolatileLoad(&m_dwFlags) & enum_flag_IsPublished); + } +#endif // _DEBUG + // The NonVirtualSlots array grows backwards, so this pointer points at just AFTER the first entry in the array // To access, use a construct like... GetNonVirtualSlotsArray(pAuxiliaryData)[-(1 + index)] static inline PTR_PCODE GetNonVirtualSlotsArray(PTR_Const_MethodTableAuxiliaryData pAuxiliaryData) @@ -1618,6 +1638,9 @@ class MethodTable // MethodDesc* GetMethodDescForSlot(DWORD slot); + + // This api produces the same result as GetMethodDescForSlot_NoThrow, but it uses a variation on the + // algorithm that does not allocate a temporary entrypoint for the slot if it doesn't exist. MethodDesc* GetMethodDescForSlot_NoThrow(DWORD slot); static MethodDesc* GetMethodDescForSlotAddress(PCODE addr, BOOL fSpeculative = FALSE); From 82732744658aeb1478303b7a0a79fc9f46a61446 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 27 Jun 2024 11:09:53 -0700 Subject: [PATCH 29/51] Remove unused parameters and add contracts --- src/coreclr/vm/arm/stubs.cpp | 2 ++ src/coreclr/vm/arm64/stubs.cpp | 4 ++++ src/coreclr/vm/array.cpp | 5 ++--- src/coreclr/vm/class.cpp | 2 +- src/coreclr/vm/class.h | 1 - src/coreclr/vm/clsload.cpp | 2 +- src/coreclr/vm/dynamicmethod.cpp | 2 +- src/coreclr/vm/genmeth.cpp | 6 +++--- src/coreclr/vm/i386/stublinkerx86.cpp | 4 ++++ src/coreclr/vm/ilstubcache.cpp | 2 +- src/coreclr/vm/loongarch64/stubs.cpp | 4 ++++ src/coreclr/vm/method.cpp | 12 ++++++++---- src/coreclr/vm/method.hpp | 4 ++-- src/coreclr/vm/methodtablebuilder.cpp | 2 +- src/coreclr/vm/riscv64/stubs.cpp | 4 ++++ 15 files changed, 38 insertions(+), 18 deletions(-) diff --git a/src/coreclr/vm/arm/stubs.cpp b/src/coreclr/vm/arm/stubs.cpp index 089e1ed7a38c5..7e54e0d4de156 100644 --- a/src/coreclr/vm/arm/stubs.cpp +++ b/src/coreclr/vm/arm/stubs.cpp @@ -1381,6 +1381,8 @@ VOID StubLinkerCPU::EmitShuffleThunk(ShuffleEntry *pShuffleEntryArray) void StubLinkerCPU::ThumbEmitTailCallManagedMethod(MethodDesc *pMD) { + STANDARD_VM_CONTRACT; + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible. 
if (multiCallableAddr != (PCODE)NULL) diff --git a/src/coreclr/vm/arm64/stubs.cpp b/src/coreclr/vm/arm64/stubs.cpp index 94d113cfcac75..02d6aaaae5e6f 100644 --- a/src/coreclr/vm/arm64/stubs.cpp +++ b/src/coreclr/vm/arm64/stubs.cpp @@ -1614,6 +1614,8 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect) { + STANDARD_VM_CONTRACT; + BranchInstructionFormat::VariationCodes variationCode = BranchInstructionFormat::VariationCodes::BIF_VAR_JUMP; if (!fTailCall) variationCode = static_cast(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_CALL); @@ -1626,6 +1628,8 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { + STANDARD_VM_CONTRACT; + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible. diff --git a/src/coreclr/vm/array.cpp b/src/coreclr/vm/array.cpp index 3b2b778f4c50f..c06a7761cef9f 100644 --- a/src/coreclr/vm/array.cpp +++ b/src/coreclr/vm/array.cpp @@ -185,7 +185,6 @@ void ArrayClass::InitArrayMethodDesc( PCCOR_SIGNATURE pShortSig, DWORD cShortSig, DWORD dwVtableSlot, - LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) { STANDARD_VM_CONTRACT; @@ -198,7 +197,7 @@ void ArrayClass::InitArrayMethodDesc( pNewMD->SetStoredMethodSig(pShortSig, cShortSig); _ASSERTE(!pNewMD->MayHaveNativeCode()); - pNewMD->SetTemporaryEntryPoint(pLoaderAllocator, pamTracker); + pNewMD->SetTemporaryEntryPoint(pamTracker); #ifdef _DEBUG _ASSERTE(pNewMD->GetMethodName() && GetDebugClassName()); @@ -509,7 +508,7 @@ MethodTable* Module::CreateArrayMethodTable(TypeHandle elemTypeHnd, CorElementTy pClass->GenerateArrayAccessorCallSig(dwFuncRank, dwFuncType, &pSig, &cSig, pAllocator, pamTracker, FALSE); - pClass->InitArrayMethodDesc(pNewMD, pSig, cSig, numVirtuals + dwMethodIndex, pAllocator, pamTracker); + pClass->InitArrayMethodDesc(pNewMD, pSig, cSig, numVirtuals + dwMethodIndex, pamTracker); dwMethodIndex++; } diff --git a/src/coreclr/vm/class.cpp b/src/coreclr/vm/class.cpp index 6c0052636f6ae..9ac7e1e41f405 100644 --- a/src/coreclr/vm/class.cpp +++ b/src/coreclr/vm/class.cpp @@ -801,7 +801,7 @@ HRESULT EEClass::AddMethodDesc( COMMA_INDEBUG(NULL) ); - pNewMD->SetTemporaryEntryPoint(pAllocator, &dummyAmTracker); + pNewMD->SetTemporaryEntryPoint(&dummyAmTracker); // [TODO] if an exception is thrown, asserts will fire in EX_CATCH_HRESULT() // during an EnC operation due to the debugger thread not being able to diff --git a/src/coreclr/vm/class.h b/src/coreclr/vm/class.h index a9b44e482ac6d..eb55012a747b6 100644 --- a/src/coreclr/vm/class.h +++ b/src/coreclr/vm/class.h @@ -1975,7 +1975,6 @@ class ArrayClass : public EEClass PCCOR_SIGNATURE pShortSig, DWORD cShortSig, DWORD dwVtableSlot, - LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); // Generate a short sig for an array accessor diff --git a/src/coreclr/vm/clsload.cpp b/src/coreclr/vm/clsload.cpp index 6f3eebf105f2e..d41a2d6bd180d 100644 --- a/src/coreclr/vm/clsload.cpp +++ b/src/coreclr/vm/clsload.cpp @@ -2786,7 +2786,7 @@ TypeHandle ClassLoader::PublishType(const TypeKey *pTypeKey, TypeHandle typeHnd) #ifdef _DEBUG if (!typeHnd.IsTypeDesc()) { - typeHnd.AsMethodTable()->GetAuxiliaryData()->SetIsPublished(); + typeHnd.AsMethodTable()->GetAuxiliaryDataForWrite()->SetIsPublished(); } #endif diff --git 
a/src/coreclr/vm/dynamicmethod.cpp b/src/coreclr/vm/dynamicmethod.cpp index 12c5d6f0386f7..6d674130010f8 100644 --- a/src/coreclr/vm/dynamicmethod.cpp +++ b/src/coreclr/vm/dynamicmethod.cpp @@ -189,7 +189,7 @@ void DynamicMethodTable::AddMethodsToList() pResolver->m_DynamicMethodTable = this; pNewMD->m_pResolver = pResolver; - pNewMD->SetTemporaryEntryPoint(m_pDomain->GetLoaderAllocator(), &amt); + pNewMD->SetTemporaryEntryPoint(&amt); #ifdef _DEBUG pNewMD->m_pDebugMethodTable = m_pMethodTable; diff --git a/src/coreclr/vm/genmeth.cpp b/src/coreclr/vm/genmeth.cpp index a4d28d12eff61..64394585c8fc3 100644 --- a/src/coreclr/vm/genmeth.cpp +++ b/src/coreclr/vm/genmeth.cpp @@ -440,7 +440,7 @@ InstantiatedMethodDesc::NewInstantiatedMethodDesc(MethodTable *pExactMT, // Check that whichever field holds the inst. got setup correctly _ASSERTE((PVOID)pNewMD->GetMethodInstantiation().GetRawArgs() == (PVOID)pInstOrPerInstInfo); - pNewMD->SetTemporaryEntryPoint(pAllocator, &amt); + pNewMD->SetTemporaryEntryPoint(&amt); { // The canonical instantiation is exempt from constraint checks. It's used as the basis @@ -905,7 +905,7 @@ MethodDesc::FindOrCreateAssociatedMethodDesc(MethodDesc* pDefMD, pResultMD->SetIsUnboxingStub(); pResultMD->AsInstantiatedMethodDesc()->SetupWrapperStubWithInstantiations(pMDescInCanonMT, 0, NULL); - pResultMD->SetTemporaryEntryPoint(pAllocator, &amt); + pResultMD->SetTemporaryEntryPoint(&amt); amt.SuppressRelease(); @@ -986,7 +986,7 @@ MethodDesc::FindOrCreateAssociatedMethodDesc(MethodDesc* pDefMD, pNonUnboxingStub->GetNumGenericMethodArgs(), (TypeHandle *)pNonUnboxingStub->GetMethodInstantiation().GetRawArgs()); - pResultMD->SetTemporaryEntryPoint(pAllocator, &amt); + pResultMD->SetTemporaryEntryPoint(&amt); amt.SuppressRelease(); diff --git a/src/coreclr/vm/i386/stublinkerx86.cpp b/src/coreclr/vm/i386/stublinkerx86.cpp index 06fd718b22a8e..9af0ee3e86521 100644 --- a/src/coreclr/vm/i386/stublinkerx86.cpp +++ b/src/coreclr/vm/i386/stublinkerx86.cpp @@ -3109,6 +3109,8 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s #ifdef TARGET_AMD64 VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) { + STANDARD_VM_CONTRACT; + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); if (multiCallableAddr != (PCODE)NULL) @@ -3127,6 +3129,8 @@ VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) { + STANDARD_VM_CONTRACT; + #ifdef TARGET_AMD64 EmitLoadMethodAddressIntoAX(pMD); Emit16(X86_INSTR_JMP_EAX); diff --git a/src/coreclr/vm/ilstubcache.cpp b/src/coreclr/vm/ilstubcache.cpp index 8d3c4fe20b27f..6748827ad5d5a 100644 --- a/src/coreclr/vm/ilstubcache.cpp +++ b/src/coreclr/vm/ilstubcache.cpp @@ -195,7 +195,7 @@ MethodDesc* ILStubCache::CreateNewMethodDesc(LoaderHeap* pCreationHeap, MethodTa // the no metadata part of the method desc pMD->m_pszMethodName = (PTR_CUTF8)"IL_STUB"; pMD->InitializeFlags(DynamicMethodDesc::FlagPublic | DynamicMethodDesc::FlagIsILStub); - pMD->SetTemporaryEntryPoint(pMT->GetLoaderAllocator(), pamTracker); + pMD->SetTemporaryEntryPoint(pamTracker); // // convert signature to a compatible signature if needed diff --git a/src/coreclr/vm/loongarch64/stubs.cpp b/src/coreclr/vm/loongarch64/stubs.cpp index 3af2011673a1c..bb375189fc7d0 100644 --- a/src/coreclr/vm/loongarch64/stubs.cpp +++ b/src/coreclr/vm/loongarch64/stubs.cpp @@ -1465,6 +1465,8 @@ VOID 
StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect) { + STANDARD_VM_CONTRACT; + BranchInstructionFormat::VariationCodes variationCode = BranchInstructionFormat::VariationCodes::BIF_VAR_JUMP; if (!fTailCall) variationCode = static_cast(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_CALL); @@ -1477,6 +1479,8 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { + STANDARD_VM_CONTRACT; + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible. diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 441bc400f1788..65efabf897ca3 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -218,6 +218,10 @@ HRESULT MethodDesc::EnsureCodeDataExists(AllocMemTracker *pamTracker) } CONTRACTL_END; + // Assert that the associated type is published. This isn't quite sufficient to cover the case of allocating + // this while creating a standalone MethodDesc, but catches most of the cases where lost allocations are easy to have happen. + _ASSERTE(pamTracker != NULL || GetMethodTable()->GetAuxiliaryData()->IsPublished()); + if (m_codeData != NULL) return S_OK; @@ -2665,11 +2669,11 @@ PCODE MethodDesc::GetTemporaryEntryPoint() #ifndef DACCESS_COMPILE //******************************************************************************* -void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) +void MethodDesc::SetTemporaryEntryPoint(AllocMemTracker *pamTracker) { WRAPPER_NO_CONTRACT; - EnsureTemporaryEntryPointCore(pLoaderAllocator, pamTracker); + EnsureTemporaryEntryPointCore(pamTracker); #ifdef _DEBUG PTR_PCODE pSlot = GetAddrOfSlot(); @@ -2700,11 +2704,11 @@ void MethodDesc::EnsureTemporaryEntryPoint() if (GetTemporaryEntryPointIfExists() == (PCODE)NULL) { - EnsureTemporaryEntryPointCore(GetLoaderAllocator(), NULL); + EnsureTemporaryEntryPointCore(NULL); } } -void MethodDesc::EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker) +void MethodDesc::EnsureTemporaryEntryPointCore(AllocMemTracker *pamTracker) { CONTRACTL { diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 520a0dd787299..1b2ecd52b44c9 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -235,7 +235,7 @@ class MethodDesc } } - void SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); + void SetTemporaryEntryPoint(AllocMemTracker *pamTracker); #ifndef DACCESS_COMPILE PCODE GetInitialEntryPointForCopiedSlot() @@ -1453,7 +1453,7 @@ class MethodDesc // pamTracker must be NULL for a MethodDesc which cannot be freed by an external AllocMemTracker // OR must be set to point to the same AllocMemTracker that controls allocation of the MethodDesc - void EnsureTemporaryEntryPointCore(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker); + void EnsureTemporaryEntryPointCore(AllocMemTracker *pamTracker); #ifndef DACCESS_COMPILE void EnsureSlotFilled() diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index ce9750583a04d..3c95c79740cd9 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10867,7 +10867,7 @@ MethodTableBuilder::SetupMethodTable2( if 
((pMD->GetSlot() == iCurSlot) && (GetParentMethodTable() == NULL || iCurSlot >= GetParentMethodTable()->GetNumVirtuals())) continue; // For cases where the method is defining the method desc slot, we don't need to fill it in yet - pMD->EnsureTemporaryEntryPointCore(GetLoaderAllocator(), GetMemTracker()); + pMD->EnsureTemporaryEntryPointCore(GetMemTracker()); PCODE addr = pMD->GetTemporaryEntryPoint(); _ASSERTE(addr != (PCODE)NULL); diff --git a/src/coreclr/vm/riscv64/stubs.cpp b/src/coreclr/vm/riscv64/stubs.cpp index 8faa933994d7f..559e0b70de033 100644 --- a/src/coreclr/vm/riscv64/stubs.cpp +++ b/src/coreclr/vm/riscv64/stubs.cpp @@ -1511,6 +1511,8 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect) { + STANDARD_VM_CONTRACT; + BranchInstructionFormat::VariationCodes variationCode = BranchInstructionFormat::VariationCodes::BIF_VAR_JUMP; if (!fTailCall) variationCode = static_cast(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_CALL); @@ -1522,6 +1524,8 @@ void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndir void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) { + STANDARD_VM_CONTRACT; + PCODE multiCallableAddr = pMD->TryGetMultiCallableAddrOfCode(CORINFO_ACCESS_PREFER_SLOT_OVER_TEMPORARY_ENTRYPOINT); // Use direct call if possible. From 6999c2ba639606c3c0d1982305fc66c7596586fe Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 27 Jun 2024 11:37:46 -0700 Subject: [PATCH 30/51] Update method-descriptor.md --- docs/design/coreclr/botr/method-descriptor.md | 72 ++----------------- 1 file changed, 6 insertions(+), 66 deletions(-) diff --git a/docs/design/coreclr/botr/method-descriptor.md b/docs/design/coreclr/botr/method-descriptor.md index 496ecc792af6f..fdbc62605f6e2 100644 --- a/docs/design/coreclr/botr/method-descriptor.md +++ b/docs/design/coreclr/botr/method-descriptor.md @@ -85,7 +85,9 @@ DWORD MethodDesc::GetAttrs() Method Slots ------------ -Each MethodDesc has a slot, which contains the entry point of the method. The slot and entry point must exist for all methods, even the ones that never run like abstract methods. There are multiple places in the runtime that depend on the 1:1 mapping between entry points and MethodDescs, making this relationship an invariant. +Each MethodDesc has a slot, which contains the entry point of the method. The slot must exist for all methods, even the ones that never run like abstract methods. There are multiple places in the runtime that depend on the 1:1 mapping between entry points and MethodDescs, making this relationship an invariant. + +Each MethodDesc logically has an entry point, but we do not allocate these eagerly at MethodDesc creation time. The invariant is that once the method is identified as a method to run, or is used in virtual overriding, we will allocate the entrypoint. The slot is either in MethodTable or in MethodDesc itself. The location of the slot is determined by `mdcHasNonVtableSlot` bit on MethodDesc. @@ -185,8 +187,6 @@ The target of the temporary entry point is a PreStub, which is a special kind of The **stable entry point** is either the native code or the precode. The **native code** is either jitted code or code saved in NGen image. It is common to talk about jitted code when we actually mean native code. -Temporary entry points are never saved into NGen images. 
All entry points in NGen images are stable entry points that are never changed. It is an important optimization that reduced private working set. - ![Figure 2](images/methoddesc-fig2.png) Figure 2 Entry Point State Diagram @@ -208,6 +208,7 @@ The methods to get callable entry points from MethodDesc are: - `MethodDesc::GetSingleCallableAddrOfCode` - `MethodDesc::GetMultiCallableAddrOfCode` +- `MethodDesc::TryGetMultiCallableAddrOfCode` - `MethodDesc::GetSingleCallableAddrOfVirtualizedCode` - `MethodDesc::GetMultiCallableAddrOfVirtualizedCode` @@ -220,7 +221,7 @@ The type of precode has to be cheaply computable from the instruction sequence. **StubPrecode** -StubPrecode is the basic precode type. It loads MethodDesc into a scratch register and then jumps. It must be implemented for precodes to work. It is used as fallback when no other specialized precode type is available. +StubPrecode is the basic precode type. It loads MethodDesc into a scratch register2 and then jumps. It must be implemented for precodes to work. It is used as fallback when no other specialized precode type is available. All other precodes types are optional optimizations that the platform specific files turn on via HAS\_XXX\_PRECODE defines. @@ -236,7 +237,7 @@ StubPrecode looks like this on x86: FixupPrecode is used when the final target does not require MethodDesc in scratch register2. The FixupPrecode saves a few cycles by avoiding loading MethodDesc into the scratch register. -The most common usage of FixupPrecode is for method fixups in NGen images. +Most stubs used are the more efficient form, we currently can use this form for everything but interop methods when a specialized form of Precode is not required. The initial state of the FixupPrecode on x86: @@ -254,67 +255,6 @@ Once it has been patched to point to final target: 2 Passing MethodDesc in scratch register is sometimes referred to as **MethodDesc Calling Convention**. -**FixupPrecode chunks** - -FixupPrecode chunk is a space efficient representation of multiple FixupPrecodes. It mirrors the idea of MethodDescChunk by hoisting the similar MethodDesc pointers from multiple FixupPrecodes to a shared area. - -The FixupPrecode chunk saves space and improves code density of the precodes. The code density improvement from FixupPrecode chunks resulted in 1% - 2% gain in big server scenarios on x64. - -The FixupPrecode chunks looks like this on x86: - - jmp Target2 - pop edi // dummy instruction that marks the type of the precode - db MethodDescChunkIndex - db 2 (PrecodeChunkIndex) - - jmp Target1 - pop edi - db MethodDescChunkIndex - db 1 (PrecodeChunkIndex) - - jmp Target0 - pop edi - db MethodDescChunkIndex - db 0 (PrecodeChunkIndex) - - dw pMethodDescBase - -One FixupPrecode chunk corresponds to one MethodDescChunk. There is no 1:1 mapping between the FixupPrecodes in the chunk and MethodDescs in MethodDescChunk though. Each FixupPrecode has index of the method it belongs to. It allows allocating the FixupPrecode in the chunk only for methods that need it. - -**Compact entry points** - -Compact entry point is a space efficient implementation of temporary entry points. - -Temporary entry points implemented using StubPrecode or FixupPrecode can be patched to point to the actual code. Jitted code can call temporary entry point directly. The temporary entry point can be multicallable entry points in this case. - -Compact entry points cannot be patched to point to the actual code. Jitted code cannot call them directly. They are trading off speed for size. 
Calls to these entry points are indirected via slots in a table (FuncPtrStubs) that are patched to point to the actual entry point eventually. A request for a multicallable entry point allocates a StubPrecode or FixupPrecode on demand in this case. - -The raw speed difference is the cost of an indirect call for a compact entry point vs. the cost of one direct call and one direct jump on the given platform. The later used to be faster by a few percent in large server scenario since it can be predicted by the hardware better (2005). It is not always the case on current (2015) hardware. - -The compact entry points have been historically implemented on x86 only. Their additional complexity, space vs. speed trade-off and hardware advancements made them unjustified on other platforms. - -The compact entry point on x86 looks like this: - - entrypoint0: - mov al,0 - jmp short Dispatch - - entrypoint1: - mov al,1 - jmp short Dispatch - - entrypoint2: - mov al,2 - jmp short Dispatch - - Dispatch: - movzx eax,al - shl eax, 3 - add eax, pBaseMD - jmp PreStub - -The allocation of temporary entry points always tries to pick the smallest temporary entry point from the available choices. For example, a single compact entry point is bigger than a single StubPrecode on x86. The StubPrecode will be preferred over the compact entry point in this case. The allocation of the precode for a stable entry point will try to reuse an allocated temporary entry point precode if one exists of the matching type. - **ThisPtrRetBufPrecode** ThisPtrRetBufPrecode is used to switch a return buffer and the this pointer for open instance delegates returning valuetypes. It is used to convert the calling convention of MyValueType Bar(Foo x) to the calling convention of MyValueType Foo::Bar(). From 24fa6c243b115a144fc32955a9826098c06f4f69 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 27 Jun 2024 11:47:05 -0700 Subject: [PATCH 31/51] Fix musl validation issue --- src/coreclr/vm/method.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 65efabf897ca3..8c1d2b78105f5 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2821,7 +2821,7 @@ Precode* MethodDesc::GetOrCreatePrecode() PrecodeType requiredType = GetPrecodeType(); PrecodeType availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); _ASSERTE(requiredType == availableType); - _ASSERTE(*pSlot != NULL); + _ASSERTE(*pSlot != (PCODE)NULL); _ASSERTE(*pSlot == tempEntry); #endif From d6964195b71eb06951cc57d194b89022420f77e1 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Thu, 27 Jun 2024 17:25:34 -0700 Subject: [PATCH 32/51] Adjust SOS api to be an enumerator --- src/coreclr/debug/daccess/daccess.cpp | 38 ++++++ src/coreclr/debug/daccess/dacimpl.h | 22 +++- src/coreclr/debug/daccess/request.cpp | 76 ++++++++--- src/coreclr/inc/sospriv.idl | 35 +++++- src/coreclr/pal/prebuilt/idl/sospriv_i.cpp | 3 + src/coreclr/pal/prebuilt/inc/sospriv.h | 140 +++++++++++++++++++-- 6 files changed, 288 insertions(+), 26 deletions(-) diff --git a/src/coreclr/debug/daccess/daccess.cpp b/src/coreclr/debug/daccess/daccess.cpp index 6dd0f52fa2e55..261380f508bb2 100644 --- a/src/coreclr/debug/daccess/daccess.cpp +++ b/src/coreclr/debug/daccess/daccess.cpp @@ -8340,6 +8340,44 @@ HRESULT DacMemoryEnumerator::Next(unsigned int count, SOSMemoryRegion regions[], return i < count ? 
S_FALSE : S_OK; } +HRESULT DacMethodTableSlotEnumerator::Skip(unsigned int count) +{ + mIteratorIndex += count; + return S_OK; +} + +HRESULT DacMethodTableSlotEnumerator::Reset() +{ + mIteratorIndex = 0; + return S_OK; +} + +HRESULT DacMethodTableSlotEnumerator::GetCount(unsigned int* pCount) +{ + if (!pCount) + return E_POINTER; + + mMethods.GetCount(); + return S_OK; +} + +HRESULT DacMethodTableSlotEnumerator::Next(unsigned int count, SOSMethodData methods[], unsigned int* pFetched) +{ + if (!pFetched) + return E_POINTER; + + if (!methods) + return E_POINTER; + + unsigned int i = 0; + while (i < count && mIteratorIndex < mMethods.GetCount()) + { + methods[i++] = mMethods.Get(mIteratorIndex++); + } + + *pFetched = i; + return i < count ? S_FALSE : S_OK; +} HRESULT DacGCBookkeepingEnumerator::Init() { diff --git a/src/coreclr/debug/daccess/dacimpl.h b/src/coreclr/debug/daccess/dacimpl.h index 90064855e15f3..a3490cf2d9e23 100644 --- a/src/coreclr/debug/daccess/dacimpl.h +++ b/src/coreclr/debug/daccess/dacimpl.h @@ -1224,7 +1224,7 @@ class ClrDataAccess virtual HRESULT STDMETHODCALLTYPE GetMethodTableInitializationFlags(CLRDATA_ADDRESS methodTable, MethodTableInitializationFlags *initializationStatus); // ISOSDacInterface15 - virtual HRESULT STDMETHODCALLTYPE GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA_ADDRESS *value); + virtual HRESULT GetMethodTableSlotEnumerator(CLRDATA_ADDRESS mt, ISOSMethodEnum **enumerator); // // ClrDataAccess. @@ -1991,6 +1991,26 @@ class DacMemoryEnumerator : public DefaultCOMImpl +{ +public: + DacMethodTableSlotEnumerator() {} + virtual ~DacMethodTableSlotEnumerator() {} + + HRESULT Init(PTR_MethodTable mTable); + + HRESULT STDMETHODCALLTYPE Skip(unsigned int count); + HRESULT STDMETHODCALLTYPE Reset(); + HRESULT STDMETHODCALLTYPE GetCount(unsigned int *pCount); + HRESULT STDMETHODCALLTYPE Next(unsigned int count, SOSMethodData methods[], unsigned int *pFetched); + +protected: + DacReferenceList mMethods; + +private: + unsigned int mIteratorIndex; +}; + class DacHandleTableMemoryEnumerator : public DacMemoryEnumerator { public: diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index 888bb6b2185a2..8c1d71a89e882 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -461,9 +461,9 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA } HRESULT -ClrDataAccess::GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA_ADDRESS *value) +ClrDataAccess::GetMethodTableSlotEnumerator(CLRDATA_ADDRESS mt, ISOSMethodEnum **enumerator) { - if (mt == 0 || value == NULL) + if (mt == 0 || enumerator == NULL) return E_INVALIDARG; SOSDacEnter(); @@ -474,23 +474,17 @@ ClrDataAccess::GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slo { hr = E_INVALIDARG; } - else if (slot < mTable->GetNumVtableSlots()) - { - *value = HOST_CDADDR(mTable->GetMethodDescForSlot_NoThrow(slot)); - hr = S_OK; - } else { - hr = E_INVALIDARG; - MethodTable::IntroducedMethodIterator it(mTable); - for (; it.IsValid() && FAILED(hr); it.Next()) + DacMethodTableSlotEnumerator *methodTableSlotEnumerator = new (nothrow) DacMethodTableSlotEnumerator(); + *enumerator = methodTableSlotEnumerator; + if (*enumerator == NULL) { - MethodDesc* pMD = it.GetMethodDesc(); - if (pMD->GetSlot() == slot) - { - *value = HOST_CDADDR(pMD); - hr = S_OK; - } + hr = E_OUTOFMEMORY; + } + else + { + hr = methodTableSlotEnumerator->Init(mTable); } } @@ -498,6 
+492,56 @@ ClrDataAccess::GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slo return hr; } +HRESULT DacMethodTableSlotEnumerator::Init(PTR_MethodTable mTable) +{ + unsigned int slot = 0; + + SOSMethodData methodData; + WORD numVtableSlots = mTable->GetNumVtableSlots(); + while (slot < numVtableSlots) + { + MethodDesc* pMD = mTable->GetMethodDescForSlot_NoThrow(slot); + methodData.MethodDesc = HOST_CDADDR(pMD); + methodData.Entrypoint = mTable->GetRestoredSlotIfExists(slot); + methodData.DefininingMethodTable = PTR_CDADDR(pMD->GetMethodTable()); + methodData.DefiningModule = HOST_CDADDR(pMD->GetModule()); + methodData.Token = pMD->GetMemberDef(); + + methodData.Slot = slot; + + if (!mMethods.Add(methodData)) + return E_OUTOFMEMORY; + } + + MethodTable::IntroducedMethodIterator it(mTable); + for (; it.IsValid(); it.Next()) + { + MethodDesc* pMD = it.GetMethodDesc(); + WORD slot = pMD->GetSlot(); + if (slot >= numVtableSlots) + { + methodData.MethodDesc = HOST_CDADDR(pMD); + methodData.Entrypoint = pMD->GetMethodEntryPointIfExists(); + methodData.DefininingMethodTable = PTR_CDADDR(pMD->GetMethodTable()); + methodData.DefiningModule = HOST_CDADDR(pMD->GetModule()); + methodData.Token = pMD->GetMemberDef(); + + if (slot == MethodTable::NO_SLOT) + { + methodData.Slot = 0xFFFFFFFF; + } + else + { + methodData.Slot = slot; + } + + if (!mMethods.Add(methodData)) + return E_OUTOFMEMORY; + } + } + + return S_OK; +} HRESULT ClrDataAccess::GetCodeHeapList(CLRDATA_ADDRESS jitManager, unsigned int count, struct DacpJitCodeHeapInfo codeHeaps[], unsigned int *pNeeded) diff --git a/src/coreclr/inc/sospriv.idl b/src/coreclr/inc/sospriv.idl index f4fb57eface0c..141f597dcb4e9 100644 --- a/src/coreclr/inc/sospriv.idl +++ b/src/coreclr/inc/sospriv.idl @@ -520,6 +520,39 @@ interface ISOSDacInterface14 : IUnknown HRESULT GetMethodTableInitializationFlags(CLRDATA_ADDRESS methodTable, MethodTableInitializationFlags *initializationStatus); } +cpp_quote("#ifndef _SOS_MethodData") +cpp_quote("#define _SOS_MethodData") + +typedef struct _SOSMethodData +{ + // At least one of MethodDesc, Entrypoint, or Token/DefiningMethodTable/DefiningModule is guaranteed to be set. 
+ // Multiple of them may be set as well + CLRDATA_ADDRESS MethodDesc; + + CLRDATA_ADDRESS Entrypoint; + + CLRDATA_ADDRESS DefininingMethodTable; // Useful for when the method is inherited from a parent type which is instantiated + CLRDATA_ADDRESS DefiningModule; + unsigned int Token; + + // Slot data, a given MethodDesc may be present in multiple slots for a single MethodTable + unsigned int Slot; // Will be set to 0xFFFFFFFF for EnC added methods +} SOSMethodData; + +cpp_quote("#endif //_SOS_MethodData") + +[ + object, + local, + uuid(3c0fe725-c324-4a4f-8100-d399588a662e) +] +interface ISOSMethodEnum : ISOSEnum +{ + HRESULT Next([in] unsigned int count, + [out, size_is(count), length_is(*pNeeded)] SOSMethodData handles[], + [out] unsigned int *pNeeded); +} + [ object, local, @@ -527,5 +560,5 @@ interface ISOSDacInterface14 : IUnknown ] interface ISOSDacInterface15 : IUnknown { - HRESULT GetMethodTableSlotMethodDesc(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA_ADDRESS *value); + HRESULT GetMethodTableSlotEnumerator(CLRDATA_ADDRESS mt, ISOSMethodEnum **enumerator); } diff --git a/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp b/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp index e2a88e3f38ed0..579be51d356f7 100644 --- a/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp +++ b/src/coreclr/pal/prebuilt/idl/sospriv_i.cpp @@ -122,6 +122,9 @@ MIDL_DEFINE_GUID(IID, IID_ISOSDacInterface13,0x3176a8ed,0x597b,0x4f54,0xa7,0x1f, MIDL_DEFINE_GUID(IID, IID_ISOSDacInterface14,0x9aa22aca,0x6dc6,0x4a0c,0xb4,0xe0,0x70,0xd2,0x41,0x6b,0x98,0x37); +MIDL_DEFINE_GUID(IID, IID_ISOSMethodEnum,0x3c0fe725,0xc324,0x4a4f,0x81,0x00,0xd3,0x99,0x58,0x8a,0x66,0x2e); + + MIDL_DEFINE_GUID(IID, IID_ISOSDacInterface15,0x7ed81261,0x52a9,0x4a23,0xa3,0x58,0xc3,0x31,0x3d,0xea,0x30,0xa8); #undef MIDL_DEFINE_GUID diff --git a/src/coreclr/pal/prebuilt/inc/sospriv.h b/src/coreclr/pal/prebuilt/inc/sospriv.h index 22c4309a99e4a..a3d741f740def 100644 --- a/src/coreclr/pal/prebuilt/inc/sospriv.h +++ b/src/coreclr/pal/prebuilt/inc/sospriv.h @@ -3477,6 +3477,132 @@ EXTERN_C const IID IID_ISOSDacInterface14; #endif /* __ISOSDacInterface14_INTERFACE_DEFINED__ */ +/* interface __MIDL_itf_sospriv_0000_0019 */ +/* [local] */ + +#ifndef _SOS_MethodData +#define _SOS_MethodData +typedef struct _SOSMethodData + { + CLRDATA_ADDRESS MethodDesc; + CLRDATA_ADDRESS Entrypoint; + CLRDATA_ADDRESS DefininingMethodTable; + CLRDATA_ADDRESS DefiningModule; + unsigned int Token; + unsigned int Slot; + } SOSMethodData; + +#endif //_SOS_MethodData + + +extern RPC_IF_HANDLE __MIDL_itf_sospriv_0000_0019_v0_0_c_ifspec; +extern RPC_IF_HANDLE __MIDL_itf_sospriv_0000_0019_v0_0_s_ifspec; + +#ifndef __ISOSMethodEnum_INTERFACE_DEFINED__ +#define __ISOSMethodEnum_INTERFACE_DEFINED__ + +/* interface ISOSMethodEnum */ +/* [uuid][local][object] */ + + +EXTERN_C const IID IID_ISOSMethodEnum; + +#if defined(__cplusplus) && !defined(CINTERFACE) + + MIDL_INTERFACE("3c0fe725-c324-4a4f-8100-d399588a662e") + ISOSMethodEnum : public ISOSEnum + { + public: + virtual HRESULT STDMETHODCALLTYPE Next( + /* [in] */ unsigned int count, + /* [length_is][size_is][out] */ SOSMethodData handles[ ], + /* [out] */ unsigned int *pNeeded) = 0; + + }; + + +#else /* C style interface */ + + typedef struct ISOSMethodEnumVtbl + { + BEGIN_INTERFACE + + HRESULT ( STDMETHODCALLTYPE *QueryInterface )( + ISOSMethodEnum * This, + /* [in] */ REFIID riid, + /* [annotation][iid_is][out] */ + _COM_Outptr_ void **ppvObject); + + ULONG ( STDMETHODCALLTYPE *AddRef )( + ISOSMethodEnum * This); + + ULONG ( STDMETHODCALLTYPE 
*Release )( + ISOSMethodEnum * This); + + HRESULT ( STDMETHODCALLTYPE *Skip )( + ISOSMethodEnum * This, + /* [in] */ unsigned int count); + + HRESULT ( STDMETHODCALLTYPE *Reset )( + ISOSMethodEnum * This); + + HRESULT ( STDMETHODCALLTYPE *GetCount )( + ISOSMethodEnum * This, + /* [out] */ unsigned int *pCount); + + HRESULT ( STDMETHODCALLTYPE *Next )( + ISOSMethodEnum * This, + /* [in] */ unsigned int count, + /* [length_is][size_is][out] */ SOSMethodData handles[ ], + /* [out] */ unsigned int *pNeeded); + + END_INTERFACE + } ISOSMethodEnumVtbl; + + interface ISOSMethodEnum + { + CONST_VTBL struct ISOSMethodEnumVtbl *lpVtbl; + }; + + + +#ifdef COBJMACROS + + +#define ISOSMethodEnum_QueryInterface(This,riid,ppvObject) \ + ( (This)->lpVtbl -> QueryInterface(This,riid,ppvObject) ) + +#define ISOSMethodEnum_AddRef(This) \ + ( (This)->lpVtbl -> AddRef(This) ) + +#define ISOSMethodEnum_Release(This) \ + ( (This)->lpVtbl -> Release(This) ) + + +#define ISOSMethodEnum_Skip(This,count) \ + ( (This)->lpVtbl -> Skip(This,count) ) + +#define ISOSMethodEnum_Reset(This) \ + ( (This)->lpVtbl -> Reset(This) ) + +#define ISOSMethodEnum_GetCount(This,pCount) \ + ( (This)->lpVtbl -> GetCount(This,pCount) ) + + +#define ISOSMethodEnum_Next(This,count,handles,pNeeded) \ + ( (This)->lpVtbl -> Next(This,count,handles,pNeeded) ) + +#endif /* COBJMACROS */ + + +#endif /* C style interface */ + + + + +#endif /* __ISOSMethodEnum_INTERFACE_DEFINED__ */ + + #ifndef __ISOSDacInterface15_INTERFACE_DEFINED__ #define __ISOSDacInterface15_INTERFACE_DEFINED__ @@ -3492,10 +3618,9 @@ EXTERN_C const IID IID_ISOSDacInterface15; ISOSDacInterface15 : public IUnknown { public: - virtual HRESULT STDMETHODCALLTYPE GetMethodTableSlotMethodDesc( + virtual HRESULT STDMETHODCALLTYPE GetMethodTableSlotEnumerator( CLRDATA_ADDRESS mt, - unsigned int slot, - CLRDATA_ADDRESS *value) = 0; + ISOSMethodEnum **enumerator) = 0; }; @@ -3518,11 +3643,10 @@ EXTERN_C const IID IID_ISOSDacInterface15; ULONG ( STDMETHODCALLTYPE *Release )( ISOSDacInterface15 * This); - HRESULT ( STDMETHODCALLTYPE *GetMethodTableSlotMethodDesc )( + HRESULT ( STDMETHODCALLTYPE *GetMethodTableSlotEnumerator )( ISOSDacInterface15 * This, CLRDATA_ADDRESS mt, - unsigned int slot, - CLRDATA_ADDRESS *value); + ISOSMethodEnum **enumerator); END_INTERFACE } ISOSDacInterface15Vtbl; @@ -3547,8 +3671,8 @@ EXTERN_C const IID IID_ISOSDacInterface15; ( (This)->lpVtbl -> Release(This) ) -#define ISOSDacInterface15_GetMethodTableSlotMethodDesc(This,mt,slot,value) \ - ( (This)->lpVtbl -> GetMethodTableSlotMethodDesc(This,mt,slot,value) ) +#define ISOSDacInterface15_GetMethodTableSlotEnumerator(This,mt,enumerator) \ + ( (This)->lpVtbl -> GetMethodTableSlotEnumerator(This,mt,enumerator) ) #endif /* COBJMACROS */ From 27154946d577cb010c4dbc97584b1fa8c0e367b4 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 28 Jun 2024 11:24:42 -0700 Subject: [PATCH 33/51] Fix assertion issues noted Fix ISOSDacInterface15 to actually work --- src/coreclr/debug/daccess/daccess.cpp | 4 ++++ src/coreclr/debug/daccess/dacimpl.h | 8 ++++++-- src/coreclr/debug/daccess/request.cpp | 2 +- src/coreclr/vm/method.cpp | 24 ++++++++++++++++++++++-- src/coreclr/vm/method.hpp | 2 ++ src/coreclr/vm/methodtable.cpp | 11 ++++++++++- src/coreclr/vm/methodtable.h | 10 ++++++++++ src/coreclr/vm/methodtable.inl | 6 ++++++ src/coreclr/vm/methodtablebuilder.cpp | 8 +++++--- 9 files changed, 66 insertions(+), 9 deletions(-) diff --git a/src/coreclr/debug/daccess/daccess.cpp b/src/coreclr/debug/daccess/daccess.cpp index 
261380f508bb2..8dcdf8095ed32 100644 --- a/src/coreclr/debug/daccess/daccess.cpp +++ b/src/coreclr/debug/daccess/daccess.cpp @@ -3239,6 +3239,10 @@ ClrDataAccess::QueryInterface(THIS_ { ifaceRet = static_cast(this); } + else if (IsEqualIID(interfaceId, __uuidof(ISOSDacInterface15))) + { + ifaceRet = static_cast(this); + } else { *iface = NULL; diff --git a/src/coreclr/debug/daccess/dacimpl.h b/src/coreclr/debug/daccess/dacimpl.h index a3490cf2d9e23..7bff40f01604e 100644 --- a/src/coreclr/debug/daccess/dacimpl.h +++ b/src/coreclr/debug/daccess/dacimpl.h @@ -818,7 +818,8 @@ class ClrDataAccess public ISOSDacInterface11, public ISOSDacInterface12, public ISOSDacInterface13, - public ISOSDacInterface14 + public ISOSDacInterface14, + public ISOSDacInterface15 { public: ClrDataAccess(ICorDebugDataTarget * pTarget, ICLRDataTarget * pLegacyTarget=0); @@ -1994,7 +1995,10 @@ class DacMemoryEnumerator : public DefaultCOMImpl { public: - DacMethodTableSlotEnumerator() {} + DacMethodTableSlotEnumerator() : mIteratorIndex(0) + { + } + virtual ~DacMethodTableSlotEnumerator() {} HRESULT Init(PTR_MethodTable mTable); diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index 8c1d71a89e882..caf3d9dedda12 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -507,7 +507,7 @@ HRESULT DacMethodTableSlotEnumerator::Init(PTR_MethodTable mTable) methodData.DefiningModule = HOST_CDADDR(pMD->GetModule()); methodData.Token = pMD->GetMemberDef(); - methodData.Slot = slot; + methodData.Slot = slot++; if (!mMethods.Add(methodData)) return E_OUTOFMEMORY; diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 8c1d2b78105f5..400e321d756d5 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2673,6 +2673,7 @@ void MethodDesc::SetTemporaryEntryPoint(AllocMemTracker *pamTracker) { WRAPPER_NO_CONTRACT; + _ASSERTE(pamTracker != NULL); EnsureTemporaryEntryPointCore(pamTracker); #ifdef _DEBUG @@ -2683,8 +2684,8 @@ void MethodDesc::SetTemporaryEntryPoint(AllocMemTracker *pamTracker) if (RequiresStableEntryPoint()) { // The rest of the system assumes that certain methods always have stable entrypoints. - // Create them now. - GetOrCreatePrecode(); + // Mark the precode as such + MarkPrecodeAsStableEntrypoint(); } } @@ -2809,6 +2810,10 @@ Precode* MethodDesc::GetOrCreatePrecode() WRAPPER_NO_CONTRACT; _ASSERTE(!IsVersionableWithVtableSlotBackpatch()); + // Since this can allocate memory that won't be freed, we need to make sure that the associated MethodTable + // is fully allocated and permanent. 
+ _ASSERTE(GetMethodTable()->GetAuxiliaryData()->IsPublished()); + if (HasPrecode()) { return GetPrecode(); @@ -2831,6 +2836,21 @@ Precode* MethodDesc::GetOrCreatePrecode() return Precode::GetPrecodeFromEntryPoint(tempEntry); } +void MethodDesc::MarkPrecodeAsStableEntrypoint() +{ +#if _DEBUG + PCODE tempEntry = GetTemporaryEntryPointIfExists(); + _ASSERTE(tempEntry != NULL); + PrecodeType requiredType = GetPrecodeType(); + PrecodeType availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); + _ASSERTE(requiredType == availableType); +#endif + _ASSERTE(!HasPrecode()); + _ASSERTE(RequiresStableEntryPoint()); + + InterlockedUpdateFlags3(enum_flag3_HasStableEntryPoint | enum_flag3_HasPrecode, TRUE); +} + bool MethodDesc::DetermineIsEligibleForTieredCompilationInvariantForAllMethodsInChunk() { #ifdef FEATURE_TIERED_COMPILATION diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 1b2ecd52b44c9..bb5a1370329c4 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -297,6 +297,8 @@ class MethodDesc } Precode* GetOrCreatePrecode(); + void MarkPrecodeAsStableEntrypoint(); + // Given a code address return back the MethodDesc whenever possible // diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 783d7f0200194..2eab9ae45818c 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -748,6 +748,7 @@ MethodTable* CreateMinimalMethodTable(Module* pContainingModule, #ifdef _DEBUG pClass->SetDebugClassName("dynamicClass"); pMT->SetDebugClassName("dynamicClass"); + pMT->GetAuxiliaryDataForWrite()->SetIsPublished(); #endif LOG((LF_BCL, LL_INFO10, "Level1 - MethodTable created {0x%p}\n", pClass)); @@ -6702,7 +6703,7 @@ MethodDesc *MethodTable::MethodDataObject::GetImplMethodDesc(UINT32 slotNumber) if (pMDRet == NULL) { _ASSERTE(slotNumber < GetNumVirtuals()); - pMDRet = m_pDeclMT->GetMethodDescForSlot(slotNumber); + pMDRet = m_pDeclMT->GetMethodDescForSlot_NoThrow(slotNumber); _ASSERTE(CheckPointer(pMDRet)); pEntry->SetImplMethodDesc(pMDRet); } @@ -6921,6 +6922,14 @@ DispatchSlot MethodTable::MethodDataInterfaceImpl::GetImplSlot(UINT32 slotNumber return m_pImpl->GetImplSlot(implSlotNumber); } +//========================================================================================== +bool MethodTable::MethodDataInterfaceImpl::IsImplSlotNull(UINT32 slotNumber) +{ + WRAPPER_NO_CONTRACT; + UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber); + return (implSlotNumber == INVALID_SLOT_NUMBER); +} + //========================================================================================== UINT32 MethodTable::MethodDataInterfaceImpl::GetImplSlotNumber(UINT32 slotNumber) { diff --git a/src/coreclr/vm/methodtable.h b/src/coreclr/vm/methodtable.h index 5b2536cfef904..d850b894ad623 100644 --- a/src/coreclr/vm/methodtable.h +++ b/src/coreclr/vm/methodtable.h @@ -3007,6 +3007,7 @@ public : virtual MethodData *GetImplMethodData() = 0; MethodTable *GetImplMethodTable() { return m_pImplMT; } virtual DispatchSlot GetImplSlot(UINT32 slotNumber) = 0; + virtual bool IsImplSlotNull(UINT32 slotNumber) = 0; // Returns INVALID_SLOT_NUMBER if no implementation exists. 
virtual UINT32 GetImplSlotNumber(UINT32 slotNumber) = 0; virtual MethodDesc *GetImplMethodDesc(UINT32 slotNumber) = 0; @@ -3119,6 +3120,7 @@ public : virtual MethodData *GetImplMethodData() { LIMITED_METHOD_CONTRACT; return this; } virtual DispatchSlot GetImplSlot(UINT32 slotNumber); + virtual bool IsImplSlotNull(UINT32 slotNumber) { LIMITED_METHOD_CONTRACT; return false; } // Every valid slot on an actual MethodTable has a MethodDesc which is associated with it virtual UINT32 GetImplSlotNumber(UINT32 slotNumber); virtual MethodDesc *GetImplMethodDesc(UINT32 slotNumber); virtual void InvalidateCachedVirtualSlot(UINT32 slotNumber); @@ -3259,6 +3261,12 @@ public : { LIMITED_METHOD_CONTRACT; return this; } virtual DispatchSlot GetImplSlot(UINT32 slotNumber) { WRAPPER_NO_CONTRACT; return DispatchSlot(m_pDeclMT->GetRestoredSlot(slotNumber)); } + virtual bool IsImplSlotNull(UINT32 slotNumber) + { + // Every valid slot on an actual MethodTable has a MethodDesc which is associated with it + LIMITED_METHOD_CONTRACT; + return false; + } virtual UINT32 GetImplSlotNumber(UINT32 slotNumber) { LIMITED_METHOD_CONTRACT; return slotNumber; } virtual MethodDesc *GetImplMethodDesc(UINT32 slotNumber); @@ -3305,6 +3313,7 @@ public : virtual MethodTable *GetImplMethodTable() { WRAPPER_NO_CONTRACT; return m_pImpl->GetImplMethodTable(); } virtual DispatchSlot GetImplSlot(UINT32 slotNumber); + virtual bool IsImplSlotNull(UINT32 slotNumber); virtual UINT32 GetImplSlotNumber(UINT32 slotNumber); virtual MethodDesc *GetImplMethodDesc(UINT32 slotNumber); virtual void InvalidateCachedVirtualSlot(UINT32 slotNumber); @@ -3429,6 +3438,7 @@ public : inline BOOL IsVirtual() const; inline UINT32 GetNumVirtuals() const; inline DispatchSlot GetTarget() const; + inline bool IsTargetNull() const; // Can be called only if IsValid()=TRUE inline MethodDesc *GetMethodDesc() const; diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index fb0164ead4895..8e01d14b9ee6d 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -576,6 +576,12 @@ inline DispatchSlot MethodTable::MethodIterator::GetTarget() const { return m_pMethodData->GetImplSlot(m_iCur); } +inline bool MethodTable::MethodIterator::IsTargetNull() const { + LIMITED_METHOD_CONTRACT; + CONSISTENCY_CHECK(IsValid()); + return m_pMethodData->IsImplSlotNull(m_iCur); +} + //========================================================================================== inline MethodDesc *MethodTable::MethodIterator::GetMethodDesc() const { LIMITED_METHOD_CONTRACT; diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 3c95c79740cd9..3d9b4a5c8f90b 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10868,7 +10868,9 @@ MethodTableBuilder::SetupMethodTable2( continue; // For cases where the method is defining the method desc slot, we don't need to fill it in yet pMD->EnsureTemporaryEntryPointCore(GetMemTracker()); - PCODE addr = pMD->GetTemporaryEntryPoint(); + // Use the IfExists variant, as GetTemporaryEntrypoint isn't safe to call during MethodTable construction, as it might allocate + // without using the MemTracker. + PCODE addr = pMD->GetTemporaryEntryPointIfExists(); _ASSERTE(addr != (PCODE)NULL); if (pMD->HasNonVtableSlot()) @@ -10884,7 +10886,7 @@ MethodTableBuilder::SetupMethodTable2( { // The rest of the system assumes that certain methods always have stable entrypoints. // Create them now. 
- pMD->GetOrCreatePrecode(); + pMD->MarkPrecodeAsStableEntrypoint(); } } } @@ -11246,7 +11248,7 @@ void MethodTableBuilder::VerifyVirtualMethodsImplemented(MethodTable::MethodData MethodTable::MethodIterator it(hData); for (; it.IsValid() && it.IsVirtual(); it.Next()) { - if (it.GetTarget().IsNull()) + if (it.IsTargetNull()) { MethodDesc *pMD = it.GetDeclMethodDesc(); From 47f80436a6921ac5a0e907e30d9ff85c9677cc09 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 28 Jun 2024 11:54:54 -0700 Subject: [PATCH 34/51] Remove GetRestoredSlotIfExists - Its the same as GetSlot .... just replace it with that function. --- src/coreclr/debug/daccess/request.cpp | 4 ++-- src/coreclr/vm/methodtable.cpp | 12 ------------ src/coreclr/vm/methodtable.h | 1 - src/coreclr/vm/methodtable.inl | 2 +- 4 files changed, 3 insertions(+), 16 deletions(-) diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index caf3d9dedda12..186e45a33139e 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -428,7 +428,7 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA else if (slot < mTable->GetNumVtableSlots()) { // Now get the slot: - *value = mTable->GetRestoredSlotIfExists(slot); + *value = mTable->GetSlot(slot); if (*value == 0) { hr = S_FALSE; @@ -502,7 +502,7 @@ HRESULT DacMethodTableSlotEnumerator::Init(PTR_MethodTable mTable) { MethodDesc* pMD = mTable->GetMethodDescForSlot_NoThrow(slot); methodData.MethodDesc = HOST_CDADDR(pMD); - methodData.Entrypoint = mTable->GetRestoredSlotIfExists(slot); + methodData.Entrypoint = mTable->GetSlot(slot); methodData.DefininingMethodTable = PTR_CDADDR(pMD->GetMethodTable()); methodData.DefiningModule = HOST_CDADDR(pMD->GetModule()); methodData.Token = pMD->GetMemberDef(); diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 2eab9ae45818c..73003741e8a6e 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -7572,18 +7572,6 @@ Module *MethodTable::GetDefiningModuleForOpenType() RETURN NULL; } -PCODE MethodTable::GetRestoredSlotIfExists(DWORD slotNumber) -{ - CONTRACTL { - NOTHROW; - GC_NOTRIGGER; - MODE_ANY; - SUPPORTS_DAC; - } CONTRACTL_END; - - return GetCanonicalMethodTable()->GetSlot(slotNumber); -} - //========================================================================================== PCODE MethodTable::GetRestoredSlot(DWORD slotNumber) { diff --git a/src/coreclr/vm/methodtable.h b/src/coreclr/vm/methodtable.h index d850b894ad623..2696b72fa9357 100644 --- a/src/coreclr/vm/methodtable.h +++ b/src/coreclr/vm/methodtable.h @@ -1646,7 +1646,6 @@ class MethodTable static MethodDesc* GetMethodDescForSlotAddress(PCODE addr, BOOL fSpeculative = FALSE); PCODE GetRestoredSlot(DWORD slot); - PCODE GetRestoredSlotIfExists(DWORD slot); // Returns MethodTable that GetRestoredSlot get its values from MethodTable * GetRestoredSlotMT(DWORD slot); diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index 8e01d14b9ee6d..751a194d93942 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -426,7 +426,7 @@ inline MethodDesc* MethodTable::GetMethodDescForSlot_NoThrow(DWORD slot) } CONTRACTL_END; - PCODE pCode = GetRestoredSlotIfExists(slot); + PCODE pCode = GetSlot(slot); if (pCode == (PCODE)NULL) { From 93184bcb69c2f7ba73f353c5a1168e18fb588b93 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 28 Jun 2024 11:59:28 -0700 Subject: [PATCH 35/51] Update 
src/coreclr/debug/daccess/daccess.cpp Co-authored-by: Jan Kotas --- src/coreclr/debug/daccess/daccess.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/debug/daccess/daccess.cpp b/src/coreclr/debug/daccess/daccess.cpp index 8dcdf8095ed32..5a91413e58fa1 100644 --- a/src/coreclr/debug/daccess/daccess.cpp +++ b/src/coreclr/debug/daccess/daccess.cpp @@ -8361,7 +8361,7 @@ HRESULT DacMethodTableSlotEnumerator::GetCount(unsigned int* pCount) if (!pCount) return E_POINTER; - mMethods.GetCount(); + *pCount = mMethods.GetCount(); return S_OK; } From 70836cbe41df4a96e8eb61b0daab3383f54d9780 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 28 Jun 2024 11:59:38 -0700 Subject: [PATCH 36/51] Update docs/design/coreclr/botr/method-descriptor.md Co-authored-by: Jan Kotas --- docs/design/coreclr/botr/method-descriptor.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/design/coreclr/botr/method-descriptor.md b/docs/design/coreclr/botr/method-descriptor.md index fdbc62605f6e2..4bf3358928d58 100644 --- a/docs/design/coreclr/botr/method-descriptor.md +++ b/docs/design/coreclr/botr/method-descriptor.md @@ -85,7 +85,7 @@ DWORD MethodDesc::GetAttrs() Method Slots ------------ -Each MethodDesc has a slot, which contains the entry point of the method. The slot must exist for all methods, even the ones that never run like abstract methods. There are multiple places in the runtime that depend on the 1:1 mapping between entry points and MethodDescs, making this relationship an invariant. +Each MethodDesc has a slot, which contains the current entry point of the method. The slot must exist for all methods, even the ones that never run like abstract methods. There are multiple places in the runtime that depend on mapping between entry points and MethodDescs. Each MethodDesc logically has an entry point, but we do not allocate these eagerly at MethodDesc creation time. The invariant is that once the method is identified as a method to run, or is used in virtual overriding, we will allocate the entrypoint. From 5bf0fb35fce8e81b6992a4ce7ad61129a8c5bf73 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 28 Jun 2024 11:59:48 -0700 Subject: [PATCH 37/51] Update src/coreclr/vm/methodtable.inl Co-authored-by: Jan Kotas --- src/coreclr/vm/methodtable.inl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index 751a194d93942..68f6bd28aa3bd 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -449,7 +449,7 @@ inline MethodDesc* MethodTable::GetMethodDescForSlot_NoThrow(DWORD slot) } // This is an optimization that we can take advantage of if we're trying to get the MethodDesc - // for an interface virtual, since their slots usually point to stub. + // for an interface virtual, since their slots point to stub. 
if (IsInterface() && slot < GetNumVirtuals()) { return MethodDesc::GetMethodDescFromStubAddr(pCode); From 70100521efea5661218e3008ff80b9e1b6e18572 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 28 Jun 2024 12:00:03 -0700 Subject: [PATCH 38/51] Update src/coreclr/vm/methodtable.h Co-authored-by: Jan Kotas --- src/coreclr/vm/methodtable.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/methodtable.h b/src/coreclr/vm/methodtable.h index 2696b72fa9357..654a8b658fad6 100644 --- a/src/coreclr/vm/methodtable.h +++ b/src/coreclr/vm/methodtable.h @@ -1639,7 +1639,7 @@ class MethodTable MethodDesc* GetMethodDescForSlot(DWORD slot); - // This api produces the same result as GetMethodDescForSlot_NoThrow, but it uses a variation on the + // This api produces the same result as GetMethodDescForSlot, but it uses a variation on the // algorithm that does not allocate a temporary entrypoint for the slot if it doesn't exist. MethodDesc* GetMethodDescForSlot_NoThrow(DWORD slot); From fb2f987257c844d9d23ea68bbe12a33b3ddfc83c Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 28 Jun 2024 16:30:05 -0700 Subject: [PATCH 39/51] Fix GetMethodDescForSlot_NoThrow Try removing EnsureSlotFilled Implement IsEligibleForTieredCompilation in terms of IsEligibleForTieredCompilation_NoCheckMethodDescChunk --- src/coreclr/vm/method.cpp | 2 +- src/coreclr/vm/method.hpp | 19 +------------------ src/coreclr/vm/method.inl | 6 ++---- src/coreclr/vm/methodtable.inl | 2 +- 4 files changed, 5 insertions(+), 24 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 400e321d756d5..bd6362ba29122 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -555,7 +555,7 @@ PCODE MethodDesc::GetMethodEntryPoint() if (*PTR_PCODE(pSlot) == (PCODE)NULL) { - EnsureSlotFilled(); + EnsureTemporaryEntryPoint(); _ASSERTE(*PTR_PCODE(pSlot) != (PCODE)NULL); } return *PTR_PCODE(pSlot); diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index bb5a1370329c4..7bd36ec41babd 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -1451,30 +1451,13 @@ class MethodDesc // PCODE GetMethodEntryPointIfExists(); + // Ensure that the temporary entrypoint is allocated, and the slot is filled with some value void EnsureTemporaryEntryPoint(); // pamTracker must be NULL for a MethodDesc which cannot be freed by an external AllocMemTracker // OR must be set to point to the same AllocMemTracker that controls allocation of the MethodDesc void EnsureTemporaryEntryPointCore(AllocMemTracker *pamTracker); -#ifndef DACCESS_COMPILE - void EnsureSlotFilled() - { - WRAPPER_NO_CONTRACT; - EnsureTemporaryEntryPoint(); - -#ifdef _DEBUG - PCODE *pSlot = GetAddrOfSlot(); - _ASSERTE(*pSlot != (PCODE)NULL); -#endif - - if (RequiresStableEntryPoint() && !HasStableEntryPoint()) - { - GetOrCreatePrecode(); - } - } -#endif // DACCESS_COMPILE - //******************************************************************************* // Returns the address of the native code. 
PCODE GetNativeCode(); diff --git a/src/coreclr/vm/method.inl b/src/coreclr/vm/method.inl index 51f05af0eed97..bf25d6f695570 100644 --- a/src/coreclr/vm/method.inl +++ b/src/coreclr/vm/method.inl @@ -12,15 +12,13 @@ inline bool MethodDesc::IsEligibleForTieredCompilation() #ifdef FEATURE_TIERED_COMPILATION _ASSERTE(GetMethodDescChunk()->DeterminedIfMethodsAreEligibleForTieredCompilation()); - return (VolatileLoadWithoutBarrier(&m_wFlags3AndTokenRemainder) & enum_flag3_IsEligibleForTieredCompilation) != 0; -#else - return false; #endif + return IsEligibleForTieredCompilation_NoCheckMethodDescChunk(); } inline bool MethodDesc::IsEligibleForTieredCompilation_NoCheckMethodDescChunk() { - LIMITED_METHOD_CONTRACT; + LIMITED_METHOD_DAC_CONTRACT; // Just like above, but without the assert. This is used in the path which initializes the flag. #ifdef FEATURE_TIERED_COMPILATION diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index 8783c4d04fe11..9fd53e9d2a5da 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -437,7 +437,7 @@ inline MethodDesc* MethodTable::GetMethodDescForSlot_NoThrow(DWORD slot) } CONTRACTL_END; - PCODE pCode = GetSlot(slot); + PCODE pCode = GetCanonicalMethodTable()->GetSlot(slot); if (pCode == (PCODE)NULL) { From c5af14055cb9749c6045f00b12cd728518563e0f Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Mon, 1 Jul 2024 11:45:34 -0700 Subject: [PATCH 40/51] Fix missing change intended in last commit --- src/coreclr/vm/jitinterface.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp index 2b75115e2df5c..249ddfea3a8c9 100644 --- a/src/coreclr/vm/jitinterface.cpp +++ b/src/coreclr/vm/jitinterface.cpp @@ -8541,7 +8541,7 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd, JIT_TO_EE_TRANSITION(); MethodDesc* method = GetMethod(methodHnd); - method->EnsureSlotFilled(); + method->EnsureTemporaryEntryPoint(); //@GENERICS: shouldn't be doing this for instantiated methods as they live elsewhere _ASSERTE(!method->HasMethodInstantiation()); From 02811271c8d1492d638aa1c940beb923ab910ffc Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Mon, 1 Jul 2024 18:16:04 -0700 Subject: [PATCH 41/51] Fix some more IsPublished memory use issues --- src/coreclr/vm/method.hpp | 23 ++++++++++++++++++++--- src/coreclr/vm/methodtable.inl | 2 +- src/coreclr/vm/methodtablebuilder.cpp | 7 +++---- 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index 7bd36ec41babd..dccab97d3629d 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -238,7 +238,7 @@ class MethodDesc void SetTemporaryEntryPoint(AllocMemTracker *pamTracker); #ifndef DACCESS_COMPILE - PCODE GetInitialEntryPointForCopiedSlot() + PCODE GetInitialEntryPointForCopiedSlot(MethodTable *pMTBeingCreated, AllocMemTracker* pamTracker) { CONTRACTL { @@ -248,11 +248,28 @@ class MethodDesc } CONTRACTL_END; + if (pMTBeingCreated != GetMethodTable()) + { + pamTracker = NULL; + } + + // If EnsureTemporaryEntryPointCore is called, then + // both GetTemporaryEntryPointIfExists and GetSlot() + // are guaranteed to return a NON-NULL PCODE. 
+ EnsureTemporaryEntryPointCore(pamTracker); + + PCODE result; if (IsVersionableWithVtableSlotBackpatch()) { - return GetTemporaryEntryPoint(); + result = GetTemporaryEntryPointIfExists(); } - return GetMethodEntryPoint(); + else + { + result = GetSlot(); + } + _ASSERTE(result != (PCODE)NULL); + + return result; } #endif diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index 9fd53e9d2a5da..72983211f78b1 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -479,7 +479,7 @@ inline void MethodTable::CopySlotFrom(UINT32 slotNumber, MethodDataWrapper &hSou MethodDesc *pMD = hSourceMTData->GetImplMethodDesc(slotNumber); _ASSERTE(CheckPointer(pMD)); _ASSERTE(pMD == pSourceMT->GetMethodDescForSlot(slotNumber)); - SetSlot(slotNumber, pMD->GetInitialEntryPointForCopiedSlot()); + SetSlot(slotNumber, pMD->GetInitialEntryPointForCopiedSlot(NULL, NULL)); } //========================================================================================== diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 3d9b4a5c8f90b..45eff0f880a50 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -10855,7 +10855,7 @@ MethodTableBuilder::SetupMethodTable2( // DWORD indirectionIndex = MethodTable::GetIndexOfVtableIndirection(iCurSlot); if (GetParentMethodTable()->GetVtableIndirections()[indirectionIndex] != pMT->GetVtableIndirections()[indirectionIndex]) - pMT->SetSlot(iCurSlot, pMD->GetInitialEntryPointForCopiedSlot()); + pMT->SetSlot(iCurSlot, pMD->GetInitialEntryPointForCopiedSlot(pMT, GetMemTracker())); } else { @@ -10949,7 +10949,7 @@ MethodTableBuilder::SetupMethodTable2( if (pMD != pOriginalMD) { // Copy the slot value in the method's original slot. - pMT->SetSlot(i, pOriginalMD->GetInitialEntryPointForCopiedSlot()); + pMT->SetSlot(i, pOriginalMD->GetInitialEntryPointForCopiedSlot(pMT, GetMemTracker())); hMTData->InvalidateCachedVirtualSlot(i); // Update the pMD to the new method desc we just copied over ourselves with. This will @@ -11006,8 +11006,7 @@ MethodTableBuilder::SetupMethodTable2( // If we fail to find an _IMPLEMENTATION_ for the interface MD, then // we are a ComImportMethod, otherwise we still be a ComImportMethod or // we can be a ManagedMethod. - DispatchSlot impl(it.GetTarget()); - if (!impl.IsNull()) + if (!it.IsTargetNull()) { pClsMD = it.GetMethodDesc(); From 56137a26ca7ced03bc7c7d6d664e34634ade86b9 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Tue, 2 Jul 2024 14:58:38 -0700 Subject: [PATCH 42/51] Call the right GetSlot method --- src/coreclr/vm/method.hpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp index dccab97d3629d..b649e620dc7ea 100644 --- a/src/coreclr/vm/method.hpp +++ b/src/coreclr/vm/method.hpp @@ -265,7 +265,8 @@ class MethodDesc } else { - result = GetSlot(); + _ASSERTE(GetMethodTable()->IsCanonicalMethodTable()); + result = GetMethodTable()->GetSlot(GetSlot()); } _ASSERTE(result != (PCODE)NULL); From 98aa3c916224d893252f4544b5e5c84fd150f332 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Tue, 2 Jul 2024 15:16:15 -0700 Subject: [PATCH 43/51] Move another scenario to NoThrow, I think this should clear up our tests... 
--- src/coreclr/vm/methodtable.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 163a156871920..a32d91a520fa7 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -6762,7 +6762,7 @@ void MethodTable::MethodDataObject::InvalidateCachedVirtualSlot(UINT32 slotNumbe MethodDesc *MethodTable::MethodDataInterface::GetDeclMethodDesc(UINT32 slotNumber) { WRAPPER_NO_CONTRACT; - return m_pDeclMT->GetMethodDescForSlot(slotNumber); + return m_pDeclMT->GetMethodDescForSlot_NoThrow(slotNumber); } //========================================================================================== From 7cf8acd5aa5e1af94bc01b2e171e4b8665375bfb Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Tue, 2 Jul 2024 16:14:50 -0700 Subject: [PATCH 44/51] Add additional IsPublished check --- src/coreclr/vm/methodtable.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index a32d91a520fa7..f9ae35553d43e 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -6442,6 +6442,8 @@ InteropMethodTableData *MethodTable::GetComInteropData() GC_TRIGGERS; } CONTRACTL_END; + _ASSERTE(GetAuxiliaryData()->IsPublished()); + InteropMethodTableData *pData = LookupComInteropData(); if (!pData) From efe8f1af26c19ac630e62301398439b46e0467c2 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 3 Jul 2024 09:46:37 -0700 Subject: [PATCH 45/51] Fix MUSL validation build error and Windows x86 build error --- src/coreclr/debug/daccess/dacimpl.h | 2 +- src/coreclr/vm/method.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/coreclr/debug/daccess/dacimpl.h b/src/coreclr/debug/daccess/dacimpl.h index 7bff40f01604e..136c10f489628 100644 --- a/src/coreclr/debug/daccess/dacimpl.h +++ b/src/coreclr/debug/daccess/dacimpl.h @@ -1225,7 +1225,7 @@ class ClrDataAccess virtual HRESULT STDMETHODCALLTYPE GetMethodTableInitializationFlags(CLRDATA_ADDRESS methodTable, MethodTableInitializationFlags *initializationStatus); // ISOSDacInterface15 - virtual HRESULT GetMethodTableSlotEnumerator(CLRDATA_ADDRESS mt, ISOSMethodEnum **enumerator); + virtual HRESULT STDMETHODCALLTYPE GetMethodTableSlotEnumerator(CLRDATA_ADDRESS mt, ISOSMethodEnum **enumerator); // // ClrDataAccess. 
diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index bd6362ba29122..59d3ad2c93683 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2840,7 +2840,7 @@ void MethodDesc::MarkPrecodeAsStableEntrypoint() { #if _DEBUG PCODE tempEntry = GetTemporaryEntryPointIfExists(); - _ASSERTE(tempEntry != NULL); + _ASSERTE(tempEntry != (PCODE)NULL); PrecodeType requiredType = GetPrecodeType(); PrecodeType availableType = Precode::GetPrecodeFromEntryPoint(tempEntry)->GetType(); _ASSERTE(requiredType == availableType); From d7d39482aec6c608f6dec841eb8c36dd24aec416 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 3 Jul 2024 11:30:46 -0700 Subject: [PATCH 46/51] Address code review feedback --- src/coreclr/debug/daccess/request.cpp | 4 +++- src/coreclr/vm/class.cpp | 6 +++--- src/coreclr/vm/classcompat.cpp | 6 +++--- src/coreclr/vm/clsload.cpp | 4 ++++ src/coreclr/vm/method.cpp | 4 ++-- src/coreclr/vm/methodtable.h | 11 +++++++++-- src/coreclr/vm/methodtable.inl | 2 +- 7 files changed, 25 insertions(+), 12 deletions(-) diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp index 186e45a33139e..bea6c7000fa8d 100644 --- a/src/coreclr/debug/daccess/request.cpp +++ b/src/coreclr/debug/daccess/request.cpp @@ -452,6 +452,7 @@ ClrDataAccess::GetMethodTableSlot(CLRDATA_ADDRESS mt, unsigned int slot, CLRDATA { hr = S_OK; } + break; } } } @@ -496,11 +497,11 @@ HRESULT DacMethodTableSlotEnumerator::Init(PTR_MethodTable mTable) { unsigned int slot = 0; - SOSMethodData methodData; WORD numVtableSlots = mTable->GetNumVtableSlots(); while (slot < numVtableSlots) { MethodDesc* pMD = mTable->GetMethodDescForSlot_NoThrow(slot); + SOSMethodData methodData = {0}; methodData.MethodDesc = HOST_CDADDR(pMD); methodData.Entrypoint = mTable->GetSlot(slot); methodData.DefininingMethodTable = PTR_CDADDR(pMD->GetMethodTable()); @@ -520,6 +521,7 @@ HRESULT DacMethodTableSlotEnumerator::Init(PTR_MethodTable mTable) WORD slot = pMD->GetSlot(); if (slot >= numVtableSlots) { + SOSMethodData methodData = {0}; methodData.MethodDesc = HOST_CDADDR(pMD); methodData.Entrypoint = pMD->GetMethodEntryPointIfExists(); methodData.DefininingMethodTable = PTR_CDADDR(pMD->GetMethodTable()); diff --git a/src/coreclr/vm/class.cpp b/src/coreclr/vm/class.cpp index 9ac7e1e41f405..9bbc027af4996 100644 --- a/src/coreclr/vm/class.cpp +++ b/src/coreclr/vm/class.cpp @@ -1407,7 +1407,7 @@ void ClassLoader::ValidateMethodsWithCovariantReturnTypes(MethodTable* pMT) { // The real check is that the MethodDesc's must not match, but a simple VTable check will // work most of the time, and is far faster than the GetMethodDescForSlot method. - _ASSERTE(pMT->GetMethodDescForSlot(i) == pParentMT->GetMethodDescForSlot(i)); + _ASSERTE(pMT->GetMethodDescForSlot_NoThrow(i) == pParentMT->GetMethodDescForSlot_NoThrow(i)); continue; } MethodDesc* pMD = pMT->GetMethodDescForSlot(i); @@ -1525,7 +1525,7 @@ void ClassLoader::PropagateCovariantReturnMethodImplSlots(MethodTable* pMT) { // The real check is that the MethodDesc's must not match, but a simple VTable check will // work most of the time, and is far faster than the GetMethodDescForSlot method. 
- _ASSERTE(pMT->GetMethodDescForSlot(i) == pParentMT->GetMethodDescForSlot(i)); + _ASSERTE(pMT->GetMethodDescForSlot_NoThrow(i) == pParentMT->GetMethodDescForSlot_NoThrow(i)); continue; } @@ -1575,7 +1575,7 @@ void ClassLoader::PropagateCovariantReturnMethodImplSlots(MethodTable* pMT) // This is a vtable slot that needs to be updated to the new overriding method because of the // presence of the attribute. pMT->SetSlot(j, pMT->GetSlot(i)); - _ASSERT(pMT->GetMethodDescForSlot(j) == pMD); + _ASSERT(pMT->GetMethodDescForSlot_NoThrow(j) == pMD); if (!hMTData.IsNull()) hMTData->UpdateImplMethodDesc(pMD, j); diff --git a/src/coreclr/vm/classcompat.cpp b/src/coreclr/vm/classcompat.cpp index ece9384703b2b..81145deddfeca 100644 --- a/src/coreclr/vm/classcompat.cpp +++ b/src/coreclr/vm/classcompat.cpp @@ -1478,7 +1478,7 @@ VOID MethodTableBuilder::BuildInteropVTable_PlaceVtableMethods( // Set the slot and interop data bmtVT->SetMethodDescForSlot(dwSlot, pMD); bmtVT->ppSDVtable[dwSlot] = pMDData; - _ASSERTE( bmtVT->GetMethodDescForSlot(dwSlot) != NULL); + _ASSERTE( bmtVT->GetMethodDescForSlot_NoThrow(dwSlot) != NULL); bmtInterface->ppInterfaceMethodDescList[it.GetSlotNumber()] = NULL; bmtInterface->ppInterfaceDeclMethodDescList[it.GetSlotNumber()] = NULL; } @@ -1513,9 +1513,9 @@ VOID MethodTableBuilder::BuildInteropVTable_PlaceVtableMethods( } // Set the vtable slot - _ASSERTE(bmtVT->GetMethodDescForSlot(bmtVT->wCurrentVtableSlot) == NULL); + _ASSERTE(bmtVT->GetMethodDescForSlot_NoThrow(bmtVT->wCurrentVtableSlot) == NULL); bmtVT->SetMethodDescForSlot(bmtVT->wCurrentVtableSlot, pMD); - _ASSERTE(bmtVT->GetMethodDescForSlot(bmtVT->wCurrentVtableSlot) != NULL); + _ASSERTE(bmtVT->GetMethodDescForSlot_NoThrow(bmtVT->wCurrentVtableSlot) != NULL); bmtVT->ppSDVtable[bmtVT->wCurrentVtableSlot] = pMDData; // Increment the current vtable slot diff --git a/src/coreclr/vm/clsload.cpp b/src/coreclr/vm/clsload.cpp index d41a2d6bd180d..3e735d6bbe764 100644 --- a/src/coreclr/vm/clsload.cpp +++ b/src/coreclr/vm/clsload.cpp @@ -2786,6 +2786,10 @@ TypeHandle ClassLoader::PublishType(const TypeKey *pTypeKey, TypeHandle typeHnd) #ifdef _DEBUG if (!typeHnd.IsTypeDesc()) { + // The IsPublished flag is used by various asserts to assure that allocations of + // MethodTable associated memory which do not use the AllocMemTracker of the MethodTableBuilder + // aren't permitted until the MethodTable is in a state where the MethodTable object + // cannot be freed (except by freeing an entire LoaderAllocator) typeHnd.AsMethodTable()->GetAuxiliaryDataForWrite()->SetIsPublished(); } #endif diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 59d3ad2c93683..4788a278a667b 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -970,8 +970,8 @@ BYTE MethodDesc::InterlockedUpdateFlags4(BYTE bMask, BOOL fSet) DWORD dwMask = bMask; // We need to make this operation atomic (multiple threads can play with the flags field at the same time). But the flags field - // is a word and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned - // dword (along with the intended bitfield mask). Note that we make the assumption that the flags word is aligned itself, so we + // is a byte and we only have interlock operations over dwords. So we round down the flags field address to the nearest aligned + // dword (along with the intended bitfield mask). 
Note that we make the assumption that the flags byte is aligned itself, so we // only have four possibilities: the field already lies on a dword boundary or it's 1, 2 or 3 bytes out LONG* pdwFlags = (LONG*)((ULONG_PTR)&m_bFlags4 - (offsetof(MethodDesc, m_bFlags4) & 0x3)); diff --git a/src/coreclr/vm/methodtable.h b/src/coreclr/vm/methodtable.h index 654a8b658fad6..3126d3acb873d 100644 --- a/src/coreclr/vm/methodtable.h +++ b/src/coreclr/vm/methodtable.h @@ -333,6 +333,8 @@ struct MethodTableAuxiliaryData enum_flag_HasApproxParent = 0x0010, #ifdef _DEBUG + // The MethodTable is in the right state to be published, and will be inevitably. + // Currently DEBUG only as it does not affect behavior in any way in a release build enum_flag_IsPublished = 0x0020, #endif enum_flag_IsNotFullyLoaded = 0x0040, @@ -500,15 +502,16 @@ struct MethodTableAuxiliaryData #ifdef _DEBUG #ifndef DACCESS_COMPILE + // Used in DEBUG builds to indicate that the MethodTable is in the right state to be published, and will be inevitably. void SetIsPublished() { LIMITED_METHOD_CONTRACT; - - // Array's parent is always precise m_dwFlags |= (MethodTableAuxiliaryData::enum_flag_IsPublished); } #endif + // The MethodTable is in the right state to be published, and will be inevitably. + // Currently DEBUG only as it does not affect behavior in any way in a release build bool IsPublished() const { LIMITED_METHOD_CONTRACT; @@ -1637,6 +1640,10 @@ class MethodTable // Slots <-> the MethodDesc associated with the slot. // + // Get the MethodDesc that implements a given slot + // NOTE: Since this may fill in the slot with a temporary entrypoint if that hasn't happened + // yet, when writing asserts, GetMethodDescForSlot_NoThrow should be used to avoid + // the presence of an assert hiding bugs. 
MethodDesc* GetMethodDescForSlot(DWORD slot); // This api produces the same result as GetMethodDescForSlot, but it uses a variation on the diff --git a/src/coreclr/vm/methodtable.inl b/src/coreclr/vm/methodtable.inl index 72983211f78b1..37600f2644008 100644 --- a/src/coreclr/vm/methodtable.inl +++ b/src/coreclr/vm/methodtable.inl @@ -478,7 +478,7 @@ inline void MethodTable::CopySlotFrom(UINT32 slotNumber, MethodDataWrapper &hSou MethodDesc *pMD = hSourceMTData->GetImplMethodDesc(slotNumber); _ASSERTE(CheckPointer(pMD)); - _ASSERTE(pMD == pSourceMT->GetMethodDescForSlot(slotNumber)); + _ASSERTE(pMD == pSourceMT->GetMethodDescForSlot_NoThrow(slotNumber)); SetSlot(slotNumber, pMD->GetInitialEntryPointForCopiedSlot(NULL, NULL)); } From 29571eb406b54b56baab00bad5863764a7fa7169 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Wed, 3 Jul 2024 14:13:45 -0700 Subject: [PATCH 47/51] Fix classcompat build --- src/coreclr/vm/classcompat.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/coreclr/vm/classcompat.cpp b/src/coreclr/vm/classcompat.cpp index 81145deddfeca..ece9384703b2b 100644 --- a/src/coreclr/vm/classcompat.cpp +++ b/src/coreclr/vm/classcompat.cpp @@ -1478,7 +1478,7 @@ VOID MethodTableBuilder::BuildInteropVTable_PlaceVtableMethods( // Set the slot and interop data bmtVT->SetMethodDescForSlot(dwSlot, pMD); bmtVT->ppSDVtable[dwSlot] = pMDData; - _ASSERTE( bmtVT->GetMethodDescForSlot_NoThrow(dwSlot) != NULL); + _ASSERTE( bmtVT->GetMethodDescForSlot(dwSlot) != NULL); bmtInterface->ppInterfaceMethodDescList[it.GetSlotNumber()] = NULL; bmtInterface->ppInterfaceDeclMethodDescList[it.GetSlotNumber()] = NULL; } @@ -1513,9 +1513,9 @@ VOID MethodTableBuilder::BuildInteropVTable_PlaceVtableMethods( } // Set the vtable slot - _ASSERTE(bmtVT->GetMethodDescForSlot_NoThrow(bmtVT->wCurrentVtableSlot) == NULL); + _ASSERTE(bmtVT->GetMethodDescForSlot(bmtVT->wCurrentVtableSlot) == NULL); bmtVT->SetMethodDescForSlot(bmtVT->wCurrentVtableSlot, pMD); - _ASSERTE(bmtVT->GetMethodDescForSlot_NoThrow(bmtVT->wCurrentVtableSlot) != NULL); + _ASSERTE(bmtVT->GetMethodDescForSlot(bmtVT->wCurrentVtableSlot) != NULL); bmtVT->ppSDVtable[bmtVT->wCurrentVtableSlot] = pMDData; // Increment the current vtable slot From 3ce4e4a5688b7e05d75e4948359a2f87f9185406 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 5 Jul 2024 10:22:26 -0700 Subject: [PATCH 48/51] Update src/coreclr/vm/method.cpp Co-authored-by: Aaron Robinson --- src/coreclr/vm/method.cpp | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 4788a278a667b..b513e1d5955fd 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -2771,14 +2771,8 @@ void MethodDescChunk::DetermineAndSetIsEligibleForTieredCompilation() { for (int i = 0; i < count; ++i) { - if (pMD->DetermineAndSetIsEligibleForTieredCompilation()) - { - _ASSERTE(pMD->IsEligibleForTieredCompilation_NoCheckMethodDescChunk()); - } - else - { - _ASSERTE(!pMD->IsEligibleForTieredCompilation_NoCheckMethodDescChunk()); - } + bool isEligible = pMD->DetermineAndSetIsEligibleForTieredCompilation(); + _ASSERTE(isEligible == pMD->IsEligibleForTieredCompilation_NoCheckMethodDescChunk()); pMD = (MethodDesc *)(dac_cast(pMD) + pMD->SizeOf()); } From a4c8803e0429778fbc98d62a70a78a1da1400eab Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Tue, 9 Jul 2024 09:59:27 -0700 Subject: [PATCH 49/51] Remove assert that is invalid because TryGetMulticCallableAddrOfCode can return NULL ... 
and then another thread could produce a stable entrypoint and the assert could lose the race --- src/coreclr/vm/arm/stubs.cpp | 1 - src/coreclr/vm/arm64/stubs.cpp | 1 - src/coreclr/vm/i386/stublinkerx86.cpp | 3 --- src/coreclr/vm/loongarch64/stubs.cpp | 1 - src/coreclr/vm/riscv64/stubs.cpp | 1 - 5 files changed, 7 deletions(-) diff --git a/src/coreclr/vm/arm/stubs.cpp b/src/coreclr/vm/arm/stubs.cpp index 7e54e0d4de156..fe58e072c1195 100644 --- a/src/coreclr/vm/arm/stubs.cpp +++ b/src/coreclr/vm/arm/stubs.cpp @@ -1392,7 +1392,6 @@ void StubLinkerCPU::ThumbEmitTailCallManagedMethod(MethodDesc *pMD) } else { - _ASSERTE(!pMD->HasStableEntryPoint()); // mov r12, #slotaddress ThumbEmitMovConstant(ThumbReg(12), (TADDR)pMD->GetAddrOfSlot()); diff --git a/src/coreclr/vm/arm64/stubs.cpp b/src/coreclr/vm/arm64/stubs.cpp index 02d6aaaae5e6f..f12caa8583488 100644 --- a/src/coreclr/vm/arm64/stubs.cpp +++ b/src/coreclr/vm/arm64/stubs.cpp @@ -1639,7 +1639,6 @@ void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) } else { - _ASSERTE(!pMD->HasStableEntryPoint()); EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE); } } diff --git a/src/coreclr/vm/i386/stublinkerx86.cpp b/src/coreclr/vm/i386/stublinkerx86.cpp index 9af0ee3e86521..87b4d69dfbda4 100644 --- a/src/coreclr/vm/i386/stublinkerx86.cpp +++ b/src/coreclr/vm/i386/stublinkerx86.cpp @@ -3119,7 +3119,6 @@ VOID StubLinkerCPU::EmitLoadMethodAddressIntoAX(MethodDesc *pMD) } else { - _ASSERTE(!pMD->HasStableEntryPoint()); X86EmitRegLoad(kRAX, (UINT_PTR)pMD->GetAddrOfSlot()); // MOV RAX, DWORD X86EmitIndexRegLoad(kRAX, kRAX); // MOV RAX, [RAX] @@ -3143,8 +3142,6 @@ VOID StubLinkerCPU::EmitTailJumpToMethod(MethodDesc *pMD) } else { - _ASSERTE(!pMD->HasStableEntryPoint()); - // jmp [slot] Emit16(0x25ff); Emit32((DWORD)(size_t)pMD->GetAddrOfSlot()); diff --git a/src/coreclr/vm/loongarch64/stubs.cpp b/src/coreclr/vm/loongarch64/stubs.cpp index bb375189fc7d0..8f9b46325db37 100644 --- a/src/coreclr/vm/loongarch64/stubs.cpp +++ b/src/coreclr/vm/loongarch64/stubs.cpp @@ -1490,7 +1490,6 @@ void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) } else { - _ASSERTE(!pMD->HasStableEntryPoint()); EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE); } } diff --git a/src/coreclr/vm/riscv64/stubs.cpp b/src/coreclr/vm/riscv64/stubs.cpp index 559e0b70de033..507248848c6bb 100644 --- a/src/coreclr/vm/riscv64/stubs.cpp +++ b/src/coreclr/vm/riscv64/stubs.cpp @@ -1535,7 +1535,6 @@ void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall) } else { - _ASSERTE(!pMD->HasStableEntryPoint()); EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE); } } From 34e9a75e56a8b408bc109a5130ada25976c33f75 Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Fri, 12 Jul 2024 16:27:02 -0700 Subject: [PATCH 50/51] Final (hopefully) code review tweaks. --- src/coreclr/vm/method.cpp | 13 +++---------- src/coreclr/vm/methodtable.cpp | 2 +- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 7826b9f0367db..17964fd005c83 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -3089,20 +3089,13 @@ void MethodDesc::SetCodeEntryPoint(PCODE entryPoint) // can continue assuming it was successful, similarly to it successfully updating the target and another thread // updating the target again shortly afterwards. 
} - else if (HasPrecode()) { - GetPrecode()->SetTargetInterlocked(entryPoint); + else if (RequiresStableEntryPoint()) + { + GetOrCreatePrecode()->SetTargetInterlocked(entryPoint); } else if (!HasStableEntryPoint()) { - if (RequiresStableEntryPoint()) - { - GetOrCreatePrecode()->SetTargetInterlocked(entryPoint); - } - else - { - SetStableEntryPointInterlocked(entryPoint); - } + SetStableEntryPointInterlocked(entryPoint); } } diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 22735df356d27..e6c78910ded7c 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -6725,7 +6725,7 @@ MethodDesc *MethodTable::MethodDataObject::GetImplMethodDesc(UINT32 slotNumber) { CONTRACTL { - THROWS; + NOTHROW; GC_NOTRIGGER; MODE_ANY; } From 2ed2b9c9fcb8380f113b8201247c50e683c8fd2f Mon Sep 17 00:00:00 2001 From: David Wrighton Date: Sun, 14 Jul 2024 08:06:10 -0700 Subject: [PATCH 51/51] It's possible for GetOrCreatePrecode to be called for cases where it isn't REQUIRED. We need to handle that case. --- src/coreclr/vm/method.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp index 17964fd005c83..4662b86d8300c 100644 --- a/src/coreclr/vm/method.cpp +++ b/src/coreclr/vm/method.cpp @@ -3089,8 +3089,11 @@ void MethodDesc::SetCodeEntryPoint(PCODE entryPoint) // can continue assuming it was successful, similarly to it successfully updating the target and another thread // updating the target again shortly afterwards. } - else if (RequiresStableEntryPoint()) + else if (HasPrecode() || RequiresStableEntryPoint()) { + // Use this path if there already exists a Precode, OR if RequiresStableEntryPoint is set. + // + // RequiresStableEntryPoint currently requires that the entrypoint must be a Precode GetOrCreatePrecode()->SetTargetInterlocked(entryPoint); } else if (!HasStableEntryPoint())
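
The following hypothetical helper (not part of the runtime sources; names invented for clarity) summarizes the decision that MethodDesc::SetCodeEntryPoint ends up making after patches 50 and 51. The first condition paraphrases the versionable/slot-backpatching branch whose body is not shown in the excerpt above.

// Hypothetical illustration only - condensed from the SetCodeEntryPoint hunks in patches 50/51.
enum class EntryPointUpdatePath { Backpatch, PrecodeTarget, StableEntryPoint, None };

static EntryPointUpdatePath ChooseUpdatePath(bool usesSlotBackpatch,
                                             bool hasPrecode,
                                             bool requiresStableEntryPoint,
                                             bool hasStableEntryPoint)
{
    if (usesSlotBackpatch)
        return EntryPointUpdatePath::Backpatch;        // versionable methods: recorded via the backpatch infrastructure
    if (hasPrecode || requiresStableEntryPoint)
        return EntryPointUpdatePath::PrecodeTarget;    // GetOrCreatePrecode()->SetTargetInterlocked(entryPoint)
    if (!hasStableEntryPoint)
        return EntryPointUpdatePath::StableEntryPoint; // SetStableEntryPointInterlocked(entryPoint)
    return EntryPointUpdatePath::None;                 // a stable entry point already exists; nothing to update
}

The behavioral point of patch 51 is the hasPrecode disjunct: a Precode that already exists is retargeted even when RequiresStableEntryPoint() is false, rather than falling through to SetStableEntryPointInterlocked().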