diff --git a/src/ZstdSharp/Unsafe/HUF_CStream_t.cs b/src/ZstdSharp/Unsafe/HUF_CStream_t.cs index 5bae54a..c28390d 100644 --- a/src/ZstdSharp/Unsafe/HUF_CStream_t.cs +++ b/src/ZstdSharp/Unsafe/HUF_CStream_t.cs @@ -1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct HUF_CStream_t @@ -14,52 +11,12 @@ public unsafe struct _bitContainer_e__FixedBuffer { public nuint e0; public nuint e1; - public ref nuint this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_bitContainer_e__FixedBuffer, nuint>(this) + index); - } - - public ref nuint this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_bitContainer_e__FixedBuffer, nuint>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator nuint*(in _bitContainer_e__FixedBuffer t) => RefToPointer<_bitContainer_e__FixedBuffer, nuint>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static nuint* operator +(in _bitContainer_e__FixedBuffer t, nuint index) => RefToPointer<_bitContainer_e__FixedBuffer, nuint>(t) + index; } public unsafe struct _bitPos_e__FixedBuffer { public nuint e0; public nuint e1; - public ref nuint this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_bitPos_e__FixedBuffer, nuint>(this) + index); - } - - public ref nuint this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_bitPos_e__FixedBuffer, nuint>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator nuint*(in _bitPos_e__FixedBuffer t) => RefToPointer<_bitPos_e__FixedBuffer, nuint>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static nuint* operator +(in _bitPos_e__FixedBuffer t, nuint index) => RefToPointer<_bitPos_e__FixedBuffer, nuint>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/HUF_DecompressFastArgs.cs b/src/ZstdSharp/Unsafe/HUF_DecompressFastArgs.cs index 3eef488..fe2190a 100644 --- a/src/ZstdSharp/Unsafe/HUF_DecompressFastArgs.cs +++ b/src/ZstdSharp/Unsafe/HUF_DecompressFastArgs.cs @@ -1,8 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; -using InlineIL; -using static InlineIL.IL.Emit; - namespace ZstdSharp.Unsafe { /** @@ -32,65 +27,6 @@ public unsafe struct _ip_e__FixedBuffer public byte* e1; public byte* e2; public byte* e3; - public ref byte* this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return ref *(byte**)IL.ReturnPointer(); - } - } - - public ref byte* this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return ref *(byte**)IL.ReturnPointer(); - } - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator byte**(in _ip_e__FixedBuffer t) - { - Ldarg_0(); - Conv_U(); - return 
(byte**)IL.ReturnPointer(); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static byte** operator +(in _ip_e__FixedBuffer t, nuint index) - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return (byte**)IL.ReturnPointer(); - } } public unsafe struct _op_e__FixedBuffer @@ -99,65 +35,6 @@ public unsafe struct _op_e__FixedBuffer public byte* e1; public byte* e2; public byte* e3; - public ref byte* this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return ref *(byte**)IL.ReturnPointer(); - } - } - - public ref byte* this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return ref *(byte**)IL.ReturnPointer(); - } - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator byte**(in _op_e__FixedBuffer t) - { - Ldarg_0(); - Conv_U(); - return (byte**)IL.ReturnPointer(); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static byte** operator +(in _op_e__FixedBuffer t, nuint index) - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return (byte**)IL.ReturnPointer(); - } } public unsafe struct _iend_e__FixedBuffer @@ -166,65 +43,6 @@ public unsafe struct _iend_e__FixedBuffer public byte* e1; public byte* e2; public byte* e3; - public ref byte* this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return ref *(byte**)IL.ReturnPointer(); - } - } - - public ref byte* this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return ref *(byte**)IL.ReturnPointer(); - } - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator byte**(in _iend_e__FixedBuffer t) - { - Ldarg_0(); - Conv_U(); - return (byte**)IL.ReturnPointer(); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static byte** operator +(in _iend_e__FixedBuffer t, nuint index) - { - Ldarg_0(); - Conv_U(); - Ldarg_1(); - Conv_I(); - Sizeof(new TypeRef(typeof(byte*))); - Conv_I(); - Mul(); - Add(); - return (byte**)IL.ReturnPointer(); - } } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/HUF_ReadDTableX2_Workspace.cs b/src/ZstdSharp/Unsafe/HUF_ReadDTableX2_Workspace.cs index 2c32a80..af92d16 100644 --- a/src/ZstdSharp/Unsafe/HUF_ReadDTableX2_Workspace.cs +++ b/src/ZstdSharp/Unsafe/HUF_ReadDTableX2_Workspace.cs @@ -1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct HUF_ReadDTableX2_Workspace @@ -25,26 +22,6 @@ public unsafe struct _rankVal_e__FixedBuffer public rankValCol_t e9; public rankValCol_t e10; public rankValCol_t e11; - public ref rankValCol_t this[nuint index] - 
{ - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_rankVal_e__FixedBuffer, rankValCol_t>(this) + index); - } - - public ref rankValCol_t this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_rankVal_e__FixedBuffer, rankValCol_t>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator rankValCol_t*(in _rankVal_e__FixedBuffer t) => RefToPointer<_rankVal_e__FixedBuffer, rankValCol_t>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static rankValCol_t* operator +(in _rankVal_e__FixedBuffer t, nuint index) => RefToPointer<_rankVal_e__FixedBuffer, rankValCol_t>(t) + index; } public unsafe struct _sortedSymbol_e__FixedBuffer @@ -305,26 +282,6 @@ public unsafe struct _sortedSymbol_e__FixedBuffer public sortedSymbol_t e253; public sortedSymbol_t e254; public sortedSymbol_t e255; - public ref sortedSymbol_t this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_sortedSymbol_e__FixedBuffer, sortedSymbol_t>(this) + index); - } - - public ref sortedSymbol_t this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_sortedSymbol_e__FixedBuffer, sortedSymbol_t>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator sortedSymbol_t*(in _sortedSymbol_e__FixedBuffer t) => RefToPointer<_sortedSymbol_e__FixedBuffer, sortedSymbol_t>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static sortedSymbol_t* operator +(in _sortedSymbol_e__FixedBuffer t, nuint index) => RefToPointer<_sortedSymbol_e__FixedBuffer, sortedSymbol_t>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/HUF_buildCTable_wksp_tables.cs b/src/ZstdSharp/Unsafe/HUF_buildCTable_wksp_tables.cs index 36ad23b..e3fcf43 100644 --- a/src/ZstdSharp/Unsafe/HUF_buildCTable_wksp_tables.cs +++ b/src/ZstdSharp/Unsafe/HUF_buildCTable_wksp_tables.cs @@ -1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public struct HUF_buildCTable_wksp_tables @@ -521,26 +518,6 @@ public unsafe struct _huffNodeTbl_e__FixedBuffer public nodeElt_s e509; public nodeElt_s e510; public nodeElt_s e511; - public ref nodeElt_s this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_huffNodeTbl_e__FixedBuffer, nodeElt_s>(this) + index); - } - - public ref nodeElt_s this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_huffNodeTbl_e__FixedBuffer, nodeElt_s>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator nodeElt_s*(in _huffNodeTbl_e__FixedBuffer t) => RefToPointer<_huffNodeTbl_e__FixedBuffer, nodeElt_s>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static nodeElt_s* operator +(in _huffNodeTbl_e__FixedBuffer t, nuint index) => RefToPointer<_huffNodeTbl_e__FixedBuffer, nodeElt_s>(t) + index; } public unsafe struct _rankPosition_e__FixedBuffer @@ -737,26 +714,6 @@ public unsafe struct _rankPosition_e__FixedBuffer public 
rankPos e189; public rankPos e190; public rankPos e191; - public ref rankPos this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_rankPosition_e__FixedBuffer, rankPos>(this) + index); - } - - public ref rankPos this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_rankPosition_e__FixedBuffer, rankPos>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator rankPos*(in _rankPosition_e__FixedBuffer t) => RefToPointer<_rankPosition_e__FixedBuffer, rankPos>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static rankPos* operator +(in _rankPosition_e__FixedBuffer t, nuint index) => RefToPointer<_rankPosition_e__FixedBuffer, rankPos>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/HUF_compress_tables_t.cs b/src/ZstdSharp/Unsafe/HUF_compress_tables_t.cs index 3bd525d..ada6574 100644 --- a/src/ZstdSharp/Unsafe/HUF_compress_tables_t.cs +++ b/src/ZstdSharp/Unsafe/HUF_compress_tables_t.cs @@ -1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct HUF_compress_tables_t @@ -267,26 +264,6 @@ public unsafe struct _CTable_e__FixedBuffer public nuint e254; public nuint e255; public nuint e256; - public ref nuint this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_CTable_e__FixedBuffer, nuint>(this) + index); - } - - public ref nuint this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_CTable_e__FixedBuffer, nuint>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator nuint*(in _CTable_e__FixedBuffer t) => RefToPointer<_CTable_e__FixedBuffer, nuint>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static nuint* operator +(in _CTable_e__FixedBuffer t, nuint index) => RefToPointer<_CTable_e__FixedBuffer, nuint>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/HufCompress.cs b/src/ZstdSharp/Unsafe/HufCompress.cs index 79a79a3..68b04ca 100644 --- a/src/ZstdSharp/Unsafe/HufCompress.cs +++ b/src/ZstdSharp/Unsafe/HufCompress.cs @@ -612,7 +612,7 @@ private static void HUF_buildCTableFromTree(nuint* CTable, nodeElt_s* huffNode, private static nuint HUF_buildCTable_wksp(nuint* CTable, uint* count, uint maxSymbolValue, uint maxNbBits, void* workSpace, nuint wkspSize) { HUF_buildCTable_wksp_tables* wksp_tables = (HUF_buildCTable_wksp_tables*)HUF_alignUpWorkspace(workSpace, &wkspSize, sizeof(uint)); - nodeElt_s* huffNode0 = (nodeElt_s*)wksp_tables->huffNodeTbl; + nodeElt_s* huffNode0 = &wksp_tables->huffNodeTbl.e0; nodeElt_s* huffNode = huffNode0 + 1; int nonNullRank; if (wkspSize < (nuint)sizeof(HUF_buildCTable_wksp_tables)) @@ -622,7 +622,7 @@ private static nuint HUF_buildCTable_wksp(nuint* CTable, uint* count, uint maxSy if (maxSymbolValue > 255) return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_maxSymbolValue_tooLarge)); memset(huffNode0, 0, (uint)(sizeof(nodeElt_s) * 512)); - HUF_sort(huffNode, count, maxSymbolValue, wksp_tables->rankPosition); + HUF_sort(huffNode, count, maxSymbolValue, &wksp_tables->rankPosition.e0); nonNullRank = 
HUF_buildTree(huffNode, maxSymbolValue); maxNbBits = HUF_setMaxHeight(huffNode, (uint)nonNullRank, maxNbBits); if (maxNbBits > 12) @@ -693,18 +693,18 @@ private static void HUF_addBits(HUF_CStream_t* bitC, nuint elt, int idx, int kFa { assert(idx <= 1); assert(HUF_getNbBits(elt) <= 12); - bitC->bitContainer[idx] >>= (int)HUF_getNbBits(elt); - bitC->bitContainer[idx] |= kFast != 0 ? HUF_getValueFast(elt) : HUF_getValue(elt); - bitC->bitPos[idx] += HUF_getNbBitsFast(elt); - assert((bitC->bitPos[idx] & 0xFF) <= (nuint)(sizeof(nuint) * 8)); + (&bitC->bitContainer.e0)[idx] >>= (int)HUF_getNbBits(elt); + (&bitC->bitContainer.e0)[idx] |= kFast != 0 ? HUF_getValueFast(elt) : HUF_getValue(elt); + (&bitC->bitPos.e0)[idx] += HUF_getNbBitsFast(elt); + assert(((&bitC->bitPos.e0)[idx] & 0xFF) <= (nuint)(sizeof(nuint) * 8)); } [MethodImpl(MethodImplOptions.AggressiveInlining)] [InlineMethod.Inline] private static void HUF_zeroIndex1(HUF_CStream_t* bitC) { - bitC->bitContainer[1] = 0; - bitC->bitPos[1] = 0; + bitC->bitContainer.e1 = 0; + bitC->bitPos.e1 = 0; } /*! HUF_mergeIndex1() : @@ -715,11 +715,11 @@ private static void HUF_zeroIndex1(HUF_CStream_t* bitC) [InlineMethod.Inline] private static void HUF_mergeIndex1(HUF_CStream_t* bitC) { - assert((bitC->bitPos[1] & 0xFF) < (nuint)(sizeof(nuint) * 8)); - bitC->bitContainer[0] >>= (int)(bitC->bitPos[1] & 0xFF); - bitC->bitContainer[0] |= bitC->bitContainer[1]; - bitC->bitPos[0] += bitC->bitPos[1]; - assert((bitC->bitPos[0] & 0xFF) <= (nuint)(sizeof(nuint) * 8)); + assert((bitC->bitPos.e1 & 0xFF) < (nuint)(sizeof(nuint) * 8)); + bitC->bitContainer.e0 >>= (int)(bitC->bitPos.e1 & 0xFF); + bitC->bitContainer.e0 |= bitC->bitContainer.e1; + bitC->bitPos.e0 += bitC->bitPos.e1; + assert((bitC->bitPos.e0 & 0xFF) <= (nuint)(sizeof(nuint) * 8)); } /*! HUF_flushBits() : @@ -734,11 +734,11 @@ private static void HUF_mergeIndex1(HUF_CStream_t* bitC) private static void HUF_flushBits(HUF_CStream_t* bitC, int kFast) { /* The upper bits of bitPos are noisy, so we must mask by 0xFF. */ - nuint nbBits = bitC->bitPos[0] & 0xFF; + nuint nbBits = bitC->bitPos.e0 & 0xFF; nuint nbBytes = nbBits >> 3; /* The top nbBits bits of bitContainer are the ones we need. */ - nuint bitContainer = bitC->bitContainer[0] >> (int)((nuint)(sizeof(nuint) * 8) - nbBits); - bitC->bitPos[0] &= 7; + nuint bitContainer = bitC->bitContainer.e0 >> (int)((nuint)(sizeof(nuint) * 8) - nbBits); + bitC->bitPos.e0 &= 7; assert(nbBits > 0); assert(nbBits <= (nuint)(sizeof(nuint) * 8)); assert(bitC->ptr <= bitC->endPtr); @@ -768,7 +768,7 @@ private static nuint HUF_closeCStream(HUF_CStream_t* bitC) HUF_addBits(bitC, HUF_endMark(), 0, 0); HUF_flushBits(bitC, 0); { - nuint nbBits = bitC->bitPos[0] & 0xFF; + nuint nbBits = bitC->bitPos.e0 & 0xFF; if (bitC->ptr >= bitC->endPtr) return 0; return (nuint)(bitC->ptr - bitC->startPtr) + (nuint)(nbBits > 0 ? 
1 : 0); @@ -1176,9 +1176,9 @@ private static nuint HUF_compress_internal(void* dst, nuint dstSize, void* src, return HUF_compressCTable_internal(ostart, op, oend, src, srcSize, nbStreams, oldHufTable, flags); } - huffLog = HUF_optimalTableLog(huffLog, srcSize, maxSymbolValue, &table->wksps, (nuint)sizeof(_wksps_e__Union), (nuint*)table->CTable, table->count, flags); + huffLog = HUF_optimalTableLog(huffLog, srcSize, maxSymbolValue, &table->wksps, (nuint)sizeof(_wksps_e__Union), &table->CTable.e0, table->count, flags); { - nuint maxBits = HUF_buildCTable_wksp((nuint*)table->CTable, table->count, maxSymbolValue, huffLog, &table->wksps.buildCTable_wksp, (nuint)sizeof(HUF_buildCTable_wksp_tables)); + nuint maxBits = HUF_buildCTable_wksp(&table->CTable.e0, table->count, maxSymbolValue, huffLog, &table->wksps.buildCTable_wksp, (nuint)sizeof(HUF_buildCTable_wksp_tables)); { nuint _var_err__ = maxBits; if (ERR_isError(_var_err__)) @@ -1191,17 +1191,17 @@ private static nuint HUF_compress_internal(void* dst, nuint dstSize, void* src, { nuint ctableSize = maxSymbolValue + 2; nuint unusedSize = sizeof(ulong) * 257 - ctableSize * (nuint)sizeof(nuint); - memset(table->CTable + ctableSize, 0, (uint)unusedSize); + memset(&table->CTable.e0 + ctableSize, 0, (uint)unusedSize); } { - nuint hSize = HUF_writeCTable_wksp(op, dstSize, (nuint*)table->CTable, maxSymbolValue, huffLog, &table->wksps.writeCTable_wksp, (nuint)sizeof(HUF_WriteCTableWksp)); + nuint hSize = HUF_writeCTable_wksp(op, dstSize, &table->CTable.e0, maxSymbolValue, huffLog, &table->wksps.writeCTable_wksp, (nuint)sizeof(HUF_WriteCTableWksp)); if (ERR_isError(hSize)) return hSize; if (repeat != null && *repeat != HUF_repeat.HUF_repeat_none) { nuint oldSize = HUF_estimateCompressedSize(oldHufTable, table->count, maxSymbolValue); - nuint newSize = HUF_estimateCompressedSize((nuint*)table->CTable, table->count, maxSymbolValue); + nuint newSize = HUF_estimateCompressedSize(&table->CTable.e0, table->count, maxSymbolValue); if (oldSize <= hSize + newSize || hSize + 12 >= srcSize) { return HUF_compressCTable_internal(ostart, op, oend, src, srcSize, nbStreams, oldHufTable, flags); @@ -1220,10 +1220,10 @@ private static nuint HUF_compress_internal(void* dst, nuint dstSize, void* src, } if (oldHufTable != null) - memcpy(oldHufTable, (void*)table->CTable, sizeof(ulong) * 257); + memcpy(oldHufTable, &table->CTable.e0, sizeof(ulong) * 257); } - return HUF_compressCTable_internal(ostart, op, oend, src, srcSize, nbStreams, (nuint*)table->CTable, flags); + return HUF_compressCTable_internal(ostart, op, oend, src, srcSize, nbStreams, &table->CTable.e0, flags); } /** HUF_compress1X_repeat() : diff --git a/src/ZstdSharp/Unsafe/HufDecompress.cs b/src/ZstdSharp/Unsafe/HufDecompress.cs index 726d01e..0fe5420 100644 --- a/src/ZstdSharp/Unsafe/HufDecompress.cs +++ b/src/ZstdSharp/Unsafe/HufDecompress.cs @@ -47,30 +47,30 @@ private static nuint HUF_DecompressFastArgs_init(HUF_DecompressFastArgs* args, v nuint length2 = MEM_readLE16(istart + 2); nuint length3 = MEM_readLE16(istart + 4); nuint length4 = srcSize - (length1 + length2 + length3 + 6); - args->iend[0] = istart + 6; - args->iend[1] = args->iend[0] + length1; - args->iend[2] = args->iend[1] + length2; - args->iend[3] = args->iend[2] + length3; + args->iend.e0 = istart + 6; + args->iend.e1 = args->iend.e0 + length1; + args->iend.e2 = args->iend.e1 + length2; + args->iend.e3 = args->iend.e2 + length3; if (length1 < 16 || length2 < 8 || length3 < 8 || length4 < 8) return 0; if (length4 > srcSize) return 
unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); } - args->ip[0] = args->iend[1] - sizeof(ulong); - args->ip[1] = args->iend[2] - sizeof(ulong); - args->ip[2] = args->iend[3] - sizeof(ulong); - args->ip[3] = (byte*)src + srcSize - sizeof(ulong); - args->op[0] = (byte*)dst; - args->op[1] = args->op[0] + (dstSize + 3) / 4; - args->op[2] = args->op[1] + (dstSize + 3) / 4; - args->op[3] = args->op[2] + (dstSize + 3) / 4; - if (args->op[3] >= oend) + args->ip.e0 = args->iend.e1 - sizeof(ulong); + args->ip.e1 = args->iend.e2 - sizeof(ulong); + args->ip.e2 = args->iend.e3 - sizeof(ulong); + args->ip.e3 = (byte*)src + srcSize - sizeof(ulong); + args->op.e0 = (byte*)dst; + args->op.e1 = args->op.e0 + (dstSize + 3) / 4; + args->op.e2 = args->op.e1 + (dstSize + 3) / 4; + args->op.e3 = args->op.e2 + (dstSize + 3) / 4; + if (args->op.e3 >= oend) return 0; - args->bits[0] = HUF_initFastDStream(args->ip[0]); - args->bits[1] = HUF_initFastDStream(args->ip[1]); - args->bits[2] = HUF_initFastDStream(args->ip[2]); - args->bits[3] = HUF_initFastDStream(args->ip[3]); + args->bits[0] = HUF_initFastDStream(args->ip.e0); + args->bits[1] = HUF_initFastDStream(args->ip.e1); + args->bits[2] = HUF_initFastDStream(args->ip.e2); + args->bits[3] = HUF_initFastDStream(args->ip.e3); args->ilimit = ilimit; args->oend = oend; args->dt = dt; @@ -79,16 +79,16 @@ private static nuint HUF_DecompressFastArgs_init(HUF_DecompressFastArgs* args, v private static nuint HUF_initRemainingDStream(BIT_DStream_t* bit, HUF_DecompressFastArgs* args, int stream, byte* segmentEnd) { - if (args->op[stream] > segmentEnd) + if ((&args->op.e0)[stream] > segmentEnd) return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); - if (args->ip[stream] < args->iend[stream] - 8) + if ((&args->ip.e0)[stream] < (&args->iend.e0)[stream] - 8) return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); assert(sizeof(nuint) == 8); - bit->bitContainer = MEM_readLEST(args->ip[stream]); + bit->bitContainer = MEM_readLEST((&args->ip.e0)[stream]); bit->bitsConsumed = ZSTD_countTrailingZeros64(args->bits[stream]); - bit->start = (sbyte*)args->iend[0]; + bit->start = (sbyte*)args->iend.e0; bit->limitPtr = bit->start + sizeof(nuint); - bit->ptr = (sbyte*)args->ip[stream]; + bit->ptr = (sbyte*)(&args->ip.e0)[stream]; return 0; } @@ -493,14 +493,14 @@ private static void HUF_decompress4X1_usingDTable_internal_fast_c_loop(HUF_Decom bits1 = args->bits[1]; bits2 = args->bits[2]; bits3 = args->bits[3]; - ip0 = args->ip[0]; - ip1 = args->ip[1]; - ip2 = args->ip[2]; - ip3 = args->ip[3]; - op0 = args->op[0]; - op1 = args->op[1]; - op2 = args->op[2]; - op3 = args->op[3]; + ip0 = args->ip.e0; + ip1 = args->ip.e1; + ip2 = args->ip.e2; + ip3 = args->ip.e3; + op0 = args->op.e0; + op1 = args->op.e1; + op2 = args->op.e2; + op3 = args->op.e3; assert(BitConverter.IsLittleEndian); assert(!MEM_32bits); for (; ; ) @@ -769,14 +769,14 @@ private static void HUF_decompress4X1_usingDTable_internal_fast_c_loop(HUF_Decom args->bits[1] = bits1; args->bits[2] = bits2; args->bits[3] = bits3; - args->ip[0] = ip0; - args->ip[1] = ip1; - args->ip[2] = ip2; - args->ip[3] = ip3; - args->op[0] = op0; - args->op[1] = op1; - args->op[2] = op2; - args->op[3] = op3; + args->ip.e0 = ip0; + args->ip.e1 = ip1; + args->ip.e2 = ip2; + args->ip.e3 = ip3; + args->op.e0 = op0; + args->op.e1 = op1; + args->op.e2 = op2; + args->op.e3 = op3; } /** @@ -804,13 +804,13 @@ private static nuint HUF_decompress4X1_usingDTable_internal_fast(void* dst, nuin 
return 0; } - assert(args.ip[0] >= args.ilimit); + assert(args.ip.e0 >= args.ilimit); loopFn(&args); - assert(args.ip[0] >= iend); - assert(args.ip[1] >= iend); - assert(args.ip[2] >= iend); - assert(args.ip[3] >= iend); - assert(args.op[3] <= oend); + assert(args.ip.e0 >= iend); + assert(args.ip.e1 >= iend); + assert(args.ip.e2 >= iend); + assert(args.ip.e3 >= iend); + assert(args.op.e3 <= oend); { nuint segmentSize = (dstSize + 3) / 4; byte* segmentEnd = (byte*)dst; @@ -830,8 +830,8 @@ private static nuint HUF_decompress4X1_usingDTable_internal_fast(void* dst, nuin } } - args.op[i] += HUF_decodeStreamX1(args.op[i], &bit, segmentEnd, (HUF_DEltX1*)dt, 11); - if (args.op[i] != segmentEnd) + (&args.op.e0)[i] += HUF_decodeStreamX1((&args.op.e0)[i], &bit, segmentEnd, (HUF_DEltX1*)dt, 11); + if ((&args.op.e0)[i] != segmentEnd) return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); } } @@ -1048,7 +1048,7 @@ private static void HUF_fillDTableX2Level2(HUF_DEltX2* DTable, uint targetLog, u private static void HUF_fillDTableX2(HUF_DEltX2* DTable, uint targetLog, sortedSymbol_t* sortedList, uint* rankStart, rankValCol_t* rankValOrigin, uint maxWeight, uint nbBitsBaseline) { - uint* rankVal = (uint*)rankValOrigin[0]; + uint* rankVal = (uint*)&rankValOrigin[0]; /* note : targetLog >= srcLog, hence scaleLog <= 1 */ int scaleLog = (int)(nbBitsBaseline - targetLog); uint minBits = nbBitsBaseline - maxWeight; @@ -1071,7 +1071,7 @@ private static void HUF_fillDTableX2(HUF_DEltX2* DTable, uint targetLog, sortedS minWeight = 1; for (s = begin; s != end; ++s) { - HUF_fillDTableX2Level2(DTable + start, targetLog, nbBits, (uint*)rankValOrigin[nbBits], minWeight, wEnd, sortedList, rankStart, nbBitsBaseline, sortedList[s].symbol); + HUF_fillDTableX2Level2(DTable + start, targetLog, nbBits, (uint*)&rankValOrigin[nbBits], minWeight, wEnd, sortedList, rankStart, nbBitsBaseline, sortedList[s].symbol); start += (int)length; } } @@ -1130,14 +1130,14 @@ private static nuint HUF_readDTableX2_wksp(uint* DTable, void* src, nuint srcSiz { uint w = wksp->weightList[s]; uint r = rankStart[w]++; - wksp->sortedSymbol[r].symbol = (byte)s; + (&wksp->sortedSymbol.e0)[r].symbol = (byte)s; } rankStart[0] = 0; } { - uint* rankVal0 = (uint*)wksp->rankVal[0]; + uint* rankVal0 = (uint*)&wksp->rankVal.e0; { /* tableLog <= maxTableLog */ int rescale = (int)(maxTableLog - tableLog - 1); @@ -1156,7 +1156,7 @@ private static nuint HUF_readDTableX2_wksp(uint* DTable, void* src, nuint srcSiz uint consumed; for (consumed = minBits; consumed < maxTableLog - minBits + 1; consumed++) { - uint* rankValPtr = (uint*)wksp->rankVal[consumed]; + uint* rankValPtr = (uint*)&(&wksp->rankVal.e0)[consumed]; uint w; for (w = 1; w < maxW + 1; w++) { @@ -1166,7 +1166,7 @@ private static nuint HUF_readDTableX2_wksp(uint* DTable, void* src, nuint srcSiz } } - HUF_fillDTableX2(dt, maxTableLog, (sortedSymbol_t*)wksp->sortedSymbol, wksp->rankStart0, (rankValCol_t*)wksp->rankVal, maxW, tableLog + 1); + HUF_fillDTableX2(dt, maxTableLog, &wksp->sortedSymbol.e0, wksp->rankStart0, &wksp->rankVal.e0, maxW, tableLog + 1); dtd.tableLog = (byte)maxTableLog; dtd.tableType = 1; memcpy(DTable, &dtd, (uint)sizeof(DTableDesc)); @@ -1428,14 +1428,14 @@ private static void HUF_decompress4X2_usingDTable_internal_fast_c_loop(HUF_Decom bits1 = args->bits[1]; bits2 = args->bits[2]; bits3 = args->bits[3]; - ip0 = args->ip[0]; - ip1 = args->ip[1]; - ip2 = args->ip[2]; - ip3 = args->ip[3]; - op0 = args->op[0]; - op1 = args->op[1]; - op2 = args->op[2]; - op3 = 
args->op[3]; + ip0 = args->ip.e0; + ip1 = args->ip.e1; + ip2 = args->ip.e2; + ip3 = args->ip.e3; + op0 = args->op.e0; + op1 = args->op.e1; + op2 = args->op.e2; + op3 = args->op.e3; oend0 = op1; oend1 = op2; oend2 = op3; @@ -1752,14 +1752,14 @@ private static void HUF_decompress4X2_usingDTable_internal_fast_c_loop(HUF_Decom args->bits[1] = bits1; args->bits[2] = bits2; args->bits[3] = bits3; - args->ip[0] = ip0; - args->ip[1] = ip1; - args->ip[2] = ip2; - args->ip[3] = ip3; - args->op[0] = op0; - args->op[1] = op1; - args->op[2] = op2; - args->op[3] = op3; + args->ip.e0 = ip0; + args->ip.e1 = ip1; + args->ip.e2 = ip2; + args->ip.e3 = ip3; + args->op.e0 = op0; + args->op.e1 = op1; + args->op.e2 = op2; + args->op.e3 = op3; } private static nuint HUF_decompress4X2_usingDTable_internal_fast(void* dst, nuint dstSize, void* cSrc, nuint cSrcSize, uint* DTable, delegate* managed loopFn) @@ -1782,13 +1782,13 @@ private static nuint HUF_decompress4X2_usingDTable_internal_fast(void* dst, nuin return 0; } - assert(args.ip[0] >= args.ilimit); + assert(args.ip.e0 >= args.ilimit); loopFn(&args); - assert(args.ip[0] >= iend); - assert(args.ip[1] >= iend); - assert(args.ip[2] >= iend); - assert(args.ip[3] >= iend); - assert(args.op[3] <= oend); + assert(args.ip.e0 >= iend); + assert(args.ip.e1 >= iend); + assert(args.ip.e2 >= iend); + assert(args.ip.e3 >= iend); + assert(args.op.e3 <= oend); { nuint segmentSize = (dstSize + 3) / 4; byte* segmentEnd = (byte*)dst; @@ -1808,8 +1808,8 @@ private static nuint HUF_decompress4X2_usingDTable_internal_fast(void* dst, nuin } } - args.op[i] += HUF_decodeStreamX2(args.op[i], &bit, segmentEnd, (HUF_DEltX2*)dt, 11); - if (args.op[i] != segmentEnd) + (&args.op.e0)[i] += HUF_decodeStreamX2((&args.op.e0)[i], &bit, segmentEnd, (HUF_DEltX2*)dt, 11); + if ((&args.op.e0)[i] != segmentEnd) return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); } } diff --git a/src/ZstdSharp/Unsafe/ZSTD_entropyDTables_t.cs b/src/ZstdSharp/Unsafe/ZSTD_entropyDTables_t.cs index 4c49e8d..3ed29f2 100644 --- a/src/ZstdSharp/Unsafe/ZSTD_entropyDTables_t.cs +++ b/src/ZstdSharp/Unsafe/ZSTD_entropyDTables_t.cs @@ -1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct ZSTD_entropyDTables_t @@ -530,26 +527,6 @@ public unsafe struct _LLTable_e__FixedBuffer public ZSTD_seqSymbol e510; public ZSTD_seqSymbol e511; public ZSTD_seqSymbol e512; - public ref ZSTD_seqSymbol this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_LLTable_e__FixedBuffer, ZSTD_seqSymbol>(this) + index); - } - - public ref ZSTD_seqSymbol this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_LLTable_e__FixedBuffer, ZSTD_seqSymbol>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator ZSTD_seqSymbol*(in _LLTable_e__FixedBuffer t) => RefToPointer<_LLTable_e__FixedBuffer, ZSTD_seqSymbol>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static ZSTD_seqSymbol* operator +(in _LLTable_e__FixedBuffer t, nuint index) => RefToPointer<_LLTable_e__FixedBuffer, ZSTD_seqSymbol>(t) + index; } public unsafe struct _OFTable_e__FixedBuffer @@ -811,26 +788,6 @@ public unsafe struct _OFTable_e__FixedBuffer public ZSTD_seqSymbol e254; public ZSTD_seqSymbol e255; public ZSTD_seqSymbol 
e256; - public ref ZSTD_seqSymbol this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_OFTable_e__FixedBuffer, ZSTD_seqSymbol>(this) + index); - } - - public ref ZSTD_seqSymbol this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_OFTable_e__FixedBuffer, ZSTD_seqSymbol>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator ZSTD_seqSymbol*(in _OFTable_e__FixedBuffer t) => RefToPointer<_OFTable_e__FixedBuffer, ZSTD_seqSymbol>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static ZSTD_seqSymbol* operator +(in _OFTable_e__FixedBuffer t, nuint index) => RefToPointer<_OFTable_e__FixedBuffer, ZSTD_seqSymbol>(t) + index; } public unsafe struct _MLTable_e__FixedBuffer @@ -1348,26 +1305,6 @@ public unsafe struct _MLTable_e__FixedBuffer public ZSTD_seqSymbol e510; public ZSTD_seqSymbol e511; public ZSTD_seqSymbol e512; - public ref ZSTD_seqSymbol this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_MLTable_e__FixedBuffer, ZSTD_seqSymbol>(this) + index); - } - - public ref ZSTD_seqSymbol this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_MLTable_e__FixedBuffer, ZSTD_seqSymbol>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator ZSTD_seqSymbol*(in _MLTable_e__FixedBuffer t) => RefToPointer<_MLTable_e__FixedBuffer, ZSTD_seqSymbol>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static ZSTD_seqSymbol* operator +(in _MLTable_e__FixedBuffer t, nuint index) => RefToPointer<_MLTable_e__FixedBuffer, ZSTD_seqSymbol>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/ZSTD_hufCTables_t.cs b/src/ZstdSharp/Unsafe/ZSTD_hufCTables_t.cs index 6a17b4d..08d8e5b 100644 --- a/src/ZstdSharp/Unsafe/ZSTD_hufCTables_t.cs +++ b/src/ZstdSharp/Unsafe/ZSTD_hufCTables_t.cs @@ -1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct ZSTD_hufCTables_t @@ -266,26 +263,6 @@ public unsafe struct _CTable_e__FixedBuffer public nuint e254; public nuint e255; public nuint e256; - public ref nuint this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_CTable_e__FixedBuffer, nuint>(this) + index); - } - - public ref nuint this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_CTable_e__FixedBuffer, nuint>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator nuint*(in _CTable_e__FixedBuffer t) => RefToPointer<_CTable_e__FixedBuffer, nuint>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static nuint* operator +(in _CTable_e__FixedBuffer t, nuint index) => RefToPointer<_CTable_e__FixedBuffer, nuint>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/ZstdCompress.cs b/src/ZstdSharp/Unsafe/ZstdCompress.cs index c51cc95..01c1417 100644 --- a/src/ZstdSharp/Unsafe/ZstdCompress.cs +++ 
b/src/ZstdSharp/Unsafe/ZstdCompress.cs @@ -3610,16 +3610,16 @@ private static nuint ZSTD_buildBlockEntropyStats_literals(void* src, nuint srcSi } } - if (repeat == HUF_repeat.HUF_repeat_check && HUF_validateCTable((nuint*)prevHuf->CTable, countWksp, maxSymbolValue) == 0) + if (repeat == HUF_repeat.HUF_repeat_check && HUF_validateCTable(&prevHuf->CTable.e0, countWksp, maxSymbolValue) == 0) { repeat = HUF_repeat.HUF_repeat_none; } - memset((void*)nextHuf->CTable, 0, sizeof(ulong) * 257); - huffLog = HUF_optimalTableLog(huffLog, srcSize, maxSymbolValue, nodeWksp, nodeWkspSize, (nuint*)nextHuf->CTable, countWksp, hufFlags); + memset(&nextHuf->CTable.e0, 0, sizeof(ulong) * 257); + huffLog = HUF_optimalTableLog(huffLog, srcSize, maxSymbolValue, nodeWksp, nodeWkspSize, &nextHuf->CTable.e0, countWksp, hufFlags); assert(huffLog <= 11); { - nuint maxBits = HUF_buildCTable_wksp((nuint*)nextHuf->CTable, countWksp, maxSymbolValue, huffLog, nodeWksp, nodeWkspSize); + nuint maxBits = HUF_buildCTable_wksp(&nextHuf->CTable.e0, countWksp, maxSymbolValue, huffLog, nodeWksp, nodeWkspSize); { nuint err_code = maxBits; if (ERR_isError(err_code)) @@ -3632,11 +3632,11 @@ private static nuint ZSTD_buildBlockEntropyStats_literals(void* src, nuint srcSi } { - nuint newCSize = HUF_estimateCompressedSize((nuint*)nextHuf->CTable, countWksp, maxSymbolValue); - nuint hSize = HUF_writeCTable_wksp(hufMetadata->hufDesBuffer, sizeof(byte) * 128, (nuint*)nextHuf->CTable, maxSymbolValue, huffLog, nodeWksp, nodeWkspSize); + nuint newCSize = HUF_estimateCompressedSize(&nextHuf->CTable.e0, countWksp, maxSymbolValue); + nuint hSize = HUF_writeCTable_wksp(hufMetadata->hufDesBuffer, sizeof(byte) * 128, &nextHuf->CTable.e0, maxSymbolValue, huffLog, nodeWksp, nodeWkspSize); if (repeat != HUF_repeat.HUF_repeat_none) { - nuint oldCSize = HUF_estimateCompressedSize((nuint*)prevHuf->CTable, countWksp, maxSymbolValue); + nuint oldCSize = HUF_estimateCompressedSize(&prevHuf->CTable.e0, countWksp, maxSymbolValue); if (oldCSize < srcSize && (oldCSize <= hSize + newCSize || hSize + 12 >= srcSize)) { memcpy(nextHuf, prevHuf, (uint)sizeof(ZSTD_hufCTables_t)); @@ -3752,7 +3752,7 @@ private static nuint ZSTD_estimateBlockSize_literal(byte* literals, nuint litSiz if (ERR_isError(largest)) return litSize; { - nuint cLitSizeEstimate = HUF_estimateCompressedSize((nuint*)huf->CTable, countWksp, maxSymbolValue); + nuint cLitSizeEstimate = HUF_estimateCompressedSize(&huf->CTable.e0, countWksp, maxSymbolValue); if (writeEntropy != 0) cLitSizeEstimate += hufMetadata->hufDesSize; if (singleStream == 0) @@ -4936,7 +4936,7 @@ private static nuint ZSTD_loadCEntropy(ZSTD_compressedBlockState_t* bs, void* wo { uint maxSymbolValue = 255; uint hasZeroWeights = 1; - nuint hufHeaderSize = HUF_readCTable((nuint*)bs->entropy.huf.CTable, &maxSymbolValue, dictPtr, (nuint)(dictEnd - dictPtr), &hasZeroWeights); + nuint hufHeaderSize = HUF_readCTable(&bs->entropy.huf.CTable.e0, &maxSymbolValue, dictPtr, (nuint)(dictEnd - dictPtr), &hasZeroWeights); if (hasZeroWeights == 0) bs->entropy.huf.repeatMode = HUF_repeat.HUF_repeat_valid; if (ERR_isError(hufHeaderSize)) diff --git a/src/ZstdSharp/Unsafe/ZstdCompressInternal.cs b/src/ZstdSharp/Unsafe/ZstdCompressInternal.cs index 2184af0..7c6b04c 100644 --- a/src/ZstdSharp/Unsafe/ZstdCompressInternal.cs +++ b/src/ZstdSharp/Unsafe/ZstdCompressInternal.cs @@ -712,7 +712,7 @@ private static void ZSTD_checkDictValidity(ZSTD_window_t* window, void* blockEnd } } - public static readonly byte* stringToByte_20_00 = GetArrayPointer(new 
byte[] { 32, 0 }); + private static readonly byte* stringToByte_20_00 = GetArrayPointer(new byte[] { 32, 0 }); [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void ZSTD_window_init(ZSTD_window_t* window) { diff --git a/src/ZstdSharp/Unsafe/ZstdCompressLiterals.cs b/src/ZstdSharp/Unsafe/ZstdCompressLiterals.cs index c0f51d6..5b894e9 100644 --- a/src/ZstdSharp/Unsafe/ZstdCompressLiterals.cs +++ b/src/ZstdSharp/Unsafe/ZstdCompressLiterals.cs @@ -128,7 +128,7 @@ private static nuint ZSTD_compressLiterals(void* dst, nuint dstCapacity, void* s if (repeat == HUF_repeat.HUF_repeat_valid && lhSize == 3) singleStream = 1; huf_compress = singleStream != 0 ? &HUF_compress1X_repeat : &HUF_compress4X_repeat; - cLitSize = huf_compress(ostart + lhSize, dstCapacity - lhSize, src, srcSize, 255, 11, entropyWorkspace, entropyWorkspaceSize, (nuint*)nextHuf->CTable, &repeat, flags); + cLitSize = huf_compress(ostart + lhSize, dstCapacity - lhSize, src, srcSize, 255, 11, entropyWorkspace, entropyWorkspaceSize, &nextHuf->CTable.e0, &repeat, flags); if (repeat != HUF_repeat.HUF_repeat_none) { hType = symbolEncodingType_e.set_repeat; diff --git a/src/ZstdSharp/Unsafe/ZstdCompressSuperblock.cs b/src/ZstdSharp/Unsafe/ZstdCompressSuperblock.cs index c19f797..4bfdd32 100644 --- a/src/ZstdSharp/Unsafe/ZstdCompressSuperblock.cs +++ b/src/ZstdSharp/Unsafe/ZstdCompressSuperblock.cs @@ -229,7 +229,7 @@ private static nuint ZSTD_compressSubBlock(ZSTD_entropyCTables_t* entropy, ZSTD_ byte* oend = ostart + dstCapacity; byte* op = ostart + ZSTD_blockHeaderSize; { - nuint cLitSize = ZSTD_compressSubBlock_literal((nuint*)entropy->huf.CTable, &entropyMetadata->hufMetadata, literals, litSize, op, (nuint)(oend - op), bmi2, writeLitEntropy, litEntropyWritten); + nuint cLitSize = ZSTD_compressSubBlock_literal(&entropy->huf.CTable.e0, &entropyMetadata->hufMetadata, literals, litSize, op, (nuint)(oend - op), bmi2, writeLitEntropy, litEntropyWritten); { nuint err_code = cLitSize; if (ERR_isError(err_code)) @@ -283,7 +283,7 @@ private static nuint ZSTD_estimateSubBlockSize_literal(byte* literals, nuint lit if (ERR_isError(largest)) return litSize; { - nuint cLitSizeEstimate = HUF_estimateCompressedSize((nuint*)huf->CTable, countWksp, maxSymbolValue); + nuint cLitSizeEstimate = HUF_estimateCompressedSize(&huf->CTable.e0, countWksp, maxSymbolValue); if (writeEntropy != 0) cLitSizeEstimate += hufMetadata->hufDesSize; return cLitSizeEstimate + literalSectionHeaderSize; diff --git a/src/ZstdSharp/Unsafe/ZstdDdict.cs b/src/ZstdSharp/Unsafe/ZstdDdict.cs index cadd108..13355e9 100644 --- a/src/ZstdSharp/Unsafe/ZstdDdict.cs +++ b/src/ZstdSharp/Unsafe/ZstdDdict.cs @@ -39,9 +39,9 @@ private static void ZSTD_copyDDictParameters(ZSTD_DCtx_s* dctx, ZSTD_DDict_s* dd { dctx->litEntropy = 1; dctx->fseEntropy = 1; - dctx->LLTptr = ddict->entropy.LLTable; - dctx->MLTptr = ddict->entropy.MLTable; - dctx->OFTptr = ddict->entropy.OFTable; + dctx->LLTptr = &ddict->entropy.LLTable.e0; + dctx->MLTptr = &ddict->entropy.MLTable.e0; + dctx->OFTptr = &ddict->entropy.OFTable.e0; dctx->HUFptr = ddict->entropy.hufTable; dctx->entropy.rep[0] = ddict->entropy.rep[0]; dctx->entropy.rep[1] = ddict->entropy.rep[1]; diff --git a/src/ZstdSharp/Unsafe/ZstdDecompress.cs b/src/ZstdSharp/Unsafe/ZstdDecompress.cs index 9f18773..309b8af 100644 --- a/src/ZstdSharp/Unsafe/ZstdDecompress.cs +++ b/src/ZstdSharp/Unsafe/ZstdDecompress.cs @@ -1539,7 +1539,7 @@ private static nuint ZSTD_loadDEntropy(ZSTD_entropyDTables_t* entropy, void* dic return 
unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_dictionary_corrupted)); } - ZSTD_buildFSETable((ZSTD_seqSymbol*)entropy->OFTable, offcodeNCount, offcodeMaxValue, OF_base, OF_bits, offcodeLog, entropy->workspace, sizeof(uint) * 157, 0); + ZSTD_buildFSETable(&entropy->OFTable.e0, offcodeNCount, offcodeMaxValue, OF_base, OF_bits, offcodeLog, entropy->workspace, sizeof(uint) * 157, 0); dictPtr += offcodeHeaderSize; } @@ -1562,7 +1562,7 @@ private static nuint ZSTD_loadDEntropy(ZSTD_entropyDTables_t* entropy, void* dic return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_dictionary_corrupted)); } - ZSTD_buildFSETable((ZSTD_seqSymbol*)entropy->MLTable, matchlengthNCount, matchlengthMaxValue, ML_base, ML_bits, matchlengthLog, entropy->workspace, sizeof(uint) * 157, 0); + ZSTD_buildFSETable(&entropy->MLTable.e0, matchlengthNCount, matchlengthMaxValue, ML_base, ML_bits, matchlengthLog, entropy->workspace, sizeof(uint) * 157, 0); dictPtr += matchlengthHeaderSize; } @@ -1585,7 +1585,7 @@ private static nuint ZSTD_loadDEntropy(ZSTD_entropyDTables_t* entropy, void* dic return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_dictionary_corrupted)); } - ZSTD_buildFSETable((ZSTD_seqSymbol*)entropy->LLTable, litlengthNCount, litlengthMaxValue, LL_base, LL_bits, litlengthLog, entropy->workspace, sizeof(uint) * 157, 0); + ZSTD_buildFSETable(&entropy->LLTable.e0, litlengthNCount, litlengthMaxValue, LL_base, LL_bits, litlengthLog, entropy->workspace, sizeof(uint) * 157, 0); dictPtr += litlengthHeaderSize; } @@ -1657,9 +1657,9 @@ public static nuint ZSTD_decompressBegin(ZSTD_DCtx_s* dctx) dctx->dictID = 0; dctx->bType = blockType_e.bt_reserved; memcpy(dctx->entropy.rep, repStartValue, sizeof(uint) * 3); - dctx->LLTptr = dctx->entropy.LLTable; - dctx->MLTptr = dctx->entropy.MLTable; - dctx->OFTptr = dctx->entropy.OFTable; + dctx->LLTptr = &dctx->entropy.LLTable.e0; + dctx->MLTptr = &dctx->entropy.MLTable.e0; + dctx->OFTptr = &dctx->entropy.OFTable.e0; dctx->HUFptr = dctx->entropy.hufTable; return 0; } diff --git a/src/ZstdSharp/Unsafe/ZstdDecompressBlock.cs b/src/ZstdSharp/Unsafe/ZstdDecompressBlock.cs index f78a322..63d3e79 100644 --- a/src/ZstdSharp/Unsafe/ZstdDecompressBlock.cs +++ b/src/ZstdSharp/Unsafe/ZstdDecompressBlock.cs @@ -659,7 +659,7 @@ private static nuint ZSTD_decodeSeqHeaders(ZSTD_DCtx_s* dctx, int* nbSeqPtr, voi symbolEncodingType_e MLtype = (symbolEncodingType_e)(*ip >> 2 & 3); ip++; { - nuint llhSize = ZSTD_buildSeqTable((ZSTD_seqSymbol*)dctx->entropy.LLTable, &dctx->LLTptr, LLtype, 35, 9, ip, (nuint)(iend - ip), LL_base, LL_bits, LL_defaultDTable, dctx->fseEntropy, dctx->ddictIsCold, nbSeq, dctx->workspace, sizeof(uint) * 640, ZSTD_DCtx_get_bmi2(dctx)); + nuint llhSize = ZSTD_buildSeqTable(&dctx->entropy.LLTable.e0, &dctx->LLTptr, LLtype, 35, 9, ip, (nuint)(iend - ip), LL_base, LL_bits, LL_defaultDTable, dctx->fseEntropy, dctx->ddictIsCold, nbSeq, dctx->workspace, sizeof(uint) * 640, ZSTD_DCtx_get_bmi2(dctx)); if (ERR_isError(llhSize)) { return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); @@ -669,7 +669,7 @@ private static nuint ZSTD_decodeSeqHeaders(ZSTD_DCtx_s* dctx, int* nbSeqPtr, voi } { - nuint ofhSize = ZSTD_buildSeqTable((ZSTD_seqSymbol*)dctx->entropy.OFTable, &dctx->OFTptr, OFtype, 31, 8, ip, (nuint)(iend - ip), OF_base, OF_bits, OF_defaultDTable, dctx->fseEntropy, dctx->ddictIsCold, nbSeq, dctx->workspace, sizeof(uint) * 640, ZSTD_DCtx_get_bmi2(dctx)); + nuint ofhSize = ZSTD_buildSeqTable(&dctx->entropy.OFTable.e0, &dctx->OFTptr, OFtype, 31, 8, ip, 
(nuint)(iend - ip), OF_base, OF_bits, OF_defaultDTable, dctx->fseEntropy, dctx->ddictIsCold, nbSeq, dctx->workspace, sizeof(uint) * 640, ZSTD_DCtx_get_bmi2(dctx)); if (ERR_isError(ofhSize)) { return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); @@ -679,7 +679,7 @@ private static nuint ZSTD_decodeSeqHeaders(ZSTD_DCtx_s* dctx, int* nbSeqPtr, voi } { - nuint mlhSize = ZSTD_buildSeqTable((ZSTD_seqSymbol*)dctx->entropy.MLTable, &dctx->MLTptr, MLtype, 52, 9, ip, (nuint)(iend - ip), ML_base, ML_bits, ML_defaultDTable, dctx->fseEntropy, dctx->ddictIsCold, nbSeq, dctx->workspace, sizeof(uint) * 640, ZSTD_DCtx_get_bmi2(dctx)); + nuint mlhSize = ZSTD_buildSeqTable(&dctx->entropy.MLTable.e0, &dctx->MLTptr, MLtype, 52, 9, ip, (nuint)(iend - ip), ML_base, ML_bits, ML_defaultDTable, dctx->fseEntropy, dctx->ddictIsCold, nbSeq, dctx->workspace, sizeof(uint) * 640, ZSTD_DCtx_get_bmi2(dctx)); if (ERR_isError(mlhSize)) { return unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_corruption_detected)); @@ -1125,29 +1125,29 @@ private static seq_t ZSTD_decodeSequence(seqState_t* seqState, ZSTD_longOffset_e BIT_reloadDStream(&seqState->DStream); } - seqState->prevOffset[2] = seqState->prevOffset[1]; - seqState->prevOffset[1] = seqState->prevOffset[0]; - seqState->prevOffset[0] = offset; + seqState->prevOffset.e2 = seqState->prevOffset.e1; + seqState->prevOffset.e1 = seqState->prevOffset.e0; + seqState->prevOffset.e0 = offset; } else { uint ll0 = llDInfo->baseValue == 0 ? 1U : 0U; if (ofBits == 0) { - offset = seqState->prevOffset[ll0]; - seqState->prevOffset[1] = seqState->prevOffset[ll0 == 0 ? 1 : 0]; - seqState->prevOffset[0] = offset; + offset = (&seqState->prevOffset.e0)[ll0]; + seqState->prevOffset.e1 = (&seqState->prevOffset.e0)[ll0 == 0 ? 1 : 0]; + seqState->prevOffset.e0 = offset; } else { offset = ofBase + ll0 + BIT_readBitsFast(&seqState->DStream, 1); { - nuint temp = offset == 3 ? seqState->prevOffset[0] - 1 : seqState->prevOffset[offset]; + nuint temp = offset == 3 ? seqState->prevOffset.e0 - 1 : (&seqState->prevOffset.e0)[offset]; temp += temp == 0 ? 
1U : 0U; if (offset != 1) - seqState->prevOffset[2] = seqState->prevOffset[1]; - seqState->prevOffset[1] = seqState->prevOffset[0]; - seqState->prevOffset[0] = offset = temp; + seqState->prevOffset.e2 = seqState->prevOffset.e1; + seqState->prevOffset.e1 = seqState->prevOffset.e0; + seqState->prevOffset.e0 = offset = temp; } } } @@ -1196,7 +1196,7 @@ private static nuint ZSTD_decompressSequences_bodySplitLitBuffer(ZSTD_DCtx_s* dc { uint i; for (i = 0; i < 3; i++) - seqState.prevOffset[i] = dctx->entropy.rep[i]; + (&seqState.prevOffset.e0)[i] = dctx->entropy.rep[i]; } if (ERR_isError(BIT_initDStream(&seqState.DStream, ip, (nuint)(iend - ip)))) @@ -1279,7 +1279,7 @@ private static nuint ZSTD_decompressSequences_bodySplitLitBuffer(ZSTD_DCtx_s* dc { uint i; for (i = 0; i < 3; i++) - dctx->entropy.rep[i] = (uint)seqState.prevOffset[i]; + dctx->entropy.rep[i] = (uint)(&seqState.prevOffset.e0)[i]; } } @@ -1342,7 +1342,7 @@ private static nuint ZSTD_decompressSequences_body(ZSTD_DCtx_s* dctx, void* dst, { uint i; for (i = 0; i < 3; i++) - seqState.prevOffset[i] = dctx->entropy.rep[i]; + (&seqState.prevOffset.e0)[i] = dctx->entropy.rep[i]; } if (ERR_isError(BIT_initDStream(&seqState.DStream, ip, (nuint)(iend - ip)))) @@ -1460,7 +1460,7 @@ private static nuint ZSTD_decompressSequences_body(ZSTD_DCtx_s* dctx, void* dst, { uint i; for (i = 0; i < 3; i++) - dctx->entropy.rep[i] = (uint)seqState.prevOffset[i]; + dctx->entropy.rep[i] = (uint)(&seqState.prevOffset.e0)[i]; } } @@ -1537,7 +1537,7 @@ private static nuint ZSTD_decompressSequencesLong_body(ZSTD_DCtx_s* dctx, void* { int i; for (i = 0; i < 3; i++) - seqState.prevOffset[i] = dctx->entropy.rep[i]; + (&seqState.prevOffset.e0)[i] = dctx->entropy.rep[i]; } assert(dst != null); @@ -1649,7 +1649,7 @@ private static nuint ZSTD_decompressSequencesLong_body(ZSTD_DCtx_s* dctx, void* { uint i; for (i = 0; i < 3; i++) - dctx->entropy.rep[i] = (uint)seqState.prevOffset[i]; + dctx->entropy.rep[i] = (uint)(&seqState.prevOffset.e0)[i]; } } diff --git a/src/ZstdSharp/Unsafe/ZstdLdm.cs b/src/ZstdSharp/Unsafe/ZstdLdm.cs index 754e991..251158b 100644 --- a/src/ZstdSharp/Unsafe/ZstdLdm.cs +++ b/src/ZstdSharp/Unsafe/ZstdLdm.cs @@ -291,7 +291,7 @@ private static void ZSTD_ldm_fillHashTable(ldmState_t* ldmState, byte* ip, byte* byte* @base = ldmState->window.@base; byte* istart = ip; ldmRollingHashState_t hashState; - nuint* splits = (nuint*)ldmState->splitIndices; + nuint* splits = &ldmState->splitIndices.e0; uint numSplits; ZSTD_ldm_gear_init(&hashState, @params); while (ip < iend) @@ -357,8 +357,8 @@ private static nuint ZSTD_ldm_generateSequences_internal(ldmState_t* ldmState, r /* Rolling hash state */ ldmRollingHashState_t hashState; /* Arrays for staged-processing */ - nuint* splits = (nuint*)ldmState->splitIndices; - ldmMatchCandidate_t* candidates = (ldmMatchCandidate_t*)ldmState->matchCandidates; + nuint* splits = &ldmState->splitIndices.e0; + ldmMatchCandidate_t* candidates = &ldmState->matchCandidates.e0; uint numSplits; if (srcSize < minMatchLength) return (nuint)(iend - anchor); diff --git a/src/ZstdSharp/Unsafe/ZstdOpt.cs b/src/ZstdSharp/Unsafe/ZstdOpt.cs index 7022aa7..412d79c 100644 --- a/src/ZstdSharp/Unsafe/ZstdOpt.cs +++ b/src/ZstdSharp/Unsafe/ZstdOpt.cs @@ -120,7 +120,7 @@ private static void ZSTD_rescaleFreqs(optState_t* optPtr, byte* src, nuint srcSi { /* scale to 2K */ const uint scaleLog = 11; - uint bitCost = HUF_getNbBitsFromCTable((nuint*)optPtr->symbolCosts->huf.CTable, lit); + uint bitCost = 
HUF_getNbBitsFromCTable(&optPtr->symbolCosts->huf.CTable.e0, lit); assert(bitCost <= scaleLog); optPtr->litFreq[lit] = (uint)(bitCost != 0 ? 1 << (int)(scaleLog - bitCost) : 1); optPtr->litSum += optPtr->litFreq[lit]; diff --git a/src/ZstdSharp/Unsafe/ldmState_t.cs b/src/ZstdSharp/Unsafe/ldmState_t.cs index 1d69d36..f165353 100644 --- a/src/ZstdSharp/Unsafe/ldmState_t.cs +++ b/src/ZstdSharp/Unsafe/ldmState_t.cs @@ -1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct ldmState_t @@ -79,26 +76,6 @@ public unsafe struct _splitIndices_e__FixedBuffer public nuint e61; public nuint e62; public nuint e63; - public ref nuint this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_splitIndices_e__FixedBuffer, nuint>(this) + index); - } - - public ref nuint this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_splitIndices_e__FixedBuffer, nuint>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator nuint*(in _splitIndices_e__FixedBuffer t) => RefToPointer<_splitIndices_e__FixedBuffer, nuint>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static nuint* operator +(in _splitIndices_e__FixedBuffer t, nuint index) => RefToPointer<_splitIndices_e__FixedBuffer, nuint>(t) + index; } public unsafe struct _matchCandidates_e__FixedBuffer @@ -167,26 +144,6 @@ public unsafe struct _matchCandidates_e__FixedBuffer public ldmMatchCandidate_t e61; public ldmMatchCandidate_t e62; public ldmMatchCandidate_t e63; - public ref ldmMatchCandidate_t this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_matchCandidates_e__FixedBuffer, ldmMatchCandidate_t>(this) + index); - } - - public ref ldmMatchCandidate_t this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_matchCandidates_e__FixedBuffer, ldmMatchCandidate_t>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator ldmMatchCandidate_t*(in _matchCandidates_e__FixedBuffer t) => RefToPointer<_matchCandidates_e__FixedBuffer, ldmMatchCandidate_t>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static ldmMatchCandidate_t* operator +(in _matchCandidates_e__FixedBuffer t, nuint index) => RefToPointer<_matchCandidates_e__FixedBuffer, ldmMatchCandidate_t>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/rankValCol_t.cs b/src/ZstdSharp/Unsafe/rankValCol_t.cs index 0501543..d6c4fb5 100644 --- a/src/ZstdSharp/Unsafe/rankValCol_t.cs +++ b/src/ZstdSharp/Unsafe/rankValCol_t.cs @@ -1,13 +1,7 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct rankValCol_t { public fixed uint Body[13]; - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator uint*(in rankValCol_t t) => RefToPointer(t); } } \ No newline at end of file diff --git a/src/ZstdSharp/Unsafe/seqState_t.cs b/src/ZstdSharp/Unsafe/seqState_t.cs index dfec253..2743a14 100644 --- a/src/ZstdSharp/Unsafe/seqState_t.cs +++ b/src/ZstdSharp/Unsafe/seqState_t.cs @@ 
-1,6 +1,3 @@ -using System.Runtime.CompilerServices; -using static ZstdSharp.UnsafeHelper; - namespace ZstdSharp.Unsafe { public unsafe struct seqState_t @@ -15,26 +12,6 @@ public unsafe struct _prevOffset_e__FixedBuffer public nuint e0; public nuint e1; public nuint e2; - public ref nuint this[nuint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_prevOffset_e__FixedBuffer, nuint>(this) + index); - } - - public ref nuint this[nint index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - get => ref *(RefToPointer<_prevOffset_e__FixedBuffer, nuint>(this) + index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static implicit operator nuint*(in _prevOffset_e__FixedBuffer t) => RefToPointer<_prevOffset_e__FixedBuffer, nuint>(t); - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static nuint* operator +(in _prevOffset_e__FixedBuffer t, nuint index) => RefToPointer<_prevOffset_e__FixedBuffer, nuint>(t) + index; } } } \ No newline at end of file diff --git a/src/ZstdSharp/UnsafeHelper.cs b/src/ZstdSharp/UnsafeHelper.cs index 72d57ec..2fcd255 100644 --- a/src/ZstdSharp/UnsafeHelper.cs +++ b/src/ZstdSharp/UnsafeHelper.cs @@ -3,7 +3,7 @@ using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using InlineIL; -using static InlineIL.IL.Emit; + // ReSharper disable InconsistentNaming // ReSharper disable IdentifierTypo @@ -141,22 +141,8 @@ public static void SkipInit(out T value) * System.Runtime.CompilerServices.Unsafe.SkipInit(out value); * in .NET 5+ */ - Ret(); + IL.Emit.Ret(); throw IL.Unreachable(); } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - [InlineMethod.Inline] - public static TTo* RefToPointer(in TFrom t) where TTo : unmanaged - { - /* - * Can be rewritten with - * (TTo*)System.Runtime.CompilerServices.Unsafe.AsPointer(ref System.Runtime.CompilerServices.Unsafe.AsRef(t)); - * but unfortunately reduces inlining - */ - Ldarg_0(); - Conv_U(); - return IL.ReturnPointer(); - } } }
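
Illustrative sketch, not part of the patch above: the recurring change in this diff drops the generated indexers and pointer-conversion operators from the *_e__FixedBuffer structs and instead has call sites take the address of the first field (for example &bitC->bitContainer.e0 or &args->op.e0) and index from there with plain pointer arithmetic. The snippet below shows that access idiom in isolation; SampleBuffer, ReadAt and WriteAt are hypothetical names used only for this example and do not appear in the patch.

using System;

namespace FixedBufferSketch
{
    // Shape mirrors the generated fixed-buffer structs in the diff:
    // sequentially declared fields e0..eN of one unmanaged element type.
    public unsafe struct SampleBuffer
    {
        public nuint e0;
        public nuint e1;
        public nuint e2;
        public nuint e3;
    }

    public static unsafe class Program
    {
        // "After" style used throughout the patch: take the address of e0
        // and use ordinary pointer indexing, instead of a custom this[...]
        // indexer or an implicit nuint* conversion operator on the struct.
        static nuint ReadAt(SampleBuffer* buf, int index) => (&buf->e0)[index];

        static void WriteAt(SampleBuffer* buf, int index, nuint value) => (&buf->e0)[index] = value;

        static void Main()
        {
            SampleBuffer buf = default;   // all elements start at 0
            WriteAt(&buf, 2, 42);
            Console.WriteLine(ReadAt(&buf, 2)); // prints 42
        }
    }
}

The sketch relies on the e0..eN fields staying in declaration order, which is the default sequential layout for C# structs, and it needs unsafe code enabled (AllowUnsafeBlocks) to compile.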