diff --git a/Hi3Helper.SharpHDiffPatch/Binary/CombinedStream.cs b/Hi3Helper.SharpHDiffPatch/Binary/CombinedStream.cs
new file mode 100644
index 0000000..a1a3e2d
--- /dev/null
+++ b/Hi3Helper.SharpHDiffPatch/Binary/CombinedStream.cs
@@ -0,0 +1,368 @@
+/*
+ * Original Code by lassevk
+ * https://raw.githubusercontent.com/lassevk/Streams/master/Streams/CombinedStream.cs
+ */
+
+using System;
+using System.IO;
+using System.Linq;
+
+namespace Hi3Helper.EncTool
+{
+ public struct NewFileCombinedStreamStruct
+ {
+ public FileStream stream;
+ public long size;
+ }
+
+ ///
+ /// This class is a descendant that manages multiple underlying
+ /// streams which are considered to be chained together to one large stream. Reading,
+ /// seeking and writing across the underlying streams are supported.
+ ///
+ public sealed class CombinedStream : Stream
+ {
+ private Stream[] _UnderlyingStreams;
+ private long[] _UnderlyingStartingPositions;
+ private long _TotalLength;
+
+ private long _Position;
+ private int _Index;
+
+ ///
+ /// Constructs a new on top of the specified array
+ /// of streams.
+ ///
+ ///
+ /// An array of objects that will be chained together and
+ /// considered to be one big stream.
+ ///
+ public CombinedStream(params Stream[] underlyingStreams)
+ {
+ if (underlyingStreams == null)
+ throw new ArgumentNullException("underlyingStreams");
+ foreach (Stream stream in underlyingStreams)
+ {
+ if (stream == null)
+ throw new ArgumentException("underlyingStreams contains a null stream reference", "underlyingStreams");
+ if (!stream.CanRead)
+ throw new InvalidOperationException("CanRead not true for all streams");
+ if (!stream.CanSeek)
+ throw new InvalidOperationException("CanSeek not true for all streams");
+ }
+
+ _UnderlyingStreams = new Stream[underlyingStreams.Length];
+ _UnderlyingStartingPositions = new long[underlyingStreams.Length];
+ Array.Copy(underlyingStreams, _UnderlyingStreams, underlyingStreams.Length);
+
+ _Position = 0;
+ _Index = 0;
+
+ _UnderlyingStartingPositions[0] = 0;
+ for (int index = 1; index < _UnderlyingStartingPositions.Length; index++)
+ _UnderlyingStartingPositions[index] = _UnderlyingStartingPositions[index - 1] + _UnderlyingStreams[index - 1].Length;
+
+ _TotalLength = _UnderlyingStartingPositions[_UnderlyingStartingPositions.Length - 1] + _UnderlyingStreams[_UnderlyingStreams.Length - 1].Length;
+ }
+
+ ///
+ /// Constructs a new on top of the specified array
+ /// of streams.
+ ///
+ ///
+ /// An array of objects that will be chained together and
+ /// considered to be one big stream.
+ ///
+ public CombinedStream(params NewFileCombinedStreamStruct[] underlyingStreams)
+ {
+ if (underlyingStreams == null)
+ throw new ArgumentNullException("underlyingStreams");
+
+ _UnderlyingStreams = new Stream[underlyingStreams.Length];
+ _UnderlyingStartingPositions = new long[underlyingStreams.Length];
+
+ foreach (NewFileCombinedStreamStruct stream in underlyingStreams)
+ {
+ if (stream.stream == null)
+ throw new ArgumentException("underlyingStreams contains a null stream reference", "underlyingStreams");
+ if (!stream.stream.CanRead)
+ throw new InvalidOperationException("CanRead not true for all streams");
+ if (!stream.stream.CanSeek)
+ throw new InvalidOperationException("CanSeek not true for all streams");
+
+ stream.stream.SetLength(stream.size);
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"[CombinedStream.ctor()] Initializing file with length {stream.size} bytes: {stream.stream.Name}");
+#endif
+ }
+
+ Array.Copy(underlyingStreams.Select(x => x.stream).ToArray(), _UnderlyingStreams, underlyingStreams.Length);
+
+ _Position = 0;
+ _Index = 0;
+
+ _UnderlyingStartingPositions[0] = 0;
+ for (int index = 1; index < _UnderlyingStartingPositions.Length; index++)
+ _UnderlyingStartingPositions[index] = _UnderlyingStartingPositions[index - 1] + underlyingStreams[index - 1].size;
+
+ _TotalLength = _UnderlyingStartingPositions[_UnderlyingStartingPositions.Length - 1] + underlyingStreams[_UnderlyingStreams.Length - 1].size;
+
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"[CombinedStream.ctor()] Total length of the CombinedStream: {_TotalLength} bytes with total of {underlyingStreams.Length} streams");
+#endif
+ }
+
+ ///
+ /// Gets a value indicating whether the current stream supports reading.
+ ///
+ ///
+ /// true.
+ ///
+ ///
+ /// Always true for .
+ ///
+ public override bool CanRead
+ {
+ get
+ {
+ return true;
+ }
+ }
+
+ ///
+ /// Gets a value indicating whether the current stream supports seeking.
+ ///
+ ///
+ /// true.
+ ///
+ ///
+ /// Always true for .
+ ///
+ public override bool CanSeek
+ {
+ get
+ {
+ return true;
+ }
+ }
+
+ ///
+ /// Gets a value indicating whether the current stream supports writing.
+ ///
+ ///
+ /// true.
+ ///
+ ///
+ /// Always true for .
+ ///
+ public override bool CanWrite
+ {
+ get
+ {
+ return true;
+ }
+ }
+
+ ///
+ /// When overridden in a derived class, clears all buffers for this stream and causes any buffered data to be written to the underlying device.
+ ///
+ /// An I/O error occurs.
+ public override void Flush()
+ {
+ foreach (Stream stream in _UnderlyingStreams)
+ stream.Flush();
+ }
+
+ protected override void Dispose(bool disposing)
+ {
+ base.Dispose(disposing);
+ if (_UnderlyingStreams != null)
+ {
+ foreach (Stream stream in _UnderlyingStreams)
+ stream.Dispose();
+ }
+ }
+
+ ///
+ /// Gets the total length in bytes of the underlying streams.
+ ///
+ ///
+ /// The total length of the underlying streams.
+ ///
+ ///
+ /// A long value representing the total length of the underlying streams in bytes.
+ ///
+ /// A class derived from Stream does not support seeking.
+ /// Methods were called after the stream was closed.
+ public override long Length
+ {
+ get
+ {
+ return _TotalLength;
+ }
+ }
+
+ ///
+ /// Gets or sets the position within the current stream.
+ ///
+ ///
+ /// The current position within the stream.
+ /// An I/O error occurs.
+ /// The stream does not support seeking.
+ /// Methods were called after the stream was closed.
+ public override long Position
+ {
+ get
+ {
+ return _Position;
+ }
+
+ set
+ {
+ if (value < 0 || value > _TotalLength)
+ throw new ArgumentOutOfRangeException("value");
+
+ _Position = value;
+ if (value == _TotalLength)
+ _Index = _UnderlyingStreams.Length - 1;
+ else
+ {
+ while (_Index > 0 && _Position < _UnderlyingStartingPositions[_Index])
+ _Index--;
+
+ while (_Index < _UnderlyingStreams.Length - 1 && _Position >= _UnderlyingStartingPositions[_Index] + _UnderlyingStreams[_Index].Length)
+ _Index++;
+ }
+ }
+ }
+
+ ///
+ /// Reads a sequence of bytes from the current stream and advances the position within the stream by the number of bytes read.
+ ///
+ /// An array of bytes. When this method returns, the buffer contains the specified byte array with the values between offset and (offset + count - 1) replaced by the bytes read from the current source.
+ /// The zero-based byte offset in buffer at which to begin storing the data read from the current stream.
+ /// The maximum number of bytes to be read from the current stream.
+ ///
+ /// The total number of bytes read into the buffer. This can be less than the number of bytes requested if that many bytes are not currently available, or zero (0) if the end of the stream has been reached.
+ ///
+ /// The sum of offset and count is larger than the buffer length.
+ /// Methods were called after the stream was closed.
+ /// The stream does not support reading.
+ /// buffer is null.
+ /// An I/O error occurs.
+ /// offset or count is negative.
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ int result = 0;
+ while (count > 0)
+ {
+ _UnderlyingStreams[_Index].Position = _Position - _UnderlyingStartingPositions[_Index];
+ int bytesRead = _UnderlyingStreams[_Index].Read(buffer, offset, count);
+ result += bytesRead;
+ offset += bytesRead;
+ count -= bytesRead;
+ _Position += bytesRead;
+
+ if (count > 0)
+ {
+ if (_Index < _UnderlyingStreams.Length - 1)
+ {
+ _Index++;
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"[CombinedStream.Read()] Moving the stream to Index: {_Index}");
+#endif
+ }
+ else
+ break;
+ }
+ }
+
+ return result;
+ }
+
+ ///
+ /// Sets the position within the current stream.
+ ///
+ /// A byte offset relative to the origin parameter.
+ /// A value of type indicating the reference point used to obtain the new position.
+ ///
+ /// The new position within the current stream.
+ ///
+ /// An I/O error occurs.
+ /// The stream does not support seeking, such as if the stream is constructed from a pipe or console output.
+ /// Methods were called after the stream was closed.
+ public override long Seek(long offset, SeekOrigin origin)
+ {
+ switch (origin)
+ {
+ case SeekOrigin.Begin:
+ Position = offset;
+ break;
+
+ case SeekOrigin.Current:
+ Position += offset;
+ break;
+
+ case SeekOrigin.End:
+ Position = Length + offset;
+ break;
+ }
+
+ return Position;
+ }
+
+ ///
+ /// Throws since the
+ /// class does not support changing the length.
+ ///
+ /// The desired length of the current stream in bytes.
+ ///
+ /// does not support this operation.
+ ///
+ public override void SetLength(long value)
+ {
+ throw new NotSupportedException("The method or operation is not supported by CombinedStream.");
+ }
+
+ ///
+ /// Writes a sequence of bytes to the current stream and advances the position
+ /// within the stream by the number of bytes written, spanning multiple
+ /// underlying streams where necessary.
+ /// An array of bytes. This method copies count bytes from buffer to the current stream.
+ /// The zero-based byte offset in buffer at which to begin copying bytes to the current stream.
+ /// The number of bytes to be written to the current stream.
+ ///
+ /// Bytes beyond the combined length of the underlying streams are silently discarded.
+ ///
+ public override void Write(byte[] buffer, int offset, int count)
+ {
+ while (count > 0)
+ {
+ _UnderlyingStreams[_Index].Position = _Position - _UnderlyingStartingPositions[_Index];
+ int bytesRead = count;
+ int remainedMaxLength = (int)(_UnderlyingStreams[_Index].Length - _UnderlyingStreams[_Index].Position);
+ if (remainedMaxLength < count)
+ {
+ bytesRead = remainedMaxLength;
+ }
+ _UnderlyingStreams[_Index].Write(buffer, offset, bytesRead);
+ offset += bytesRead;
+ count -= bytesRead;
+ _Position += bytesRead;
+
+ if (count > 0)
+ {
+ if (_Index < _UnderlyingStreams.Length - 1)
+ {
+ _Index++;
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"[CombinedStream.Write()] Moving the stream to Index: {_Index}");
+#endif
+ }
+ else
+ break;
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Hi3Helper.SharpHDiffPatch/HDiffPatch.cs b/Hi3Helper.SharpHDiffPatch/HDiffPatch.cs
index bd499c6..c36f25d 100644
--- a/Hi3Helper.SharpHDiffPatch/HDiffPatch.cs
+++ b/Hi3Helper.SharpHDiffPatch/HDiffPatch.cs
@@ -1,20 +1,21 @@
-using System;
-using System.IO;
+using System.IO;
namespace Hi3Helper.SharpHDiffPatch
{
public enum CompressionMode
{
+ nocomp,
lzma,
- zstd,
- nocomp
+ zstd
}
public enum ChecksumMode
{
- crc32
+ nochecksum,
+ crc32,
+ fadler64
}
- public struct THDiffzHead
+ public class THDiffzHead
{
public ulong typesEndPos;
public ulong coverCount;
@@ -31,7 +32,7 @@ public struct THDiffzHead
public ulong coverEndPos;
}
- public struct TDirDiffInfo
+ public class TDirDiffInfo
{
public bool isInputDir;
public bool isOutputDir;
@@ -53,7 +54,7 @@ public struct TDirDiffInfo
public CompressedHDiffInfo hdiffinfo;
}
- public struct SingleCompressedHDiffInfo
+ public class SingleCompressedHDiffInfo
{
public CompressionMode compMode;
public string headerMagic;
@@ -62,7 +63,7 @@ public struct SingleCompressedHDiffInfo
public THDiffzHead headInfo;
}
- public struct CompressedHDiffInfo
+ public class CompressedHDiffInfo
{
public CompressionMode compMode;
public string patchPath;
@@ -75,7 +76,7 @@ public struct CompressedHDiffInfo
public THDiffzHead headInfo;
}
- public struct HDiffHeaderInfo
+ public class HDiffHeaderInfo
{
public CompressionMode compMode;
public ChecksumMode checksumMode;
@@ -110,18 +111,18 @@ public struct HDiffHeaderInfo
public ulong hdiffDataSize;
}
- public sealed partial class HDiffPatch
+ public sealed partial class HDiffPatch : IPatch
{
- private CompressedHDiffInfo singleHDiffInfo;
- private TDirDiffInfo tDirDiffInfo;
- private HDiffHeaderInfo headerInfo;
- private Stream diffStream;
- private string diffPath;
- private string headerInfoLine;
- private bool isPatchDir = true;
+ private CompressedHDiffInfo singleHDiffInfo { get; set; }
+ private TDirDiffInfo tDirDiffInfo { get; set; }
+ private HDiffHeaderInfo headerInfo { get; set; }
+ private Stream diffStream { get; set; }
+ private string diffPath { get; set; }
+ private bool isPatchDir { get; set; }
public HDiffPatch()
{
+ isPatchDir = true;
}
#region Header Initialization
@@ -134,251 +135,16 @@ public void Initialize(string diff)
using (BinaryReader sr = new BinaryReader(diffStream))
{
- TryParseHeaderInfo(sr);
+ isPatchDir = Header.TryParseHeaderInfo(sr, this.diffPath, this.tDirDiffInfo, this.singleHDiffInfo, this.headerInfo);
}
}
- public void Patch(string inputPath, string outputPath, bool useBufferedPatch = true)
+ public void Patch(string inputPath, string outputPath, bool useBufferedPatch)
{
- if (isPatchDir && tDirDiffInfo.isInputDir && tDirDiffInfo.isOutputDir)
- {
- // TODO
- RunDirectoryPatch(inputPath, outputPath);
- }
- else
- {
- PatchSingle patchSingle = new PatchSingle(singleHDiffInfo);
- patchSingle.Patch(inputPath, outputPath, useBufferedPatch);
- }
- }
-
- private void TryParseHeaderInfo(BinaryReader sr)
- {
- headerInfoLine = sr.ReadStringToNull();
-
- if (headerInfoLine.Length > 64 || !headerInfoLine.StartsWith("HDIFF")) throw new FormatException("This is not a HDiff file format!");
-
- string[] hInfoArr = headerInfoLine.Split('&');
- if (hInfoArr.Length == 2)
- {
- byte pFileVer = TryGetVersion(hInfoArr[0]);
- if (pFileVer != 13) throw new FormatException($"HDiff version is unsupported. This patcher only supports the single patch file with version: 13 only!");
-
- isPatchDir = false;
-
- singleHDiffInfo = new CompressedHDiffInfo();
- singleHDiffInfo.headerMagic = hInfoArr[0];
-
- TryParseCompressionEnum(hInfoArr[1], out singleHDiffInfo.compMode);
- }
- else if (hInfoArr.Length != 3) throw new IndexOutOfRangeException($"Header info is incomplete! Expecting 3 parts but got {hInfoArr.Length} part(s) instead (Raw: {headerInfoLine})");
-
- if (isPatchDir)
- {
- byte hInfoVer = TryGetVersion(hInfoArr[0]);
- if (hInfoVer != 19) throw new FormatException($"HDiff version is unsupported. This patcher only supports the directory patch file with version: 19 only!");
-
- if (!Enum.TryParse(hInfoArr[1], true, out headerInfo.compMode)) throw new FormatException($"This patcher doesn't support {hInfoArr[1]} compression!");
- if (!Enum.TryParse(hInfoArr[2], true, out headerInfo.checksumMode)) throw new FormatException($"This patcher doesn't support {hInfoArr[2]} checksum!");
-
- TryReadDirHeaderNumInfo(sr);
- TryAssignDirHeaderExtents(sr);
- }
- else
- {
- GetSingleCompressedHDiffInfo(sr);
- }
- }
-
- private bool TryParseCompressionEnum(string input, out CompressionMode compOut)
- {
- if (input == string.Empty)
- {
- compOut = CompressionMode.nocomp;
- return true;
- }
-
- throw new NotSupportedException("This patcher doesn't support patching with compression at the moment");
- // return Enum.TryParse(input, out compOut);
- }
-
- private void TryAssignDirHeaderExtents(BinaryReader sr)
- {
- ulong curPos = (ulong)sr.BaseStream.Position;
- headerInfo.headDataOffset = curPos;
-
- curPos += (headerInfo.headDataCompressedSize > 0 ? headerInfo.headDataCompressedSize : headerInfo.headDataSize);
- headerInfo.privateExternDataOffset = curPos;
-
- curPos += headerInfo.privateExternDataSize;
- tDirDiffInfo.externDataOffset = curPos;
-
- curPos += tDirDiffInfo.externDataSize;
- headerInfo.hdiffDataOffset = curPos;
- headerInfo.hdiffDataSize = (ulong)sr.BaseStream.Length - curPos;
-
- TryReadTDirHDiffInfo(sr);
- }
-
- private void TryReadTDirHDiffInfo(BinaryReader sr)
- {
- tDirDiffInfo.isSingleCompressedDiff = false;
- tDirDiffInfo.sdiffInfo.stepMemSize = 0;
-
- if (IsSingleCompressedHDiff(sr))
- {
- // TODO
- }
- else
- {
- GetNonSingleCompressedHDiffInfo(sr);
- }
- }
-
- private void GetSingleCompressedHDiffInfo(BinaryReader sr)
- {
- singleHDiffInfo.patchPath = diffPath;
- singleHDiffInfo.headInfo.typesEndPos = (ulong)sr.BaseStream.Position;
- singleHDiffInfo.newDataSize = sr.ReadUInt64VarInt();
- singleHDiffInfo.oldDataSize = sr.ReadUInt64VarInt();
-
- singleHDiffInfo.headInfo.coverCount = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.compressSizeBeginPos = (ulong)sr.BaseStream.Position;
- singleHDiffInfo.headInfo.cover_buf_size = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.compress_cover_buf_size = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.rle_ctrlBuf_size = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.compress_rle_ctrlBuf_size = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.rle_codeBuf_size = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.compress_rle_codeBuf_size = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.newDataDiff_size = sr.ReadUInt64VarInt();
- singleHDiffInfo.headInfo.compress_newDataDiff_size = sr.ReadUInt64VarInt();
-
- singleHDiffInfo.headInfo.headEndPos = (ulong)sr.BaseStream.Position;
- singleHDiffInfo.compressedCount = (ulong)((singleHDiffInfo.headInfo.compress_cover_buf_size > 1) ? 1 : 0)
- + (ulong)((singleHDiffInfo.headInfo.compress_rle_ctrlBuf_size > 1) ? 1 : 0)
- + (ulong)((singleHDiffInfo.headInfo.compress_rle_codeBuf_size > 1) ? 1 : 0)
- + (ulong)((singleHDiffInfo.headInfo.compress_newDataDiff_size > 1) ? 1 : 0);
-
- singleHDiffInfo.headInfo.coverEndPos = singleHDiffInfo.headInfo.headEndPos
- + (singleHDiffInfo.headInfo.compress_cover_buf_size > 0 ?
- singleHDiffInfo.headInfo.compress_cover_buf_size :
- singleHDiffInfo.headInfo.cover_buf_size);
- }
-
- private void GetNonSingleCompressedHDiffInfo(BinaryReader sr)
- {
- if (!tDirDiffInfo.hdiffinfo.headerMagic.StartsWith("HDIFF")) throw new InvalidDataException("The compression chunk magic is not valid!");
- byte magicVersion = TryGetVersion(tDirDiffInfo.hdiffinfo.headerMagic);
-
- if (magicVersion != 13) throw new InvalidDataException($"The compression chunk format: v{magicVersion} is not supported!");
-
- tDirDiffInfo.hdiffinfo.headInfo.typesEndPos = (ulong)sr.BaseStream.Position;
- tDirDiffInfo.newDataSize = sr.ReadUInt64VarInt();
- tDirDiffInfo.oldDataSize = sr.ReadUInt64VarInt();
-
- tDirDiffInfo.hdiffinfo.headInfo.coverCount = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.compressSizeBeginPos = (ulong)sr.BaseStream.Position;
- tDirDiffInfo.hdiffinfo.headInfo.cover_buf_size = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.rle_ctrlBuf_size = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.compress_rle_ctrlBuf_size = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.rle_codeBuf_size = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.compress_rle_codeBuf_size = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.newDataDiff_size = sr.ReadUInt64VarInt();
- tDirDiffInfo.hdiffinfo.headInfo.compress_newDataDiff_size = sr.ReadUInt64VarInt();
-
- tDirDiffInfo.hdiffinfo.headInfo.headEndPos = (ulong)sr.BaseStream.Position;
- tDirDiffInfo.compressedCount = (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size > 1) ? 1 : 0)
- + (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_rle_ctrlBuf_size > 1) ? 1 : 0)
- + (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_rle_codeBuf_size > 1) ? 1 : 0)
- + (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_newDataDiff_size > 1) ? 1 : 0);
-
- tDirDiffInfo.hdiffinfo.headInfo.coverEndPos = tDirDiffInfo.hdiffinfo.headInfo.headEndPos
- + (tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size > 0 ?
- tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size :
- tDirDiffInfo.hdiffinfo.headInfo.cover_buf_size);
- }
-
- private bool IsSingleCompressedHDiff(BinaryReader sr)
- {
- sr.BaseStream.Position = (long)headerInfo.hdiffDataOffset;
- string singleCompressedHeaderLine = sr.ReadStringToNull();
- string[] singleCompressedHeaderArr = singleCompressedHeaderLine.Split('&');
-
- if (singleCompressedHeaderArr[0].Equals("HDIFFSF20"))
- {
- // TODO
- }
- else
- {
- tDirDiffInfo.hdiffinfo = new CompressedHDiffInfo();
- tDirDiffInfo.hdiffinfo.headInfo = new THDiffzHead();
-
- if (!Enum.TryParse(singleCompressedHeaderArr[1], true, out tDirDiffInfo.hdiffinfo.compMode)) throw new FormatException($"The compression chunk has unsupported compression: {singleCompressedHeaderArr[1]}");
- tDirDiffInfo.hdiffinfo.headerMagic = singleCompressedHeaderArr[0];
- return false;
- }
-
- return true;
- }
-
- private void TryReadDirHeaderNumInfo(BinaryReader sr)
- {
- tDirDiffInfo.isInputDir = sr.ReadBoolean();
- tDirDiffInfo.isOutputDir = sr.ReadBoolean();
-
- headerInfo.inputDirCount = sr.ReadUInt64VarInt();
- headerInfo.inputSumSize = sr.ReadUInt64VarInt();
-
- headerInfo.outputDirCount = sr.ReadUInt64VarInt();
- headerInfo.outputSumSize = sr.ReadUInt64VarInt();
-
- headerInfo.inputRefFileCount = sr.ReadUInt64VarInt();
- headerInfo.inputRefFileSize = sr.ReadUInt64VarInt();
-
- headerInfo.outputRefFileCount = sr.ReadUInt64VarInt();
- headerInfo.outputRefFileSize = sr.ReadUInt64VarInt();
-
- headerInfo.sameFilePairCount = sr.ReadUInt64VarInt();
- headerInfo.sameFileSize = sr.ReadUInt64VarInt();
-
- headerInfo.newExecuteCount = (int)sr.ReadUInt64VarInt();
- headerInfo.privateReservedDataSize = sr.ReadUInt64VarInt();
- headerInfo.privateExternDataSize = sr.ReadUInt64VarInt();
- tDirDiffInfo.externDataSize = sr.ReadUInt64VarInt();
-
- headerInfo.compressSizeBeginPos = sr.BaseStream.Position;
-
- headerInfo.headDataSize = sr.ReadUInt64VarInt();
- headerInfo.headDataCompressedSize = sr.ReadUInt64VarInt();
- tDirDiffInfo.checksumByteSize = (byte)sr.ReadUInt64VarInt();
-
- tDirDiffInfo.checksumOffset = sr.BaseStream.Position;
- tDirDiffInfo.dirDataIsCompressed = headerInfo.headDataCompressedSize > 0;
-
- if (tDirDiffInfo.checksumByteSize > 0)
- {
- TrySeekHeader(sr, tDirDiffInfo.checksumByteSize * 4);
- }
- }
-
- private void TrySeekHeader(BinaryReader sr, int skipLongSize)
- {
- int len = 4096;
- if (len > skipLongSize)
- {
- len = skipLongSize;
- }
-
- sr.BaseStream.Seek(len, SeekOrigin.Current);
- }
-
- private byte TryGetVersion(string str)
- {
- string num = str.Substring(5);
- if (byte.TryParse(num, out byte ret)) return ret;
-
- throw new InvalidDataException($"Version string is invalid! Value: {num} (Raw: {str})");
+ IPatch patcher = isPatchDir && tDirDiffInfo.isInputDir && tDirDiffInfo.isOutputDir ?
+ new PatchDir(tDirDiffInfo, headerInfo, diffPath) :
+ new PatchSingle(singleHDiffInfo);
+ patcher.Patch(inputPath, outputPath, useBufferedPatch);
}
#endregion
}
diff --git a/Hi3Helper.SharpHDiffPatch/Hash/Adler64/Adler64.cs b/Hi3Helper.SharpHDiffPatch/Hash/Adler64/Adler64.cs
new file mode 100644
index 0000000..d444f00
--- /dev/null
+++ b/Hi3Helper.SharpHDiffPatch/Hash/Adler64/Adler64.cs
@@ -0,0 +1,334 @@
+/*
+ * Adler64-SIMD implementation by Anarh2404 under MIT License
+ * https://github.com/Anarh2404/AdlerSimd
+ *
+ * And original Adler (Adler32) implementation by Jean-Loup Gailly and Mark Adler from ZLib
+ * https://www.zlib.net | https://github.com/madler/zlib
+ */
+
+using System;
+using System.Runtime.InteropServices;
+using System.Runtime.Intrinsics;
+using System.Runtime.Intrinsics.X86;
+
+namespace Hi3Helper.SharpHDiffPatch.Hash
+{
+ public static class Adler64
+ {
+ private const ulong MOD64 = 4294967291;
+ private const uint NMAX64 = 363898415;
+ private const int MAXPART = 363898400;
+ private const int BLOCK_SIZE = 32;
+
+ public static ulong GetAdler64(ReadOnlySpan buffer, ulong adler = 1)
+ {
+ ulong s1 = adler & 0xffffffff;
+ ulong s2 = adler >> 32;
+ if (Ssse3.IsSupported)
+ {
+ return GetSse(buffer, s1, s2);
+ }
+ return GetSimpleOptimized(buffer, s1, s2);
+ }
+
+ internal static ulong GetSimple(ReadOnlySpan buffer, ulong s1, ulong s2)
+ {
+ foreach (var n in buffer)
+ {
+ s1 = (s1 + n) % MOD64;
+ s2 = (s2 + s1) % MOD64;
+ }
+
+ return (s2 << 32) | s1;
+ }
+
+
+ internal static ulong GetSimpleOptimized(ReadOnlySpan buf, ulong adler, ulong sum2)
+ {
+ // Workaround
+ // TODO: try to find the problem
+
+ if (buf.Length > MAXPART)
+ {
+ int parts = (buf.Length / MAXPART) + 1;
+ ulong result = 0;
+ for (int i = 0; i < parts; i++)
+ {
+ var start = MAXPART * i;
+ var count = Math.Min(buf.Length - start, MAXPART);
+ var slice = buf.Slice(start, count);
+ result = GetSimpleOptimizedInternal(slice, adler, sum2);
+ adler = result & 0xffffffff;
+ sum2 = result >> 32;
+ }
+
+ return result;
+ }
+
+ return GetSimpleOptimizedInternal(buf, adler, sum2);
+ }
+
+ internal static ulong GetSimpleOptimizedInternal(ReadOnlySpan buf, ulong adler, ulong sum2)
+ {
+ ulong n;
+ ulong len = (ulong)buf.Length;
+ if (len == 1)
+ {
+ adler += buf[0];
+ if (adler >= MOD64)
+ adler -= MOD64;
+ sum2 += adler;
+ if (sum2 >= MOD64)
+ sum2 -= MOD64;
+ return adler | (sum2 << 32);
+ }
+ var idx = 0;
+ if (len < 16)
+ {
+ while (len-- != 0)
+ {
+ adler += buf[idx++];
+ sum2 += adler;
+ }
+ if (adler >= MOD64)
+ adler -= MOD64;
+ sum2 %= MOD64; /* only added so many BASE's */
+ return adler | (sum2 << 32);
+ }
+
+ /* do length NMAX blocks -- requires just one modulo operation */
+
+ while (len >= NMAX64)
+ {
+ len -= NMAX64;
+ n = NMAX64 / 16; /* NMAX is divisible by 16 */
+ do
+ {
+ /* 16 sums unrolled */
+ adler += buf[idx + 0];
+ sum2 += adler;
+ adler += buf[idx + 1];
+ sum2 += adler;
+ adler += buf[idx + 2];
+ sum2 += adler;
+ adler += buf[idx + 3];
+ sum2 += adler;
+ adler += buf[idx + 4];
+ sum2 += adler;
+ adler += buf[idx + 5];
+ sum2 += adler;
+ adler += buf[idx + 6];
+ sum2 += adler;
+ adler += buf[idx + 7];
+ sum2 += adler;
+ adler += buf[idx + 8];
+ sum2 += adler;
+ adler += buf[idx + 9];
+ sum2 += adler;
+ adler += buf[idx + 10];
+ sum2 += adler;
+ adler += buf[idx + 11];
+ sum2 += adler;
+ adler += buf[idx + 12];
+ sum2 += adler;
+ adler += buf[idx + 13];
+ sum2 += adler;
+ adler += buf[idx + 14];
+ sum2 += adler;
+ adler += buf[idx + 15];
+ sum2 += adler;
+
+ idx += 16;
+ } while (--n != 0);
+ adler %= MOD64;
+ sum2 %= MOD64;
+ }
+
+ /* do remaining bytes (less than NMAX, still just one modulo) */
+ if (len > 0)
+ { /* avoid modulos if none remaining */
+ while (len >= 16)
+ {
+ len -= 16;
+ /* 16 sums unrolled */
+ adler += buf[idx + 0];
+ sum2 += adler;
+ adler += buf[idx + 1];
+ sum2 += adler;
+ adler += buf[idx + 2];
+ sum2 += adler;
+ adler += buf[idx + 3];
+ sum2 += adler;
+ adler += buf[idx + 4];
+ sum2 += adler;
+ adler += buf[idx + 5];
+ sum2 += adler;
+ adler += buf[idx + 6];
+ sum2 += adler;
+ adler += buf[idx + 7];
+ sum2 += adler;
+ adler += buf[idx + 8];
+ sum2 += adler;
+ adler += buf[idx + 9];
+ sum2 += adler;
+ adler += buf[idx + 10];
+ sum2 += adler;
+ adler += buf[idx + 11];
+ sum2 += adler;
+ adler += buf[idx + 12];
+ sum2 += adler;
+ adler += buf[idx + 13];
+ sum2 += adler;
+ adler += buf[idx + 14];
+ sum2 += adler;
+ adler += buf[idx + 15];
+ sum2 += adler;
+ idx += 16;
+ }
+ while (len-- != 0)
+ {
+ adler += buf[idx++];
+ sum2 += adler;
+ }
+ adler %= MOD64;
+ sum2 %= MOD64;
+ }
+
+ /* return recombined sums */
+ return adler | (sum2 << 32);
+ }
+
+ internal unsafe static ulong GetSse(ReadOnlySpan buffer, ulong s1, ulong s2)
+ {
+ uint len = (uint)buffer.Length;
+
+ uint blocks = len / BLOCK_SIZE;
+ len = len - blocks * BLOCK_SIZE;
+
+ Vector128 tap1 = Vector128.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17);
+ Vector128 tap2 = Vector128.Create(16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
+ Vector128 zero = Vector128.Zero;
+ Vector128 onesShort = Vector128.Create(1, 1, 1, 1, 1, 1, 1, 1);
+ Vector128 onesInt = Vector128.Create(1, 1, 1, 1);
+ Vector128 shuffleMask2301 = Vector128.Create((byte)4, 5, 6, 7, 0, 1, 2, 3, 12, 13, 14, 15, 8, 9, 10, 11);
+ Vector128 shuffleMask1032 = Vector128.Create((byte)8, 9, 10, 11, 12, 13, 14, 15, 0, 1, 2, 3, 4, 5, 6, 7);
+ Vector128 shuffleMaskTrim = Vector128.Create(0, 1, 2, 3, 255, 255, 255, 255, 8, 9, 10, 11, 255, 255, 255, 255);
+ // A B C D -> B A D C
+ const int S2301 = 2 << 6 | 3 << 4 | 0 << 2 | 1;
+
+
+ fixed (byte* bufPtr = &MemoryMarshal.GetReference(buffer))
+ {
+ var buf = bufPtr;
+
+ while (blocks != 0)
+ {
+ uint n = NMAX64 / BLOCK_SIZE;
+ if (n > blocks)
+ {
+ n = blocks;
+ }
+
+ blocks -= n;
+
+ // Process n blocks of data. At most NMAX data bytes can be
+ // processed before s2 must be reduced modulo BASE.
+ Vector128 v_ps = Vector128.Create(0, s1 * n);
+ Vector128 v_s2 = Vector128.Create(0, s2);
+ Vector128 v_s1 = Vector128.Create(0ul, 0);
+
+ do
+ {
+ // Load 32 input bytes.
+ Vector128 bytes1 = Sse2.LoadVector128(&buf[0]);
+ Vector128 bytes2 = Sse2.LoadVector128(&buf[16]);
+
+
+ // Add previous block byte sum to v_ps.
+ v_ps = Sse2.Add(v_ps, v_s1);
+
+
+
+ // Horizontally add the bytes for s1, multiply-adds the
+ // bytes by [ 32, 31, 30, ... ] for s2.
+ Vector128 sad1 = Sse2.SumAbsoluteDifferences(bytes1, zero);
+ v_s1 = Sse2.Add(v_s1, sad1.AsUInt64());
+ Vector128 mad11 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
+ Vector128 mad12 = Sse2.MultiplyAddAdjacent(mad11, onesShort);
+ var mad121 = Sse2.Add(mad12, Sse2.Shuffle(mad12, S2301));
+ var madTrimmed1 = Ssse3.Shuffle(mad121.AsByte(), shuffleMaskTrim);
+ var madTimmed1ULong = madTrimmed1.AsUInt64();
+ v_s2 = Sse2.Add(v_s2, madTimmed1ULong);
+
+
+
+ Vector128 sad2 = Sse2.SumAbsoluteDifferences(bytes2, zero);
+ v_s1 = Sse2.Add(v_s1, sad2.AsUInt64());
+ Vector128 mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
+ Vector128 mad22 = Sse2.MultiplyAddAdjacent(mad2, onesShort);
+ var mad221 = Sse2.Add(mad22, Sse2.Shuffle(mad22, S2301));
+ var madTrimmed2 = Ssse3.Shuffle(mad221.AsByte(), shuffleMaskTrim);
+ var madTimmed2ULong = madTrimmed2.AsUInt64();
+ v_s2 = Sse2.Add(v_s2, madTimmed2ULong);
+
+
+ buf += BLOCK_SIZE;
+
+ n--;
+ } while (n != 0);
+
+
+ var shifted = Sse2.ShiftLeftLogical(v_ps, 5);
+ v_s2 = Sse2.Add(v_s2, shifted);
+
+ s1 += v_s1.GetElement(0);
+ s1 += v_s1.GetElement(1);
+
+
+ s2 = v_s2.GetElement(0);
+ s2 += v_s2.GetElement(1);
+
+ s1 %= MOD64;
+ s2 %= MOD64;
+ }
+
+ if (len > 0)
+ {
+ if (len >= 16)
+ {
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ s2 += (s1 += *buf++);
+ len -= 16;
+ }
+
+ while (len-- > 0)
+ {
+ s2 += (s1 += *buf++);
+ }
+ if (s1 >= MOD64)
+ {
+ s1 -= MOD64;
+ }
+
+ s2 %= MOD64;
+ }
+
+ return s1 | (s2 << 32);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/Hi3Helper.SharpHDiffPatch/Hash/CRC32/Crc32Algorithm.cs b/Hi3Helper.SharpHDiffPatch/Hash/CRC32/Crc32Algorithm.cs
new file mode 100644
index 0000000..5af6785
--- /dev/null
+++ b/Hi3Helper.SharpHDiffPatch/Hash/CRC32/Crc32Algorithm.cs
@@ -0,0 +1,341 @@
+/*
+ * Fast Crc32-NET implementation by Force.NET Team under MIT License
+ * https://github.com/force-net/Crc32.NET
+ */
+
+using System;
+using System.Buffers.Binary;
+using System.Security.Cryptography;
+
+namespace Hi3Helper.SharpHDiffPatch.Hash.Force.Crc32
+{
+ ///
+ /// Implementation of CRC-32.
+ /// This class supports several convenient static methods returning the CRC as UInt32.
+ ///
/// <summary>
/// Implementation of CRC-32 (reflected polynomial 0xEDB88320, the standard zlib/PKZip CRC).
/// Supports several convenient methods returning the CRC as UInt32, and also plugs into the
/// <see cref="HashAlgorithm"/> pipeline.
/// </summary>
public class Crc32Algorithm : HashAlgorithm
{
    // Running CRC of all data appended so far; 0 means "nothing hashed yet".
    private uint _currentCrc;

    // Byte order used when the CRC is rendered as a byte[] hash.
    // Crc32 by dariogriffo uses big endian, so, we need to be compatible and return big endian as default.
    private readonly bool _isBigEndian = true;

    // Table-driven (slicing-by-16) CRC kernel.
    private readonly SafeProxy _proxy = new SafeProxy();

    /// <summary>
    /// Initializes a new instance of the <see cref="Crc32Algorithm"/> class.
    /// </summary>
    public Crc32Algorithm()
    {
        HashSizeValue = 32;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Crc32Algorithm"/> class.
    /// </summary>
    /// <param name="isBigEndian">Should return bytes result as big endian or little endian.</param>
    public Crc32Algorithm(bool isBigEndian = true)
        : this()
    {
        _isBigEndian = isBigEndian;
    }

    /// <summary>
    /// Computes CRC-32 from multiple buffers.
    /// Call this method multiple times to chain multiple buffers.
    /// </summary>
    /// <param name="input">Input buffer containing data to be checksummed.</param>
    public void Append(ReadOnlySpan<byte> input)
    {
        _currentCrc = AppendInternal(_currentCrc, input);
    }

    /// <summary>
    /// Computes CRC-32 from multiple buffers.
    /// Call this method multiple times to chain multiple buffers.
    /// </summary>
    /// <param name="input">Input buffer containing data to be checksummed.</param>
    public void Append(byte[] input)
    {
        // A null array converts to an empty span, so this overload tolerates null input.
        _currentCrc = AppendInternal(_currentCrc, input);
    }

    /// <summary>
    /// Computes CRC-32 from multiple buffers.
    /// Call this method multiple times to chain multiple buffers.
    /// </summary>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <param name="offset">Offset of the input data within the buffer.</param>
    /// <param name="length">Length of the input data in the buffer.</param>
    public void Append(byte[] input, int offset, int length)
    {
        if (input == null)
            throw new ArgumentNullException(nameof(input));
        if (offset < 0 || length < 0 || offset + length > input.Length)
            throw new ArgumentOutOfRangeException(nameof(length));
        _currentCrc = AppendInternal(_currentCrc, input, offset, length);
    }

    /// <summary>
    /// Computes CRC-32 from multiple buffers.
    /// Call this method multiple times to chain multiple buffers.
    /// </summary>
    /// <param name="initial">
    /// Initial CRC value for the algorithm. It is zero for the first buffer.
    /// Subsequent buffers should have their initial value set to CRC value returned by previous call to this method.
    /// </param>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <param name="offset">Offset of the input data within the buffer.</param>
    /// <param name="length">Length of the input data in the buffer.</param>
    /// <returns>Accumulated CRC-32 of all buffers processed so far.</returns>
    public uint Append(uint initial, byte[] input, int offset, int length)
    {
        if (input == null)
            throw new ArgumentNullException(nameof(input));
        if (offset < 0 || length < 0 || offset + length > input.Length)
            throw new ArgumentOutOfRangeException(nameof(length));
        return AppendInternal(initial, input, offset, length);
    }

    /// <summary>
    /// Computes CRC-32 from multiple buffers.
    /// Call this method multiple times to chain multiple buffers.
    /// </summary>
    /// <param name="initial">
    /// Initial CRC value for the algorithm. It is zero for the first buffer.
    /// Subsequent buffers should have their initial value set to CRC value returned by previous call to this method.
    /// </param>
    /// <param name="input">Input buffer containing data to be checksummed.</param>
    /// <returns>Accumulated CRC-32 of all buffers processed so far.</returns>
    public uint Append(uint initial, byte[] input)
    {
        if (input == null)
            throw new ArgumentNullException(nameof(input));
        return AppendInternal(initial, input, 0, input.Length);
    }

    /// <summary>
    /// Computes CRC-32 from multiple buffers.
    /// Call this method multiple times to chain multiple buffers.
    /// </summary>
    /// <param name="initial">
    /// Initial CRC value for the algorithm. It is zero for the first buffer.
    /// Subsequent buffers should have their initial value set to CRC value returned by previous call to this method.
    /// </param>
    /// <param name="input">Input buffer containing data to be checksummed.</param>
    /// <returns>Accumulated CRC-32 of all buffers processed so far.</returns>
    public uint Append(uint initial, ReadOnlySpan<byte> input)
    {
        return AppendInternal(initial, input);
    }

    /// <summary>
    /// Computes CRC-32 from input buffer.
    /// </summary>
    /// <param name="input">Input buffer containing data to be checksummed.</param>
    /// <returns>CRC-32 buffer of the input buffer.</returns>
    public byte[] ComputeHashByte(byte[] input)
    {
        _currentCrc = AppendInternal(0, input);
        return HashFinal();
    }

    /// <summary>
    /// Computes CRC-32 from input buffer.
    /// </summary>
    /// <param name="input">Input buffer containing data to be checksummed.</param>
    /// <returns>CRC-32 buffer of the input buffer.</returns>
    public byte[] ComputeHashByte(ReadOnlySpan<byte> input)
    {
        _currentCrc = AppendInternal(0, input);
        return HashFinal();
    }

    /// <summary>
    /// Computes CRC-32 from input buffer.
    /// </summary>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <param name="offset">Offset of the input data within the buffer.</param>
    /// <param name="length">Length of the input data in the buffer.</param>
    /// <returns>CRC-32 of the data in the buffer.</returns>
    public uint Compute(byte[] input, int offset, int length)
    {
        return Append(0, input, offset, length);
    }

    /// <summary>
    /// Computes CRC-32 from input buffer.
    /// </summary>
    /// <param name="input">Input buffer containing data to be checksummed.</param>
    /// <returns>CRC-32 of the data in the buffer.</returns>
    public uint Compute(byte[] input)
    {
        return Append(0, input);
    }

    /// <summary>
    /// Computes CRC-32 from input buffer.
    /// </summary>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <returns>CRC-32 of the data in the buffer.</returns>
    public uint Compute(ReadOnlySpan<byte> input)
    {
        return Append(0, input);
    }

    /// <summary>
    /// Computes CRC-32 from input buffer and writes it after end of data
    /// (buffer should have 4 bytes reserved space for it).
    /// Can be used in conjunction with <see cref="IsValidWithCrcAtEnd(byte[],int,int)"/>.
    /// </summary>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <param name="offset">Offset of the input data within the buffer.</param>
    /// <param name="length">Length of the input data in the buffer.</param>
    /// <returns>CRC-32 of the data in the buffer.</returns>
    public uint ComputeAndWriteToEnd(byte[] input, int offset, int length)
    {
        // Account for 'offset' so the 4 CRC bytes written below never run past the array.
        if (offset + length + 4 > input.Length)
            throw new ArgumentOutOfRangeException(nameof(length), "Length of data should be less than array length - 4 bytes of CRC data");
        var crc = Append(0, input, offset, length);
        var r = offset + length;
        // CRC is stored little-endian (least significant byte first), matching zlib convention.
        input[r] = (byte)crc;
        input[r + 1] = (byte)(crc >> 8);
        input[r + 2] = (byte)(crc >> 16);
        input[r + 3] = (byte)(crc >> 24);
        return crc;
    }

    /// <summary>
    /// Computes CRC-32 from input buffer - 4 bytes and writes it as last 4 bytes of buffer.
    /// Can be used in conjunction with <see cref="IsValidWithCrcAtEnd(byte[])"/>.
    /// </summary>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <returns>CRC-32 of the data in the buffer.</returns>
    public uint ComputeAndWriteToEnd(byte[] input)
    {
        if (input.Length < 4)
            throw new ArgumentOutOfRangeException(nameof(input), "Input array should be 4 bytes at least");
        return ComputeAndWriteToEnd(input, 0, input.Length - 4);
    }

    /// <summary>
    /// Validates correctness of CRC-32 data in source buffer with assumption that CRC-32 data
    /// located at end of buffer in reverse bytes order.
    /// Can be used in conjunction with <see cref="ComputeAndWriteToEnd(byte[],int,int)"/>.
    /// </summary>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <param name="offset">Offset of the input data within the buffer.</param>
    /// <param name="lengthWithCrc">Length of the input data in the buffer with CRC-32 bytes.</param>
    /// <returns>Is checksum valid.</returns>
    public bool IsValidWithCrcAtEnd(byte[] input, int offset, int lengthWithCrc)
    {
        // 0x2144DF1C is the CRC-32 residue: CRC of any message followed by its own
        // little-endian CRC always equals this constant.
        return Append(0, input, offset, lengthWithCrc) == 0x2144DF1C;
    }

    /// <summary>
    /// Validates correctness of CRC-32 data in source buffer with assumption that CRC-32 data
    /// located at end of buffer in reverse bytes order.
    /// Can be used in conjunction with <see cref="ComputeAndWriteToEnd(byte[])"/>.
    /// </summary>
    /// <param name="input">Input buffer with data to be checksummed.</param>
    /// <returns>Is checksum valid.</returns>
    public bool IsValidWithCrcAtEnd(byte[] input)
    {
        if (input.Length < 4)
            throw new ArgumentOutOfRangeException(nameof(input), "Input array should be 4 bytes at least");
        return Append(0, input, 0, input.Length) == 0x2144DF1C;
    }

    /// <summary>
    /// Resets internal state of the algorithm. Used internally.
    /// </summary>
    public override void Initialize()
    {
        _currentCrc = 0;
    }

    /// <summary>
    /// Appends CRC-32 from given buffer.
    /// </summary>
    protected override void HashCore(byte[] input, int offset, int length)
    {
        _currentCrc = AppendInternal(_currentCrc, input, offset, length);
    }

    /// <summary>
    /// Appends CRC-32 from given buffer.
    /// </summary>
    protected override void HashCore(ReadOnlySpan<byte> source)
    {
        _currentCrc = AppendInternal(_currentCrc, source);
    }

    /// <summary>
    /// Computes the final CRC-32 hash bytes from the accumulated state.
    /// </summary>
    protected override byte[] HashFinal()
    {
        byte[] hash = new byte[4];
        if (_isBigEndian)
            BinaryPrimitives.WriteUInt32BigEndian(hash, _currentCrc);
        else
            BinaryPrimitives.WriteUInt32LittleEndian(hash, _currentCrc);
        return hash;
    }

    /// <summary>
    /// Writes the final CRC-32 hash bytes into <paramref name="destination"/>.
    /// </summary>
    protected override bool TryHashFinal(Span<byte> destination, out int bytesWritten)
    {
        if (destination.Length < 4)
        {
            bytesWritten = 0;
            return false;
        }

        if (_isBigEndian)
        {
            BinaryPrimitives.WriteUInt32BigEndian(destination, _currentCrc);
        }
        else
        {
            BinaryPrimitives.WriteUInt32LittleEndian(destination, _currentCrc);
        }

        bytesWritten = 4;
        return true;
    }

    /// <summary>
    /// Get final hash from processed buffer.
    /// </summary>
    public override byte[] Hash
    {
        get
        {
            return HashFinal();
        }
    }

    // Guards the table kernel against zero-length input (returns the initial value unchanged).
    private uint AppendInternal(uint initial, byte[] input, int offset, int length)
    {
        if (length > 0)
        {
            return _proxy.Append(initial, input, offset, length);
        }
        else
            return initial;
    }

    // Span counterpart of the guard above.
    private uint AppendInternal(uint initial, ReadOnlySpan<byte> input)
    {
        if (input.Length > 0)
        {
            return _proxy.Append(initial, input);
        }
        else
            return initial;
    }
}
+}
diff --git a/Hi3Helper.SharpHDiffPatch/Hash/CRC32/SafeProxy.cs b/Hi3Helper.SharpHDiffPatch/Hash/CRC32/SafeProxy.cs
new file mode 100644
index 0000000..dd92168
--- /dev/null
+++ b/Hi3Helper.SharpHDiffPatch/Hash/CRC32/SafeProxy.cs
@@ -0,0 +1,83 @@
+/* This is .NET safe implementation of Crc32 algorithm.
+ * This implementation was investigated as fastest from different variants. It based on Robert Vazan native implementations of Crc32C
+ * Also, it is good for x64 and for x86, so, it seems, there is no sense to do 2 different realizations.
+ *
+ * Addition: some speed increase was found with splitting xor to 4 independent blocks. Also, some attempts to optimize unaligned tails was unsuccessfull (JIT limitations?).
+ *
+ *
+ * Max Vysokikh, 2016-2017
+ */
+
+using System;
+
+namespace Hi3Helper.SharpHDiffPatch.Hash.Force.Crc32
+{
/// <summary>
/// Managed table-driven CRC-32 kernel using the slicing-by-16 technique
/// (processes 16 input bytes per loop iteration via 16 precomputed tables).
/// </summary>
internal class SafeProxy
{
    // Reflected CRC-32 polynomial (zlib/PKZip).
    private const uint Poly = 0xedb88320u;

    // 16 tables of 256 entries: _table[t * 256 + b] is the CRC contribution of byte value b
    // processed t positions before the end of a 16-byte group.
    private readonly uint[] _table = new uint[16 * 256];

    internal SafeProxy()
    {
        Init(Poly);
    }

    /// <summary>
    /// Builds the 16 slicing tables for the given reflected polynomial.
    /// </summary>
    protected void Init(uint poly)
    {
        var table = _table;
        for (uint i = 0; i < 256; i++)
        {
            uint res = i;
            // 'res' carries over between slices: table t is table t-1 advanced by 8 more bit steps.
            for (int t = 0; t < 16; t++)
            {
                for (int k = 0; k < 8; k++) res = (res & 1) == 1 ? poly ^ (res >> 1) : (res >> 1);
                table[(t * 256) + i] = res;
            }
        }
    }

    /// <summary>
    /// Appends <paramref name="length"/> bytes of <paramref name="input"/> starting at
    /// <paramref name="offset"/> to the running CRC <paramref name="crc"/>.
    /// </summary>
    public uint Append(uint crc, byte[] input, int offset, int length)
    {
        return Append(crc, input.AsSpan(offset, length));
    }

    /// <summary>
    /// Appends <paramref name="input"/> to the running CRC <paramref name="crc"/>
    /// and returns the updated CRC.
    /// </summary>
    public uint Append(uint crc, ReadOnlySpan<byte> input)
    {
        // CRC-32 works on the bit-inverted register; undo the final inversion of the
        // previous call so chained Append calls compose correctly.
        uint crcLocal = uint.MaxValue ^ crc;

        uint[] table = _table;
        while (input.Length >= 16)
        {
            // Four independent XOR chains (a..d) to expose instruction-level parallelism.
            var a = table[(3 * 256) + input[12]]
                ^ table[(2 * 256) + input[13]]
                ^ table[(1 * 256) + input[14]]
                ^ table[(0 * 256) + input[15]];

            var b = table[(7 * 256) + input[8]]
                ^ table[(6 * 256) + input[9]]
                ^ table[(5 * 256) + input[10]]
                ^ table[(4 * 256) + input[11]];

            var c = table[(11 * 256) + input[4]]
                ^ table[(10 * 256) + input[5]]
                ^ table[(9 * 256) + input[6]]
                ^ table[(8 * 256) + input[7]];

            // Only the first 4 bytes mix in the current CRC register.
            var d = table[(15 * 256) + ((byte)crcLocal ^ input[0])]
                ^ table[(14 * 256) + ((byte)(crcLocal >> 8) ^ input[1])]
                ^ table[(13 * 256) + ((byte)(crcLocal >> 16) ^ input[2])]
                ^ table[(12 * 256) + ((crcLocal >> 24) ^ input[3])];

            crcLocal = d ^ c ^ b ^ a;
            input = input.Slice(16);
        }

        // Classic byte-at-a-time loop for the remaining tail (< 16 bytes).
        var i = 0;
        while (i < input.Length)
            crcLocal = table[(byte)(crcLocal ^ input[i++])] ^ crcLocal >> 8;

        return crcLocal ^ uint.MaxValue;
    }
}
+}
diff --git a/Hi3Helper.SharpHDiffPatch/IPatch.cs b/Hi3Helper.SharpHDiffPatch/IPatch.cs
new file mode 100644
index 0000000..2cb9db6
--- /dev/null
+++ b/Hi3Helper.SharpHDiffPatch/IPatch.cs
@@ -0,0 +1,7 @@
+namespace Hi3Helper.SharpHDiffPatch
+{
+ public interface IPatch
+ {
+ void Patch(string input, string output, bool useBufferedPatch = true);
+ }
+}
diff --git a/Hi3Helper.SharpHDiffPatch/PatchCore/Header.cs b/Hi3Helper.SharpHDiffPatch/PatchCore/Header.cs
new file mode 100644
index 0000000..71827b2
--- /dev/null
+++ b/Hi3Helper.SharpHDiffPatch/PatchCore/Header.cs
@@ -0,0 +1,240 @@
+using System;
+using System.IO;
+
+namespace Hi3Helper.SharpHDiffPatch
+{
+ internal class Header
+ {
+ internal static bool TryParseHeaderInfo(BinaryReader sr, string diffPath, TDirDiffInfo tDirDiffInfo, CompressedHDiffInfo singleHDiffInfo, HDiffHeaderInfo headerInfo)
+ {
+ string headerInfoLine = sr.ReadStringToNull();
+ bool isPatchDir = true;
+
+ if (headerInfoLine.Length > 64 || !headerInfoLine.StartsWith("HDIFF")) throw new FormatException("This is not a HDiff file format!");
+
+ string[] hInfoArr = headerInfoLine.Split('&');
+ if (hInfoArr.Length == 2)
+ {
+ byte pFileVer = TryGetVersion(hInfoArr[0]);
+ if (pFileVer != 13) throw new FormatException($"HDiff version is unsupported. This patcher only supports the single patch file with version: 13 only!");
+
+ isPatchDir = false;
+
+ singleHDiffInfo = new CompressedHDiffInfo() { headInfo = new THDiffzHead() };
+ singleHDiffInfo.headerMagic = hInfoArr[0];
+
+ TryParseCompressionEnum(hInfoArr[1], out singleHDiffInfo.compMode);
+ }
+ else if (hInfoArr.Length != 3) throw new IndexOutOfRangeException($"Header info is incomplete! Expecting 3 parts but got {hInfoArr.Length} part(s) instead (Raw: {headerInfoLine})");
+
+ if (isPatchDir)
+ {
+ byte hInfoVer = TryGetVersion(hInfoArr[0]);
+ if (hInfoVer != 19) throw new FormatException($"HDiff version is unsupported. This patcher only supports the directory patch file with version: 19 only!");
+
+ if (hInfoArr[1] != "" && !Enum.TryParse(hInfoArr[1], true, out headerInfo.compMode)) throw new FormatException($"This patcher doesn't support {hInfoArr[1]} compression!");
+ if (!Enum.TryParse(hInfoArr[2], true, out headerInfo.checksumMode)) throw new FormatException($"This patcher doesn't support {hInfoArr[2]} checksum!");
+
+ TryReadDirHeaderNumInfo(sr, tDirDiffInfo, headerInfo);
+ TryAssignDirHeaderExtents(sr, tDirDiffInfo, headerInfo);
+ }
+ else
+ {
+ GetSingleCompressedHDiffInfo(sr, diffPath, singleHDiffInfo);
+ }
+
+ return isPatchDir;
+ }
+
+ private static bool TryParseCompressionEnum(string input, out CompressionMode compOut)
+ {
+ if (input == string.Empty)
+ {
+ compOut = CompressionMode.nocomp;
+ return true;
+ }
+
+ throw new NotSupportedException("This patcher doesn't support patching with compression at the moment");
+ // return Enum.TryParse(input, out compOut);
+ }
+
+ private static void TryAssignDirHeaderExtents(BinaryReader sr, TDirDiffInfo tDirDiffInfo, HDiffHeaderInfo headerInfo)
+ {
+ ulong curPos = (ulong)sr.BaseStream.Position;
+ headerInfo.headDataOffset = curPos;
+
+ curPos += (headerInfo.headDataCompressedSize > 0 ? headerInfo.headDataCompressedSize : headerInfo.headDataSize);
+ headerInfo.privateExternDataOffset = curPos;
+
+ curPos += headerInfo.privateExternDataSize;
+ tDirDiffInfo.externDataOffset = curPos;
+
+ curPos += tDirDiffInfo.externDataSize;
+ headerInfo.hdiffDataOffset = curPos;
+ headerInfo.hdiffDataSize = (ulong)sr.BaseStream.Length - curPos;
+
+ TryReadTDirHDiffInfo(sr, tDirDiffInfo, headerInfo);
+ }
+
+ private static void TryReadTDirHDiffInfo(BinaryReader sr, TDirDiffInfo tDirDiffInfo, HDiffHeaderInfo headerInfo)
+ {
+ tDirDiffInfo.isSingleCompressedDiff = false;
+ tDirDiffInfo.sdiffInfo.stepMemSize = 0;
+
+ if (IsSingleCompressedHDiff(sr, tDirDiffInfo, headerInfo))
+ {
+ // TODO
+ }
+ else
+ {
+ GetNonSingleCompressedHDiffInfo(sr, tDirDiffInfo, headerInfo);
+ }
+ }
+
+ private static void GetSingleCompressedHDiffInfo(BinaryReader sr, string diffPath, CompressedHDiffInfo singleHDiffInfo)
+ {
+ singleHDiffInfo.patchPath = diffPath;
+ singleHDiffInfo.headInfo.typesEndPos = (ulong)sr.BaseStream.Position;
+ singleHDiffInfo.newDataSize = sr.ReadUInt64VarInt();
+ singleHDiffInfo.oldDataSize = sr.ReadUInt64VarInt();
+
+ singleHDiffInfo.headInfo.coverCount = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.compressSizeBeginPos = (ulong)sr.BaseStream.Position;
+ singleHDiffInfo.headInfo.cover_buf_size = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.compress_cover_buf_size = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.rle_ctrlBuf_size = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.compress_rle_ctrlBuf_size = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.rle_codeBuf_size = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.compress_rle_codeBuf_size = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.newDataDiff_size = sr.ReadUInt64VarInt();
+ singleHDiffInfo.headInfo.compress_newDataDiff_size = sr.ReadUInt64VarInt();
+
+ singleHDiffInfo.headInfo.headEndPos = (ulong)sr.BaseStream.Position;
+ singleHDiffInfo.compressedCount = (ulong)((singleHDiffInfo.headInfo.compress_cover_buf_size > 1) ? 1 : 0)
+ + (ulong)((singleHDiffInfo.headInfo.compress_rle_ctrlBuf_size > 1) ? 1 : 0)
+ + (ulong)((singleHDiffInfo.headInfo.compress_rle_codeBuf_size > 1) ? 1 : 0)
+ + (ulong)((singleHDiffInfo.headInfo.compress_newDataDiff_size > 1) ? 1 : 0);
+
+ singleHDiffInfo.headInfo.coverEndPos = singleHDiffInfo.headInfo.headEndPos
+ + (singleHDiffInfo.headInfo.compress_cover_buf_size > 0 ?
+ singleHDiffInfo.headInfo.compress_cover_buf_size :
+ singleHDiffInfo.headInfo.cover_buf_size);
+ }
+
+ private static void GetNonSingleCompressedHDiffInfo(BinaryReader sr, TDirDiffInfo tDirDiffInfo, HDiffHeaderInfo headerInfo)
+ {
+ if (!tDirDiffInfo.hdiffinfo.headerMagic.StartsWith("HDIFF")) throw new InvalidDataException("The header chunk magic is not valid!");
+ byte magicVersion = TryGetVersion(tDirDiffInfo.hdiffinfo.headerMagic);
+
+ if (magicVersion != 13) throw new InvalidDataException($"The header chunk format: v{magicVersion} is not supported!");
+
+ tDirDiffInfo.hdiffinfo.headInfo.typesEndPos = (ulong)sr.BaseStream.Position;
+ tDirDiffInfo.newDataSize = sr.ReadUInt64VarInt();
+ tDirDiffInfo.oldDataSize = sr.ReadUInt64VarInt();
+
+ tDirDiffInfo.hdiffinfo.headInfo.coverCount = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.compressSizeBeginPos = (ulong)sr.BaseStream.Position;
+ tDirDiffInfo.hdiffinfo.headInfo.cover_buf_size = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.rle_ctrlBuf_size = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.compress_rle_ctrlBuf_size = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.rle_codeBuf_size = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.compress_rle_codeBuf_size = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.newDataDiff_size = sr.ReadUInt64VarInt();
+ tDirDiffInfo.hdiffinfo.headInfo.compress_newDataDiff_size = sr.ReadUInt64VarInt();
+
+ tDirDiffInfo.hdiffinfo.headInfo.headEndPos = (ulong)sr.BaseStream.Position;
+ tDirDiffInfo.compressedCount = (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size > 1) ? 1 : 0)
+ + (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_rle_ctrlBuf_size > 1) ? 1 : 0)
+ + (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_rle_codeBuf_size > 1) ? 1 : 0)
+ + (ulong)((tDirDiffInfo.hdiffinfo.headInfo.compress_newDataDiff_size > 1) ? 1 : 0);
+
+ tDirDiffInfo.hdiffinfo.headInfo.coverEndPos = tDirDiffInfo.hdiffinfo.headInfo.headEndPos
+ + (tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size > 0 ?
+ tDirDiffInfo.hdiffinfo.headInfo.compress_cover_buf_size :
+ tDirDiffInfo.hdiffinfo.headInfo.cover_buf_size);
+ }
+
+ private static bool IsSingleCompressedHDiff(BinaryReader sr, TDirDiffInfo tDirDiffInfo, HDiffHeaderInfo headerInfo)
+ {
+ sr.BaseStream.Position = (long)headerInfo.hdiffDataOffset;
+ string singleCompressedHeaderLine = sr.ReadStringToNull();
+ string[] singleCompressedHeaderArr = singleCompressedHeaderLine.Split('&');
+
+ if (singleCompressedHeaderArr[0].Equals("HDIFFSF20"))
+ {
+ // TODO
+ }
+ else
+ {
+ tDirDiffInfo.hdiffinfo = new CompressedHDiffInfo();
+ tDirDiffInfo.hdiffinfo.headInfo = new THDiffzHead();
+
+ if (singleCompressedHeaderArr[1] != "" && !Enum.TryParse(singleCompressedHeaderArr[1], true, out tDirDiffInfo.hdiffinfo.compMode)) throw new FormatException($"The compression chunk has unsupported compression: {singleCompressedHeaderArr[1]}");
+ tDirDiffInfo.hdiffinfo.headerMagic = singleCompressedHeaderArr[0];
+ return false;
+ }
+
+ return true;
+ }
+
+ private static void TryReadDirHeaderNumInfo(BinaryReader sr, TDirDiffInfo tDirDiffInfo, HDiffHeaderInfo headerInfo)
+ {
+ tDirDiffInfo.isInputDir = sr.ReadBoolean();
+ tDirDiffInfo.isOutputDir = sr.ReadBoolean();
+
+ headerInfo.inputDirCount = sr.ReadUInt64VarInt();
+ headerInfo.inputSumSize = sr.ReadUInt64VarInt();
+
+ headerInfo.outputDirCount = sr.ReadUInt64VarInt();
+ headerInfo.outputSumSize = sr.ReadUInt64VarInt();
+
+ headerInfo.inputRefFileCount = sr.ReadUInt64VarInt();
+ headerInfo.inputRefFileSize = sr.ReadUInt64VarInt();
+
+ headerInfo.outputRefFileCount = sr.ReadUInt64VarInt();
+ headerInfo.outputRefFileSize = sr.ReadUInt64VarInt();
+
+ headerInfo.sameFilePairCount = sr.ReadUInt64VarInt();
+ headerInfo.sameFileSize = sr.ReadUInt64VarInt();
+
+ headerInfo.newExecuteCount = (int)sr.ReadUInt64VarInt();
+ headerInfo.privateReservedDataSize = sr.ReadUInt64VarInt();
+ headerInfo.privateExternDataSize = sr.ReadUInt64VarInt();
+ tDirDiffInfo.externDataSize = sr.ReadUInt64VarInt();
+
+ headerInfo.compressSizeBeginPos = sr.BaseStream.Position;
+
+ headerInfo.headDataSize = sr.ReadUInt64VarInt();
+ headerInfo.headDataCompressedSize = sr.ReadUInt64VarInt();
+ tDirDiffInfo.checksumByteSize = (byte)sr.ReadUInt64VarInt();
+
+ tDirDiffInfo.checksumOffset = sr.BaseStream.Position;
+ tDirDiffInfo.dirDataIsCompressed = headerInfo.headDataCompressedSize > 0;
+
+ if (tDirDiffInfo.checksumByteSize > 0)
+ {
+ TrySeekHeader(sr, tDirDiffInfo.checksumByteSize * 4);
+ }
+ }
+
+ private static void TrySeekHeader(BinaryReader sr, int skipLongSize)
+ {
+ int len = 4096;
+ if (len > skipLongSize)
+ {
+ len = skipLongSize;
+ }
+
+ sr.BaseStream.Seek(len, SeekOrigin.Current);
+ }
+
+ private static byte TryGetVersion(string str)
+ {
+ string num = str.Substring(5);
+ if (byte.TryParse(num, out byte ret)) return ret;
+
+ throw new InvalidDataException($"Version string is invalid! Value: {num} (Raw: {str})");
+ }
+ }
+}
diff --git a/Hi3Helper.SharpHDiffPatch/PatchCore/PatchCore.cs b/Hi3Helper.SharpHDiffPatch/PatchCore/PatchCore.cs
new file mode 100644
index 0000000..be38c1f
--- /dev/null
+++ b/Hi3Helper.SharpHDiffPatch/PatchCore/PatchCore.cs
@@ -0,0 +1,372 @@
+using System;
+using System.IO;
+
+namespace Hi3Helper.SharpHDiffPatch
+{
+ internal class PatchCore
+ {
+ private const int _kSignTagBit = 1;
+ private const int _kByteRleType = 2;
+
+ private static long oldPosBack;
+ private static long newPosBack;
+ private static long coverCount;
+ private static long copyReaderOffset;
+
+ internal static ChunkStream GetBufferClipsStream(Stream patchStream, long clipSize)
+ {
+ long start = patchStream.Position;
+ long end = patchStream.Position + clipSize;
+ long size = end - start;
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"Start assigning chunk as Stream: start -> {start} end -> {end} size -> {size}");
+#endif
+
+ ChunkStream stream = new ChunkStream(patchStream, patchStream.Position, end, false);
+ patchStream.Position += clipSize;
+ return stream;
+ }
+
+ internal static byte[] GetBufferClips(Stream patchStream, long clipSize, long clipSizeCompress)
+ {
+ byte[] returnClip = new byte[clipSize];
+ int bufSize = 4 << 10;
+
+ long remainToRead = clipSize;
+ int offset = 0;
+ int read = 0;
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"Start reading buffer clip with buffer size: {bufSize} to size: {clipSize}");
+#endif
+ while ((remainToRead -= read = patchStream.Read(returnClip, offset, (int)Math.Min(bufSize, remainToRead))) > 0)
+ {
+ offset += read;
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"Reading remain {read}: Remain to read: {remainToRead}");
+#endif
+ }
+
+ return returnClip;
+ }
+
+ internal static void UncoverBufferClipsStream(ChunkStream[] clips, Stream inputStream, Stream outputStream, CompressedHDiffInfo hDiffInfo, ulong newDataSize)
+ {
+ hDiffInfo.newDataSize = newDataSize;
+ UncoverBufferClipsStream(clips, inputStream, outputStream, hDiffInfo);
+ }
+
+ internal static void UncoverBufferClipsStream(ChunkStream[] clips, Stream inputStream, Stream outputStream, CompressedHDiffInfo hDiffInfo)
+ {
+ ulong uncoverCount = hDiffInfo.headInfo.coverCount;
+ coverCount = (long)hDiffInfo.headInfo.coverCount;
+ WriteCoverStreamToOutput(clips, uncoverCount, inputStream, outputStream, hDiffInfo.newDataSize);
+ }
+
+ internal static void UncoverBufferClips(ref byte[][] bufferClips, Stream inputStream, Stream outputStream, CompressedHDiffInfo hDiffInfo)
+ {
+ ulong uncoverCount = hDiffInfo.headInfo.coverCount;
+ coverCount = (long)hDiffInfo.headInfo.coverCount;
+ WriteCoverToOutputNew(ref bufferClips, uncoverCount, inputStream, outputStream, hDiffInfo.newDataSize);
+ }
+
+ private static void WriteCoverStreamToOutput(ChunkStream[] clips, ulong count, Stream inputStream, Stream outputStream, ulong newDataSize)
+ {
+ BinaryReader coverReader = new BinaryReader(clips[0]);
+ BinaryReader ctrlReader = new BinaryReader(clips[1]);
+ BinaryReader codeReader = new BinaryReader(clips[2]);
+ BinaryReader copyReader = new BinaryReader(clips[3]);
+ BinaryReader inputReader = new BinaryReader(inputStream);
+ BinaryWriter outputWriter = new BinaryWriter(outputStream);
+
+ try
+ {
+ long newPosBack = 0;
+
+ RLERefClipStruct rleStruct = new RLERefClipStruct();
+
+ rleStruct.rleCtrlClip = ctrlReader;
+ rleStruct.rleCodeClip = codeReader;
+ rleStruct.rleCopyClip = copyReader;
+ rleStruct.rleInputClip = inputReader;
+
+ while (count > 0)
+ {
+ if (count == 2)
+ {
+ Console.WriteLine();
+ }
+
+ ReadCover(out CoverHeader cover, coverReader);
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"Cover {i++}: oldPos -> {cover.oldPos} newPos -> {cover.newPos} length -> {cover.coverLength}");
+#endif
+ CoverHeader coverUse = cover;
+
+ MemoryStream outCache = new MemoryStream();
+
+ if (newPosBack < cover.newPos)
+ {
+ long copyLength = cover.newPos - newPosBack;
+ inputReader.BaseStream.Position = cover.oldPos;
+
+ _TOutStreamCache_copyFromClip(outputStream, copyReader, copyLength);
+ _rle_decode_skip(ref rleStruct, outputStream, copyLength);
+ }
+
+ _patch_add_old_with_rle(outputStream, ref rleStruct, cover.oldPos, cover.coverLength);
+ newPosBack = cover.newPos + cover.coverLength;
+ count--;
+ }
+
+ if (newPosBack < (long)newDataSize)
+ {
+ long copyLength = (long)newDataSize - newPosBack;
+ _TOutStreamCache_copyFromClip(outputStream, copyReader, copyLength);
+ _rle_decode_skip(ref rleStruct, outputStream, copyLength);
+ }
+ }
+ catch
+ {
+ throw;
+ }
+ finally
+ {
+ coverReader?.Dispose();
+ ctrlReader?.Dispose();
+ codeReader?.Dispose();
+ copyReader?.Dispose();
+ inputReader?.Dispose();
+ outputWriter?.Dispose();
+ }
+ }
+
+ private static void WriteCoverToOutputNew(ref byte[][] bufferClips, ulong count, Stream inputStream, Stream outputStream, ulong newDataSize)
+ {
+ using (MemoryStream coverStream = new MemoryStream(bufferClips[0]))
+ using (MemoryStream ctrlStream = new MemoryStream(bufferClips[1]))
+ using (MemoryStream codeStream = new MemoryStream(bufferClips[2]))
+ using (MemoryStream copyStream = new MemoryStream(bufferClips[3]))
+ {
+ BinaryReader coverReader = new BinaryReader(coverStream);
+ BinaryReader ctrlReader = new BinaryReader(ctrlStream);
+ BinaryReader codeReader = new BinaryReader(codeStream);
+ BinaryReader copyReader = new BinaryReader(copyStream);
+ BinaryReader inputReader = new BinaryReader(inputStream);
+ BinaryWriter outputWriter = new BinaryWriter(outputStream);
+ long newPosBack = 0;
+
+ RLERefClipStruct rleStruct = new RLERefClipStruct();
+
+ rleStruct.rleCtrlClip = ctrlReader;
+ rleStruct.rleCodeClip = codeReader;
+ rleStruct.rleCopyClip = copyReader;
+ rleStruct.rleInputClip = inputReader;
+
+ while (count > 0)
+ {
+ ReadCover(out CoverHeader cover, coverReader);
+#if DEBUG && SHOWDEBUGINFO
+ Console.WriteLine($"Cover {i++}: oldPos -> {cover.oldPos} newPos -> {cover.newPos} length -> {cover.coverLength}");
+#endif
+ CoverHeader coverUse = cover;
+
+ MemoryStream outCache = new MemoryStream();
+
+ if (newPosBack < cover.newPos)
+ {
+ long copyLength = cover.newPos - newPosBack;
+ inputReader.BaseStream.Position = cover.oldPos;
+
+ _TOutStreamCache_copyFromClip(outputStream, copyReader, copyLength);
+ _rle_decode_skip(ref rleStruct, outputStream, copyLength);
+ }
+ _patch_add_old_with_rle(outputStream, ref rleStruct, cover.oldPos, cover.coverLength);
+ newPosBack = cover.newPos + cover.coverLength;
+ count--;
+ }
+
+ if (newPosBack < (long)newDataSize)
+ {
+ long copyLength = (long)newDataSize - newPosBack;
+ _TOutStreamCache_copyFromClip(outputStream, copyReader, copyLength);
+ _rle_decode_skip(ref rleStruct, outputStream, copyLength);
+ }
+ }
+ outputStream.Dispose();
+ }
+
+ private static void _patch_add_old_with_rle(Stream outCache, ref RLERefClipStruct rleLoader, long oldPos, long addLength)
+ {
+ long lastPos = outCache.Position;
+ while (addLength > 0)
+ {
+ long decodeStep = addLength;
+ rleLoader.rleInputClip.BaseStream.Position = oldPos;
+
+ byte[] tempBuffer = new byte[decodeStep];
+ rleLoader.rleInputClip.BaseStream.Read(tempBuffer);
+ outCache.Write(tempBuffer);
+ outCache.Position = lastPos;
+ _TBytesRle_load_stream_decode_add(ref rleLoader, outCache, decodeStep);
+
+ oldPos += decodeStep;
+ addLength -= decodeStep;
+
+ }
+ }
+
+ private static void _TOutStreamCache_copyFromClip(Stream outCache, BinaryReader copyReader, long copyLength)
+ {
+ byte[] buffer = new byte[copyLength];
+ copyReaderOffset += copyLength;
+ copyReader.BaseStream.Read(buffer);
+ copyReader.BaseStream.Position = copyReaderOffset;
+ long lastPos = outCache.Position;
+ outCache.Write(buffer);
+ outCache.Position = lastPos;
+ }
+
+ private static void _rle_decode_skip(ref RLERefClipStruct rleLoader, Stream outCache, long copyLength)
+ {
+ while (copyLength > 0)
+ {
+ _TBytesRle_load_stream_decode_add(ref rleLoader, outCache, copyLength);
+ copyLength -= copyLength;
+ }
+ }
+
+ private static void _TBytesRle_load_stream_decode_add(ref RLERefClipStruct rleLoader, Stream outCache, long copyLength)
+ {
+ long num = outCache.Position;
+
+ _TBytesRle_load_stream_mem_add(ref rleLoader, outCache, ref copyLength);
+
+ while (copyLength > 0)
+ {
+ byte type = rleLoader.rleCtrlClip.ReadByte();
+ type = (byte)((type) >> (8 - _kByteRleType));
+ rleLoader.rleCtrlClip.BaseStream.Position--;
+ ulong length = rleLoader.rleCtrlClip.ReadUInt64VarInt(_kByteRleType);
+ length++;
+
+ switch (rleLoader.type = (kByteRleType)type)
+ {
+ case kByteRleType.rle0:
+ rleLoader.memSetLength = length;
+ rleLoader.memSetValue = 0x0;
+ break;
+ case kByteRleType.rle255:
+ rleLoader.memSetLength = length;
+ rleLoader.memSetValue = 0xFF;
+ break;
+ case kByteRleType.rle:
+ byte pSetValue = rleLoader.rleCodeClip.ReadByte();
+ rleLoader.memSetLength = length;
+ rleLoader.memSetValue = pSetValue;
+ break;
+ case kByteRleType.unrle:
+ rleLoader.memCopyLength = length;
+ break;
+ }
+
+#if DEBUG && SHOWDEBUGINFO
+ if (rleLoader.type != kByteRleType.unrle)
+ {
+ Console.WriteLine($" RLE Type: {rleLoader.type} -> length: {rleLoader.memSetLength} -> code: {rleLoader.memSetValue}");
+ }
+ else
+ {
+ Console.WriteLine($" MemCopy length: {rleLoader.memCopyLength}");
+ }
+#endif
+ _TBytesRle_load_stream_mem_add(ref rleLoader, outCache, ref copyLength);
+ }
+ }
+
+ private static void _TBytesRle_load_stream_mem_add(ref RLERefClipStruct rleLoader, Stream outCache, ref long copyLength)
+ {
+ if (rleLoader.memSetLength != 0)
+ {
+ long memSetStep = (long)rleLoader.memSetLength <= copyLength ? (long)rleLoader.memSetLength : copyLength;
+ byte byteSetValue = rleLoader.memSetValue;
+ if (byteSetValue != 0)
+ {
+ byte[] addToSetValueBuffer = new byte[memSetStep];
+ long lastPos = outCache.Position;
+ outCache.Read(addToSetValueBuffer);
+ outCache.Position = lastPos;
+
+ int length = (int)memSetStep;
+ for (int i = 0; i < length; i++) addToSetValueBuffer[i] += byteSetValue;
+
+ outCache.Write(addToSetValueBuffer);
+ }
+ else
+ {
+ outCache.Position += memSetStep;
+ }
+
+ copyLength -= memSetStep;
+ rleLoader.memSetLength -= (ulong)memSetStep;
+ }
+
+ while (rleLoader.memCopyLength > 0 && copyLength > 0)
+ {
+ long decodeStep = (long)rleLoader.memCopyLength > copyLength ? copyLength : (long)rleLoader.memCopyLength;
+
+ byte[] rleData = new byte[decodeStep];
+ byte[] oldData = new byte[decodeStep];
+ rleLoader.rleCodeClip.BaseStream.Read(rleData);
+ long lastPos = outCache.Position;
+ outCache.Read(oldData);
+ outCache.Position = lastPos;
+
+ int length = (int)decodeStep;
+ for (int i = 0; i < length; i++) rleData[i] += oldData[i];
+
+ outCache.Write(rleData);
+
+ copyLength -= decodeStep;
+ rleLoader.memCopyLength -= (ulong)decodeStep;
+ }
+ }
+
+ private static void ReadCover(out CoverHeader coverHeader, BinaryReader coverReader)
+ {
+ long oldPosBack = PatchCore.oldPosBack;
+ long newPosBack = PatchCore.newPosBack;
+ long coverCount = PatchCore.coverCount;
+
+ if (coverCount > 0)
+ {
+ PatchCore.coverCount = coverCount - 1;
+ }
+
+ byte pSign = coverReader.ReadByte();
+ long oldPos, copyLength, coverLength;
+
+ byte inc_oldPos_sign = (byte)(pSign >> (8 - _kSignTagBit));
+ coverReader.BaseStream.Position--;
+ long inc_oldPos = (long)coverReader.ReadUInt64VarInt(_kSignTagBit);
+ oldPos = inc_oldPos_sign == 0 ? oldPosBack + inc_oldPos : oldPosBack - inc_oldPos;
+
+ copyLength = (long)coverReader.ReadUInt64VarInt();
+ coverLength = (long)coverReader.ReadUInt64VarInt();
+ newPosBack += copyLength;
+ oldPosBack = oldPos;
+
+ oldPosBack += true ? coverLength : 0;
+
+ coverHeader = new CoverHeader
+ {
+ oldPos = oldPos,
+ newPos = newPosBack,
+ coverLength = coverLength
+ };
+ newPosBack += coverLength;
+
+ PatchCore.oldPosBack = oldPosBack;
+ PatchCore.newPosBack = newPosBack;
+ }
+ }
+}
diff --git a/Hi3Helper.SharpHDiffPatch/PatchDir/PatchDir.cs b/Hi3Helper.SharpHDiffPatch/PatchDir/PatchDir.cs
index 368b210..08e1799 100644
--- a/Hi3Helper.SharpHDiffPatch/PatchDir/PatchDir.cs
+++ b/Hi3Helper.SharpHDiffPatch/PatchDir/PatchDir.cs
@@ -1,45 +1,321 @@
-using System;
+using Hi3Helper.EncTool;
+using System;
using System.Collections.Generic;
+using System.Diagnostics;
using System.IO;
-using System.Text;
+using System.Linq;
namespace Hi3Helper.SharpHDiffPatch
{
- public struct TDirPatcher
/// <summary>
/// Maps a file index in the old directory tree to the index of its
/// byte-identical counterpart in the new tree.
/// </summary>
internal struct pairStruct
{
    public ulong oldIndex;
    public ulong newIndex;
}
- public struct TDecompress
/// <summary>
/// Holds the directory-diff tables decoded from the patch header: the old/new
/// path lists, the reference and size tables, the identical-file pairs, and
/// the indices of entries flagged executable.
/// </summary>
/// <remarks>
/// Generic type arguments restored: the path lists hold <c>string</c> entries
/// (they are indexed as strings by <c>CopyFileByPairIndex</c>).
/// </remarks>
internal class TDirPatcher
{
    internal List<string> oldUtf8PathList;
    internal List<string> newUtf8PathList;
    internal ulong[] oldRefList;
    internal ulong[] newRefList;
    internal ulong[] newRefSizeList;
    internal pairStruct[] dataSamePairList;
    internal ulong[] newExecuteList;
}
- public sealed partial class HDiffPatch
+ public sealed class PatchDir : IPatch
{
- private void RunDirectoryPatch(string inputPath, string outputPath)
private TDirDiffInfo dirDiffInfo;
private HDiffHeaderInfo hdiffHeaderInfo;
// Factory so each patching pass can open its own read-only view of the patch file.
private Func<FileStream> spawnPatchStream;
private string basePathInput;
private string basePathOutput;

/// <summary>
/// Creates a directory patcher over an already-parsed dir-diff header.
/// </summary>
/// <param name="dirDiffInfo">Parsed directory-diff information.</param>
/// <param name="hdiffHeaderInfo">Parsed HDiff header information.</param>
/// <param name="patchPath">Path of the patch file on disk.</param>
public PatchDir(TDirDiffInfo dirDiffInfo, HDiffHeaderInfo hdiffHeaderInfo, string patchPath)
{
    this.dirDiffInfo = dirDiffInfo;
    this.hdiffHeaderInfo = hdiffHeaderInfo;
    // Lambda converts directly to Func<FileStream>; the explicit
    // "new Func(...)" wrapper was redundant.
    this.spawnPatchStream = () => new FileStream(patchPath, FileMode.Open, FileAccess.Read, FileShare.Read);
}
+
/// <summary>
/// Applies the directory patch: decodes the dir-diff tables, materializes the
/// output tree (copying identical files), then streams the HDiff data across
/// the combined old/new file streams.
/// </summary>
/// <param name="input">Root directory of the old (source) tree.</param>
/// <param name="output">Root directory of the new (target) tree.</param>
/// <param name="useBufferedPatch">Accepted for interface compatibility; the directory routine always streams.</param>
public void Patch(string input, string output, bool useBufferedPatch)
{
    basePathInput = input;
    basePathOutput = output;

    using (Stream patchStream = spawnPatchStream())
    using (BinaryReader patchReader = new BinaryReader(patchStream))
    {
        TDirPatcher dirData = InitializeDirPatcher(patchReader, (long)hdiffHeaderInfo.headDataOffset);

        patchStream.Position = (long)hdiffHeaderInfo.hdiffDataOffset;
        CopyOldSimilarToNewFiles(dirData);

        // Every referenced old/new file is exposed as one contiguous stream.
        FileStream[] mergedOldStream = GetRefOldStreams(dirData).ToArray();
        NewFileCombinedStreamStruct[] mergedNewStream = GetRefNewStreams(dirData).ToArray();

        using (Stream newStream = new CombinedStream(mergedNewStream))
        using (Stream oldStream = new CombinedStream(mergedOldStream))
        {
            // Re-parse the embedded single-file HDiff header before patching.
            dirDiffInfo.sdiffInfo = new SingleCompressedHDiffInfo();
            _ = Header.TryParseHeaderInfo(patchReader, "", dirDiffInfo, new CompressedHDiffInfo() { headInfo = new THDiffzHead() }, new HDiffHeaderInfo());
            StartPatchRoutine(oldStream, patchStream, newStream, dirDiffInfo.newDataSize);
        }
    }
}
+
/// <summary>
/// Runs the core HDiffZ patch routine over the merged streams, printing the
/// patch metadata and the elapsed time to the console.
/// </summary>
private void StartPatchRoutine(Stream inputStream, Stream patchStream, Stream outputStream, ulong newDataSize)
{
    patchStream.Seek((long)dirDiffInfo.hdiffinfo.headInfo.headEndPos, SeekOrigin.Begin);

    Console.WriteLine("Patch Information:");
    Console.WriteLine($" Old Size: {dirDiffInfo.oldDataSize} bytes");
    Console.WriteLine($" New Size: {dirDiffInfo.newDataSize} bytes");

    Console.WriteLine();
    Console.WriteLine("Technical Information:");
    Console.WriteLine($" Cover Data Offset: {dirDiffInfo.hdiffinfo.headInfo.headEndPos}");
    Console.WriteLine($" Cover Data Size: {dirDiffInfo.hdiffinfo.headInfo.cover_buf_size}");
    Console.WriteLine($" Cover Count: {dirDiffInfo.hdiffinfo.headInfo.coverCount}");

    Console.WriteLine($" RLE Data Offset: {dirDiffInfo.hdiffinfo.headInfo.coverEndPos}");
    Console.WriteLine($" RLE Control Data Size: {dirDiffInfo.hdiffinfo.headInfo.rle_ctrlBuf_size}");
    Console.WriteLine($" RLE Code Data Size: {dirDiffInfo.hdiffinfo.headInfo.rle_codeBuf_size}");

    Console.WriteLine($" New Diff Data Size: {dirDiffInfo.hdiffinfo.headInfo.newDataDiff_size}");
    Console.WriteLine();

    Stopwatch stopwatch = Stopwatch.StartNew();

    // The four clips sit back-to-back after the header, in this fixed order:
    // covers, RLE control, RLE code, new-data diff.
    long[] clipSizes = new long[]
    {
        (long)dirDiffInfo.hdiffinfo.headInfo.cover_buf_size,
        (long)dirDiffInfo.hdiffinfo.headInfo.rle_ctrlBuf_size,
        (long)dirDiffInfo.hdiffinfo.headInfo.rle_codeBuf_size,
        (long)dirDiffInfo.hdiffinfo.headInfo.newDataDiff_size
    };
    ChunkStream[] clips = new ChunkStream[clipSizes.Length];
    for (int i = 0; i < clipSizes.Length; i++)
    {
        clips[i] = PatchCore.GetBufferClipsStream(patchStream, clipSizes[i]);
    }

    PatchCore.UncoverBufferClipsStream(clips, inputStream, outputStream, dirDiffInfo.hdiffinfo, newDataSize);
    stopwatch.Stop();

    TimeSpan timeTaken = stopwatch.Elapsed;
    Console.WriteLine($"Patch has been finished in {timeTaken.TotalSeconds} seconds ({timeTaken.Milliseconds} ms)");
}
+
/// <summary>
/// Lazily opens a read-only <see cref="FileStream"/> for every old file
/// referenced by the diff, in reference-list order.
/// </summary>
/// <remarks>Generic argument restored: the result is consumed as <c>FileStream[]</c> by <c>Patch</c>.</remarks>
private IEnumerable<FileStream> GetRefOldStreams(TDirPatcher dirData)
{
    for (int i = 0; i < dirData.oldRefList.Length; i++)
    {
        ReadOnlySpan<char> oldPathByIndex = NewPathByIndex(dirData.oldUtf8PathList, (int)dirData.oldRefList[i], true);
        string combinedOldPath = Path.Combine(basePathInput, oldPathByIndex.ToString());

#if DEBUG && SHOWDEBUGINFO
        Console.WriteLine($"[GetRefOldStreams] Assigning stream to the old path: {combinedOldPath}");
#endif
        yield return File.OpenRead(combinedOldPath);
    }
}
+
/// <summary>
/// Lazily creates (and sizes) every new file referenced by the diff, yielding
/// a <see cref="NewFileCombinedStreamStruct"/> per file for the combined
/// output stream. Missing parent directories are created on the fly.
/// </summary>
/// <remarks>Generic argument restored: the result is consumed as <c>NewFileCombinedStreamStruct[]</c> by <c>Patch</c>.</remarks>
private IEnumerable<NewFileCombinedStreamStruct> GetRefNewStreams(TDirPatcher dirData)
{
    for (int i = 0; i < dirData.newRefList.Length; i++)
    {
        ReadOnlySpan<char> newPathByIndex = NewPathByIndex(dirData.newUtf8PathList, (int)dirData.newRefList[i], true);
        string combinedNewPath = Path.Combine(basePathOutput, newPathByIndex.ToString());
        string newPathDirectory = Path.GetDirectoryName(combinedNewPath);
        if (!Directory.Exists(newPathDirectory)) Directory.CreateDirectory(newPathDirectory);

#if DEBUG && SHOWDEBUGINFO
        Console.WriteLine($"[GetRefNewStreams] Assigning stream to the new path: {combinedNewPath}");
#endif

        NewFileCombinedStreamStruct newStruct = new NewFileCombinedStreamStruct
        {
            stream = File.Create(combinedNewPath),
            size = (long)dirData.newRefSizeList[i]
        };
        yield return newStruct;
    }
}
+
/// <summary>
/// Walks the new-tree path list and materializes everything the diff stream
/// will not write itself: entries covered by <c>newRefList</c> are skipped
/// (they are created later as ref streams), same-pair entries are copied
/// verbatim from the old tree, and everything else becomes an empty file or
/// a directory.
/// </summary>
private void CopyOldSimilarToNewFiles(TDirPatcher dirData)
{
    int curNewRefIndex = 0;
    int curPathIndex = 0;
    int curSamePairIndex = 0;
    int newRefCount = dirData.newRefList.Length;
    int samePairCount = dirData.dataSamePairList.Length;
    int pathCount = dirData.newUtf8PathList.Count + 1;

    while (curPathIndex < pathCount)
    {
        if ((curNewRefIndex < newRefCount)
            && (curPathIndex == (dirData.newRefList.Length > 0 ? (int)dirData.newRefList[curNewRefIndex] : curNewRefIndex)))
        {
            // This path is produced by the diff data itself; only directory
            // entries advance the path cursor here.
            bool isPathADir = IsPathADir(dirData.newUtf8PathList[(int)dirData.newRefList[curNewRefIndex]]);

            if (isPathADir) ++curPathIndex;
            ++curNewRefIndex;
        }
        else if (curSamePairIndex < samePairCount
            && (curPathIndex == (int)dirData.dataSamePairList[curSamePairIndex].newIndex))
        {
            // Byte-identical in both trees: plain file copy.
            pairStruct pair = dirData.dataSamePairList[curSamePairIndex];
            CopyFileByPairIndex(dirData.oldUtf8PathList, dirData.newUtf8PathList, (int)pair.oldIndex - 1, (int)pair.newIndex - 1);
            ++curSamePairIndex;
            ++curPathIndex;
        }
        else
        {
            // Neither referenced nor paired: create it empty.
            ReadOnlySpan<char> pathByIndex = NewPathByIndex(dirData.newUtf8PathList, curPathIndex, true);
            string combinedNewPath = Path.Combine(basePathOutput, pathByIndex.ToString());

            bool isPathADir = IsPathADir(pathByIndex);

            if (isPathADir && !Directory.Exists(combinedNewPath)) Directory.CreateDirectory(combinedNewPath);
            else if (!isPathADir && !File.Exists(combinedNewPath)) File.Create(combinedNewPath).Dispose();

#if DEBUG && SHOWDEBUGINFO
            Console.WriteLine($"[CopyOldSimilarToNewFiles] Created a new {(isPathADir ? "directory" : "empty file")}: {combinedNewPath}");
#endif

            ++curPathIndex;
        }
    }
}
+
/// <summary>
/// A dir-diff path entry denotes a directory when it ends with '/'
/// (the path lists use a trailing slash as the directory marker — see how
/// <c>NewPathByIndex</c> appends '/' for the root entry).
/// Empty input now yields false instead of throwing on <c>input[Length - 1]</c>.
/// </summary>
private bool IsPathADir(ReadOnlySpan<char> input)
{
    return !input.IsEmpty && input[input.Length - 1] == '/';
}
+
/// <summary>
/// Resolves a path-list index to its relative path. When
/// <paramref name="returnBasePathIf0"/> is true, index 0 maps to the output
/// base path itself (with a trailing '/' so it reads as a directory) and all
/// other indices are shifted down by one; otherwise the index is used as-is.
/// Generic arguments restored (<c>ReadOnlySpan&lt;char&gt;</c>, <c>IList&lt;string&gt;</c>).
/// </summary>
private ReadOnlySpan<char> NewPathByIndex(IList<string> source, int index, bool returnBasePathIf0 = false) => returnBasePathIf0 && index == 0 ? basePathOutput + '/' : source[returnBasePathIf0 ? index - 1 : index];
+
/// <summary>
/// Copies one unchanged file from the old tree to the new tree, creating the
/// destination directory when needed. Indices are 0-based into the path
/// lists. Existing destination files are overwritten.
/// Generic arguments restored (<c>IList&lt;string&gt;</c>).
/// </summary>
private void CopyFileByPairIndex(IList<string> oldList, IList<string> newList, int oldIndex, int newIndex)
{
    string oldPath = oldList[oldIndex];
    string newPath = newList[newIndex];
    string oldFullPath = Path.Combine(this.basePathInput, oldPath);
    string newFullPath = Path.Combine(this.basePathOutput, newPath);
    string newDirFullPath = Path.GetDirectoryName(newFullPath);

    if (!Directory.Exists(newDirFullPath)) Directory.CreateDirectory(newDirFullPath);

    File.Copy(oldFullPath, newFullPath, true);

#if DEBUG && SHOWDEBUGINFO
    Console.WriteLine($"[CopyFileByPairIndex] Copied a similar file to target path: {oldFullPath} -> {newFullPath}");
#endif
}
+
+ /*
+ private bool CheckDiffDataIntegration(BinaryReader reader, long checksumDataOffset)
+ {
+ reader.BaseStream.Position = checksumDataOffset;
+ (_, _) = hdiffHeaderInfo.checksumMode switch
+ {
+ ChecksumMode.fadler64 => (8, 4),
+ ChecksumMode.crc32 => (4, 4),
+ ChecksumMode.nochecksum => (0, 0),
+ _ => throw new NotSupportedException($"Checksum mode {hdiffHeaderInfo.checksumMode} is currently not supported!")
+ };
+
+ if (dirDiffInfo.checksumByteSize == 0) return true;
+
+ return true;
+ }
+ */
+
/// <summary>
/// Decodes the dir-diff header tables starting at
/// <paramref name="startOffset"/>: old/new path lists, old/new reference
/// lists, new-file sizes, same-file pairs, and the executable-flag list.
/// </summary>
private TDirPatcher InitializeDirPatcher(BinaryReader reader, long startOffset)
{
    reader.BaseStream.Position = startOffset;

    TDirPatcher dirData = new TDirPatcher();
    GetListOfPaths(reader, out dirData.oldUtf8PathList, hdiffHeaderInfo.inputDirCount);
    GetListOfPaths(reader, out dirData.newUtf8PathList, hdiffHeaderInfo.outputDirCount);

    GetArrayOfIncULongTag(reader, out dirData.oldRefList, hdiffHeaderInfo.inputRefFileCount, hdiffHeaderInfo.inputDirCount, 0);
    GetArrayOfIncULongTag(reader, out dirData.newRefList, hdiffHeaderInfo.outputRefFileCount, hdiffHeaderInfo.outputDirCount, 0);
    GetArrayOfULongTag(reader, out dirData.newRefSizeList, hdiffHeaderInfo.outputRefFileCount, 0);
    GetArrayOfSamePairULongTag(reader, out dirData.dataSamePairList, hdiffHeaderInfo.sameFilePairCount, hdiffHeaderInfo.outputDirCount, hdiffHeaderInfo.inputDirCount);
    GetArrayOfIncULongTag(reader, out dirData.newExecuteList, (ulong)hdiffHeaderInfo.newExecuteCount, hdiffHeaderInfo.outputDirCount, 0);

    return dirData;
}
+
/// <summary>
/// Reads <paramref name="count"/> - 1 null-terminated path strings into
/// <paramref name="outlist"/>. The table's first entry is the root and is
/// represented only by the single byte skipped up front.
/// </summary>
/// <remarks>
/// Fixed: the original loop condition was <c>i &lt; count - 1</c> on a
/// <c>ulong</c>, which underflows to <c>ulong.MaxValue</c> when
/// <paramref name="count"/> is 0 and would loop (nearly) forever. Iterating
/// from 1 performs the same number of reads without the subtraction.
/// Generic argument restored (<c>List&lt;string&gt;</c>).
/// </remarks>
private void GetListOfPaths(BinaryReader reader, out List<string> outlist, ulong count)
{
    // Skip the leading null byte of the root entry.
    ++reader.BaseStream.Position;
    outlist = new List<string>();

    for (ulong i = 1; i < count; i++)
    {
        string filePath = reader.ReadStringToNull();
        outlist.Add(filePath);
    }
}
+
/// <summary>
/// Reads a strictly-increasing index list stored as var-int deltas: each
/// stored value is (index - previousIndex - 1). Throws
/// <see cref="InvalidDataException"/> when a decoded index exceeds
/// <paramref name="checkCount"/>.
/// </summary>
private void GetArrayOfIncULongTag(BinaryReader reader, out ulong[] outarray, ulong count, ulong checkCount, int tagBit)
{
    outarray = new ulong[count];
    // Seeded with MaxValue so the first "+= 1 + delta" wraps around to the
    // plain delta (the previous index is conceptually -1). The wraparound is
    // deliberate; C# arithmetic is unchecked by default.
    ulong current = ulong.MaxValue;

    for (ulong i = 0; i < count; i++)
    {
        ulong delta = reader.ReadUInt64VarInt(tagBit);
        current += 1 + delta;
        if (current > checkCount) throw new InvalidDataException($"[GetArrayOfIncULongTag] Given back value for the reference list is invalid! Having {i} refs while expecting max: {checkCount}");
#if DEBUG && SHOWDEBUGINFO
        Console.WriteLine($"[GetArrayOfIncULongTag] value {i} - {count}: {current}");
#endif
        outarray[i] = current;
    }
}
+
/// <summary>
/// Reads <paramref name="count"/> raw (non-delta) var-int values into
/// <paramref name="outarray"/>.
/// </summary>
private void GetArrayOfULongTag(BinaryReader reader, out ulong[] outarray, ulong count, int tagBit)
{
    outarray = new ulong[count];
    for (ulong i = 0; i < count; i++)
    {
        ulong num = reader.ReadUInt64VarInt(tagBit);
        outarray[i] = num;
#if DEBUG && SHOWDEBUGINFO
        // Fixed: the debug tag previously said [GetArrayOfIncULongTag],
        // copied from the sibling method.
        Console.WriteLine($"[GetArrayOfULongTag] value {i} - {count}: {num}");
#endif
    }
}
+
/// <summary>
/// Reads the list of identical old/new file pairs. New indices are stored as
/// increasing var-int deltas; old indices are signed deltas whose sign lives
/// in the top bit of their first byte. Throws
/// <see cref="InvalidDataException"/> when a decoded index exceeds its bound.
/// </summary>
private void GetArrayOfSamePairULongTag(BinaryReader reader, out pairStruct[] outPair, ulong pairCount, ulong check_endNewValue, ulong check_endOldValue)
{
    outPair = new pairStruct[pairCount];
    // Both trackers start at MaxValue so the first "+= 1 + delta" wraps to
    // the plain delta value (previous index is conceptually -1).
    ulong backNewValue = ulong.MaxValue;
    ulong backOldValue = ulong.MaxValue;

    for (ulong i = 0; i < pairCount; ++i)
    {
        backNewValue += 1 + reader.ReadUInt64VarInt(0);
        if (backNewValue > check_endNewValue) throw new InvalidDataException($"[GetArrayOfSamePairULongTag] Given back new value for the list is invalid! Having {backNewValue} value while expecting max: {check_endNewValue}");

        // Peek the sign bit, rewind, then read the magnitude as a var-int
        // with one tag bit reserved.
        ulong pSign = reader.ReadByte();
        --reader.BaseStream.Position;
        ulong incOldValue = reader.ReadUInt64VarInt(1);

        if (pSign >> (8 - 1) == 0)
            backOldValue += 1 + incOldValue;
        else
            backOldValue = backOldValue + 1 - incOldValue;

        if (backOldValue > check_endOldValue) throw new InvalidDataException($"[GetArrayOfSamePairULongTag] Given back old value for the list is invalid! Having {backOldValue} value while expecting max: {check_endOldValue}");
#if DEBUG && SHOWDEBUGINFO
        Console.WriteLine($"[GetArrayOfSamePairULongTag] value {i} - {pairCount}: newIndex -> {backNewValue} oldIndex -> {backOldValue}");
#endif
        outPair[i] = new pairStruct { newIndex = backNewValue, oldIndex = backOldValue };
    }
}
}
diff --git a/Hi3Helper.SharpHDiffPatch/PatchSingle/PatchSingle.cs b/Hi3Helper.SharpHDiffPatch/PatchSingle/PatchSingle.cs
index aa4d501..ee86b9d 100644
--- a/Hi3Helper.SharpHDiffPatch/PatchSingle/PatchSingle.cs
+++ b/Hi3Helper.SharpHDiffPatch/PatchSingle/PatchSingle.cs
@@ -4,43 +4,36 @@
namespace Hi3Helper.SharpHDiffPatch
{
- public sealed class PatchSingle
/// <summary>
/// RLE control codes of the HDiffZ byte-RLE stream: a run of 0x00 bytes, a
/// run of 0xFF bytes, a run of one explicit byte value, or a literal
/// (non-RLE) region copied from the code clip.
/// </summary>
internal enum kByteRleType
{
    rle0 = 0,
    rle255 = 1,
    rle = 2,
    unrle = 3
}
- private long oldPosBack;
- private long newPosBack;
- private long coverCount;
/// <summary>
/// Mutable decoding state for the byte-RLE stream: the pending mem-set /
/// mem-copy run plus the four clip readers it consumes from.
/// </summary>
internal ref struct RLERefClipStruct
{
    public ulong memCopyLength;   // remaining bytes of a literal (unrle) run
    public ulong memSetLength;    // remaining bytes of a set-value run
    public byte memSetValue;      // byte value added to output during a set-value run
    public kByteRleType type;     // last control code decoded from the ctrl clip

    internal BinaryReader rleCodeClip;
    internal BinaryReader rleCtrlClip;
    internal BinaryReader rleCopyClip;
    internal BinaryReader rleInputClip;
};
+
/// <summary>
/// One decoded cover record: where to read in the old data, where the bytes
/// land in the new data, and how many bytes the cover spans.
/// </summary>
internal struct CoverHeader
{
    internal long oldPos;
    internal long newPos;
    internal long coverLength;
}
+ public sealed class PatchSingle : IPatch
+ {
private CompressedHDiffInfo hDiffInfo;
private Func spawnPatchStream;
@@ -52,7 +45,7 @@ public PatchSingle(CompressedHDiffInfo hDiffInfo)
this.spawnPatchStream = new Func(() => new FileStream(hDiffInfo.patchPath, FileMode.Open, FileAccess.Read, FileShare.Read));
}
- public void Patch(string input, string output, bool useBufferedPatch = true)
+ public void Patch(string input, string output, bool useBufferedPatch)
{
isUseBufferedPatch = useBufferedPatch;
@@ -101,358 +94,27 @@ private void StartPatchRoutine(Stream inputStream, Stream patchStream, Stream ou
if (!isUseBufferedPatch)
{
ChunkStream[] clips = new ChunkStream[4];
- clips[0] = GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.cover_buf_size);
- clips[1] = GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.rle_ctrlBuf_size);
- clips[2] = GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.rle_codeBuf_size);
- clips[3] = GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.newDataDiff_size);
+ clips[0] = PatchCore.GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.cover_buf_size);
+ clips[1] = PatchCore.GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.rle_ctrlBuf_size);
+ clips[2] = PatchCore.GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.rle_codeBuf_size);
+ clips[3] = PatchCore.GetBufferClipsStream(patchStream, (long)hDiffInfo.headInfo.newDataDiff_size);
- UncoverBufferClipsStream(clips, inputStream, outputStream);
+ PatchCore.UncoverBufferClipsStream(clips, inputStream, outputStream, hDiffInfo);
}
else
{
byte[][] bufferClips = new byte[4][];
- bufferClips[0] = GetBufferClips(patchStream, (long)hDiffInfo.headInfo.cover_buf_size, (long)hDiffInfo.headInfo.compress_cover_buf_size);
- bufferClips[1] = GetBufferClips(patchStream, (long)hDiffInfo.headInfo.rle_ctrlBuf_size, (long)hDiffInfo.headInfo.compress_rle_ctrlBuf_size);
- bufferClips[2] = GetBufferClips(patchStream, (long)hDiffInfo.headInfo.rle_codeBuf_size, (long)hDiffInfo.headInfo.compress_rle_codeBuf_size);
- bufferClips[3] = GetBufferClips(patchStream, (long)hDiffInfo.headInfo.newDataDiff_size, (long)hDiffInfo.headInfo.compress_newDataDiff_size);
+ bufferClips[0] = PatchCore.GetBufferClips(patchStream, (long)hDiffInfo.headInfo.cover_buf_size, (long)hDiffInfo.headInfo.compress_cover_buf_size);
+ bufferClips[1] = PatchCore.GetBufferClips(patchStream, (long)hDiffInfo.headInfo.rle_ctrlBuf_size, (long)hDiffInfo.headInfo.compress_rle_ctrlBuf_size);
+ bufferClips[2] = PatchCore.GetBufferClips(patchStream, (long)hDiffInfo.headInfo.rle_codeBuf_size, (long)hDiffInfo.headInfo.compress_rle_codeBuf_size);
+ bufferClips[3] = PatchCore.GetBufferClips(patchStream, (long)hDiffInfo.headInfo.newDataDiff_size, (long)hDiffInfo.headInfo.compress_newDataDiff_size);
- UncoverBufferClips(ref bufferClips, inputStream, outputStream);
+ PatchCore.UncoverBufferClips(ref bufferClips, inputStream, outputStream, hDiffInfo);
}
stopwatch.Stop();
TimeSpan timeTaken = stopwatch.Elapsed;
Console.WriteLine($"Patch has been finished in {timeTaken.TotalSeconds} seconds ({timeTaken.Milliseconds} ms)");
}
-
- private ChunkStream GetBufferClipsStream(Stream patchStream, long clipSize)
- {
- long start = patchStream.Position;
- long end = patchStream.Position + clipSize;
- long size = end - start;
-#if DEBUG && SHOWDEBUGINFO
- Console.WriteLine($"Start assigning chunk as Stream: start -> {start} end -> {end} size -> {size}");
-#endif
-
- ChunkStream stream = new ChunkStream(patchStream, patchStream.Position, end, false);
- patchStream.Position += clipSize;
- return stream;
- }
-
- private byte[] GetBufferClips(Stream patchStream, long clipSize, long clipSizeCompress)
- {
- byte[] returnClip = new byte[clipSize];
- int bufSize = 4 << 10;
-
- long remainToRead = clipSize;
- int offset = 0;
- int read = 0;
-#if DEBUG && SHOWDEBUGINFO
- Console.WriteLine($"Start reading buffer clip with buffer size: {bufSize} to size: {clipSize}");
-#endif
- while ((remainToRead -= read = patchStream.Read(returnClip, offset, (int)Math.Min(bufSize, remainToRead))) > 0)
- {
- offset += read;
-#if DEBUG && SHOWDEBUGINFO
- Console.WriteLine($"Reading remain {read}: Remain to read: {remainToRead}");
-#endif
- }
-
- return returnClip;
- }
-
- private void UncoverBufferClipsStream(ChunkStream[] clips, Stream inputStream, Stream outputStream)
- {
- ulong uncoverCount = hDiffInfo.headInfo.coverCount;
- this.coverCount = (long)hDiffInfo.headInfo.coverCount;
- WriteCoverStreamToOutput(clips, uncoverCount, inputStream, outputStream);
- }
-
- private void UncoverBufferClips(ref byte[][] bufferClips, Stream inputStream, Stream outputStream)
- {
- ulong uncoverCount = hDiffInfo.headInfo.coverCount;
- this.coverCount = (long)hDiffInfo.headInfo.coverCount;
- WriteCoverToOutputNew(ref bufferClips, uncoverCount, inputStream, outputStream);
- }
-
- private void WriteCoverStreamToOutput(ChunkStream[] clips, ulong count, Stream inputStream, Stream outputStream)
- {
- BinaryReader coverReader = new BinaryReader(clips[0]);
- BinaryReader ctrlReader = new BinaryReader(clips[1]);
- BinaryReader codeReader = new BinaryReader(clips[2]);
- BinaryReader copyReader = new BinaryReader(clips[3]);
- BinaryReader inputReader = new BinaryReader(inputStream);
- BinaryWriter outputWriter = new BinaryWriter(outputStream);
-
- try
- {
- long newPosBack = 0;
-
- RLERefClipStruct rleStruct = new RLERefClipStruct();
-
- rleStruct.rleCtrlClip = ctrlReader;
- rleStruct.rleCodeClip = codeReader;
- rleStruct.rleCopyClip = copyReader;
- rleStruct.rleInputClip = inputReader;
-
- while (count > 0)
- {
- ReadCover(out CoverHeader cover, coverReader);
-#if DEBUG && SHOWDEBUGINFO
- Console.WriteLine($"Cover {i++}: oldPos -> {cover.oldPos} newPos -> {cover.newPos} length -> {cover.coverLength}");
-#endif
- CoverHeader coverUse = cover;
-
- MemoryStream outCache = new MemoryStream();
-
- if (newPosBack < cover.newPos)
- {
- long copyLength = cover.newPos - newPosBack;
- inputReader.BaseStream.Position = cover.oldPos;
-
- _TOutStreamCache_copyFromClip(outputStream, copyReader, copyLength);
- _rle_decode_skip(ref rleStruct, outputStream, copyLength);
- }
- _patch_add_old_with_rle(outputStream, ref rleStruct, cover.oldPos, cover.coverLength);
- newPosBack = cover.newPos + cover.coverLength;
- count--;
- }
- }
- catch
- {
- throw;
- }
- finally
- {
- coverReader?.Dispose();
- ctrlReader?.Dispose();
- codeReader?.Dispose();
- copyReader?.Dispose();
- inputReader?.Dispose();
- outputWriter?.Dispose();
- }
- }
-
- private void WriteCoverToOutputNew(ref byte[][] bufferClips, ulong count, Stream inputStream, Stream outputStream)
- {
- using (MemoryStream coverStream = new MemoryStream(bufferClips[0]))
- using (MemoryStream ctrlStream = new MemoryStream(bufferClips[1]))
- using (MemoryStream codeStream = new MemoryStream(bufferClips[2]))
- using (MemoryStream copyStream = new MemoryStream(bufferClips[3]))
- {
- BinaryReader coverReader = new BinaryReader(coverStream);
- BinaryReader ctrlReader = new BinaryReader(ctrlStream);
- BinaryReader codeReader = new BinaryReader(codeStream);
- BinaryReader copyReader = new BinaryReader(copyStream);
- BinaryReader inputReader = new BinaryReader(inputStream);
- BinaryWriter outputWriter = new BinaryWriter(outputStream);
- long newPosBack = 0;
-
- RLERefClipStruct rleStruct = new RLERefClipStruct();
-
- rleStruct.rleCtrlClip = ctrlReader;
- rleStruct.rleCodeClip = codeReader;
- rleStruct.rleCopyClip = copyReader;
- rleStruct.rleInputClip = inputReader;
-
- while (count > 0)
- {
- ReadCover(out CoverHeader cover, coverReader);
-#if DEBUG && SHOWDEBUGINFO
- Console.WriteLine($"Cover {i++}: oldPos -> {cover.oldPos} newPos -> {cover.newPos} length -> {cover.coverLength}");
-#endif
- CoverHeader coverUse = cover;
-
- MemoryStream outCache = new MemoryStream();
-
- if (newPosBack < cover.newPos)
- {
- long copyLength = cover.newPos - newPosBack;
- inputReader.BaseStream.Position = cover.oldPos;
-
- _TOutStreamCache_copyFromClip(outputStream, copyReader, copyLength);
- _rle_decode_skip(ref rleStruct, outputStream, copyLength);
- }
- _patch_add_old_with_rle(outputStream, ref rleStruct, cover.oldPos, cover.coverLength);
- newPosBack = cover.newPos + cover.coverLength;
- count--;
- }
- }
- outputStream.Dispose();
- }
-
- private void _patch_add_old_with_rle(Stream outCache, ref RLERefClipStruct rleLoader, long oldPos, long addLength)
- {
- long lastPos = outCache.Position;
- while (addLength > 0)
- {
- long decodeStep = addLength;
- rleLoader.rleInputClip.BaseStream.Position = oldPos;
-
- byte[] tempBuffer = new byte[decodeStep];
- rleLoader.rleInputClip.BaseStream.Read(tempBuffer);
- outCache.Write(tempBuffer);
- outCache.Position = lastPos;
- _TBytesRle_load_stream_decode_add(ref rleLoader, outCache, decodeStep);
-
- oldPos += decodeStep;
- addLength -= decodeStep;
-
- }
- }
-
- long copyReaderOffset = 0;
- private void _TOutStreamCache_copyFromClip(Stream outCache, BinaryReader copyReader, long copyLength)
- {
- byte[] buffer = new byte[copyLength];
- copyReaderOffset += copyLength;
- copyReader.BaseStream.Read(buffer);
- copyReader.BaseStream.Position = copyReaderOffset;
- long lastPos = outCache.Position;
- outCache.Write(buffer);
- outCache.Position = lastPos;
- }
-
- private void _rle_decode_skip(ref RLERefClipStruct rleLoader, Stream outCache, long copyLength)
- {
- while (copyLength > 0)
- {
- _TBytesRle_load_stream_decode_add(ref rleLoader, outCache, copyLength);
- copyLength -= copyLength;
- }
- }
-
- private void _TBytesRle_load_stream_decode_add(ref RLERefClipStruct rleLoader, Stream outCache, long copyLength)
- {
- long num = outCache.Position;
-
- _TBytesRle_load_stream_mem_add(ref rleLoader, outCache, ref copyLength);
-
- while (copyLength > 0)
- {
- byte type = rleLoader.rleCtrlClip.ReadByte();
- type = (byte)((type) >> (8 - _kByteRleType));
- rleLoader.rleCtrlClip.BaseStream.Position--;
- ulong length = rleLoader.rleCtrlClip.ReadUInt64VarInt(_kByteRleType);
- length++;
-
- switch (rleLoader.type = (kByteRleType)type)
- {
- case kByteRleType.rle0:
- rleLoader.memSetLength = length;
- rleLoader.memSetValue = 0x0;
- break;
- case kByteRleType.rle255:
- rleLoader.memSetLength = length;
- rleLoader.memSetValue = 0xFF;
- break;
- case kByteRleType.rle:
- byte pSetValue = rleLoader.rleCodeClip.ReadByte();
- rleLoader.memSetLength = length;
- rleLoader.memSetValue = pSetValue;
- break;
- case kByteRleType.unrle:
- rleLoader.memCopyLength = length;
- break;
- }
-
-#if DEBUG && SHOWDEBUGINFO
- if (rleLoader.type != kByteRleType.unrle)
- {
- Console.WriteLine($" RLE Type: {rleLoader.type} -> length: {rleLoader.memSetLength} -> code: {rleLoader.memSetValue}");
- }
- else
- {
- Console.WriteLine($" MemCopy length: {rleLoader.memCopyLength}");
- }
-#endif
- _TBytesRle_load_stream_mem_add(ref rleLoader, outCache, ref copyLength);
- }
- }
-
- private void _TBytesRle_load_stream_mem_add(ref RLERefClipStruct rleLoader, Stream outCache, ref long copyLength)
- {
- if (rleLoader.memSetLength != 0)
- {
- long memSetStep = (long)rleLoader.memSetLength <= copyLength ? (long)rleLoader.memSetLength : copyLength;
- byte byteSetValue = rleLoader.memSetValue;
- if (byteSetValue != 0)
- {
- byte[] addToSetValueBuffer = new byte[memSetStep];
- long lastPos = outCache.Position;
- outCache.Read(addToSetValueBuffer);
- outCache.Position = lastPos;
-
- int length = (int)memSetStep;
- for (int i = 0; i < length; i++) addToSetValueBuffer[i] += byteSetValue;
-
- outCache.Write(addToSetValueBuffer);
- }
- else
- {
- outCache.Position += memSetStep;
- }
-
- copyLength -= memSetStep;
- rleLoader.memSetLength -= (ulong)memSetStep;
- }
-
- while (rleLoader.memCopyLength > 0 && copyLength > 0)
- {
- long decodeStep = (long)rleLoader.memCopyLength > copyLength ? copyLength : (long)rleLoader.memCopyLength;
-
- byte[] rleData = new byte[decodeStep];
- byte[] oldData = new byte[decodeStep];
- rleLoader.rleCodeClip.BaseStream.Read(rleData);
- long lastPos = outCache.Position;
- outCache.Read(oldData);
- outCache.Position = lastPos;
-
- int length = (int)decodeStep;
- for (int i = 0; i < length; i++) rleData[i] += oldData[i];
-
- outCache.Write(rleData);
-
- copyLength -= decodeStep;
- rleLoader.memCopyLength -= (ulong)decodeStep;
- }
- }
-
- private void ReadCover(out CoverHeader coverHeader, BinaryReader coverReader)
- {
- long oldPosBack = this.oldPosBack;
- long newPosBack = this.newPosBack;
- long coverCount = this.coverCount;
-
- if (coverCount > 0)
- {
- this.coverCount = coverCount - 1;
- }
-
- byte pSign = coverReader.ReadByte();
- long oldPos, copyLength, coverLength;
-
- byte inc_oldPos_sign = (byte)(pSign >> (8 - _kSignTagBit));
- coverReader.BaseStream.Position--;
- long inc_oldPos = (long)coverReader.ReadUInt64VarInt(_kSignTagBit);
- oldPos = inc_oldPos_sign == 0 ? oldPosBack + inc_oldPos : oldPosBack - inc_oldPos;
-
- copyLength = (long)coverReader.ReadUInt64VarInt();
- coverLength = (long)coverReader.ReadUInt64VarInt();
- newPosBack += copyLength;
- oldPosBack = oldPos;
-
- oldPosBack += true ? coverLength : 0;
-
- coverHeader = new CoverHeader
- {
- oldPos = oldPos,
- newPos = newPosBack,
- coverLength = coverLength
- };
- newPosBack += coverLength;
-
- this.oldPosBack = oldPosBack;
- this.newPosBack = newPosBack;
- }
}
}
diff --git a/SharpHDiffPatch/Program.cs b/SharpHDiffPatch/Program.cs
index cbdcfc2..85b931c 100644
--- a/SharpHDiffPatch/Program.cs
+++ b/SharpHDiffPatch/Program.cs
@@ -1,7 +1,7 @@
-using System;
+using Hi3Helper.SharpHDiffPatch;
+using System;
using System.IO;
using System.Reflection;
-using Hi3Helper.SharpHDiffPatch;
namespace SharpHDiffPatchBin
{
@@ -43,7 +43,7 @@ public static void Main(params string[] args)
}
}
- if (!File.Exists(inputPath))
+ if (!(File.Exists(inputPath) || Directory.Exists(inputPath)))
{
Console.WriteLine("Input file doesn't exist!");
return;