diff --git a/src/SharpCompress/Common/Ace/AceCrc.cs b/src/SharpCompress/Common/Ace/AceCrc.cs
new file mode 100644
index 000000000..8ae43a2b0
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/AceCrc.cs
@@ -0,0 +1,61 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace SharpCompress.Common.Ace
+{
+    public class AceCrc
+    {
+        // CRC-32 lookup table (standard polynomial 0xEDB88320, reflected)
+        private static readonly uint[] Crc32Table = GenerateTable();
+
+        private static uint[] GenerateTable()
+        {
+            var table = new uint[256];
+
+            for (int i = 0; i < 256; i++)
+            {
+                uint crc = (uint)i;
+
+                for (int j = 0; j < 8; j++)
+                {
+                    if ((crc & 1) != 0)
+                        crc = (crc >> 1) ^ 0xEDB88320u;
+                    else
+                        crc >>= 1;
+                }
+
+                table[i] = crc;
+            }
+
+            return table;
+        }
+
+        /// <summary>
+        /// Calculate ACE CRC-32 checksum.
+        /// ACE CRC-32 uses standard CRC-32 polynomial (0xEDB88320, reflected)
+        /// with init=0xFFFFFFFF but NO final XOR.
+        /// </summary>
+        public static uint AceCrc32(ReadOnlySpan<byte> data)
+        {
+            uint crc = 0xFFFFFFFFu;
+
+            foreach (byte b in data)
+            {
+                crc = (crc >> 8) ^ Crc32Table[(crc ^ b) & 0xFF];
+            }
+
+            return crc; // No final XOR for ACE
+        }
+
+        /// <summary>
+        /// ACE CRC-16 is the lower 16 bits of the ACE CRC-32.
+        /// </summary>
+        public static ushort AceCrc16(ReadOnlySpan<byte> data)
+        {
+            return (ushort)(AceCrc32(data) & 0xFFFF);
+        }
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/AceEntry.cs b/src/SharpCompress/Common/Ace/AceEntry.cs
new file mode 100644
index 000000000..a3dde1e7f
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/AceEntry.cs
@@ -0,0 +1,68 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using SharpCompress.Common.Ace.Headers;
+
+namespace SharpCompress.Common.Ace
+{
+    public class AceEntry : Entry
+    {
+        private readonly AceFilePart _filePart;
+
+        internal AceEntry(AceFilePart filePart)
+        {
+            _filePart = filePart;
+        }
+
+        public override long Crc
+        {
+            get
+            {
+                if (_filePart == null)
+                {
+                    return 0;
+                }
+                return _filePart.Header.Crc32;
+            }
+        }
+
+        public override string? Key => _filePart?.Header.Filename;
+
+        public override string? LinkTarget => null;
+
+        public override long CompressedSize => _filePart?.Header.PackedSize ?? 0;
+
+        public override CompressionType CompressionType
+        {
+            get
+            {
+                if (_filePart.Header.CompressionType == Headers.CompressionType.Stored)
+                {
+                    return CompressionType.None;
+                }
+                return CompressionType.AceLZ77;
+            }
+        }
+
+        public override long Size => _filePart?.Header.OriginalSize ?? 0;
+
+        public override DateTime? LastModifiedTime => _filePart.Header.DateTime;
+
+        public override DateTime? CreatedTime => null;
+
+        public override DateTime? LastAccessedTime => null;
+
+        public override DateTime? ArchivedTime => null;
+
+        public override bool IsEncrypted => _filePart.Header.IsFileEncrypted;
+
+        public override bool IsDirectory => _filePart.Header.IsDirectory;
+
+        public override bool IsSplitAfter => false;
+
+        internal override IEnumerable<FilePart> Parts => _filePart.Empty();
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/AceFilePart.cs b/src/SharpCompress/Common/Ace/AceFilePart.cs
new file mode 100644
index 000000000..6efefaf56
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/AceFilePart.cs
@@ -0,0 +1,52 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using SharpCompress.Common.Ace.Headers;
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Ace
+{
+    public class AceFilePart : FilePart
+    {
+        private readonly Stream _stream;
+        internal AceFileHeader Header { get; set; }
+
+        internal AceFilePart(AceFileHeader localAceHeader, Stream seekableStream)
+            : base(localAceHeader.ArchiveEncoding)
+        {
+            _stream = seekableStream;
+            Header = localAceHeader;
+        }
+
+        internal override string? FilePartName => Header.Filename;
+
+        internal override Stream GetCompressedStream()
+        {
+            if (_stream != null)
+            {
+                Stream compressedStream;
+                switch (Header.CompressionType)
+                {
+                    case Headers.CompressionType.Stored:
+                        compressedStream = new ReadOnlySubStream(
+                            _stream,
+                            Header.DataStartPosition,
+                            Header.PackedSize
+                        );
+                        break;
+                    default:
+                        throw new NotSupportedException(
+                            "CompressionMethod: " + Header.CompressionType
+                        );
+                }
+                return compressedStream;
+            }
+            return _stream.NotNull();
+        }
+
+        internal override Stream? GetRawStream() => _stream;
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/AceVolume.cs b/src/SharpCompress/Common/Ace/AceVolume.cs
new file mode 100644
index 000000000..28456866b
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/AceVolume.cs
@@ -0,0 +1,35 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using SharpCompress.Common.Arj;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Common.Ace
+{
+    public class AceVolume : Volume
+    {
+        public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
+            : base(stream, readerOptions, index) { }
+
+        public override bool IsFirstVolume
+        {
+            get { return true; }
+        }
+
+        /// <summary>
+        /// AceVolume is part of a multi-part archive.
+        /// </summary>
+        public override bool IsMultiVolume
+        {
+            get { return false; }
+        }
+
+        internal IEnumerable<AceFilePart> GetVolumeFileParts()
+        {
+            return new List<AceFilePart>();
+        }
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs b/src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs
new file mode 100644
index 000000000..9fd34c63d
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs
@@ -0,0 +1,171 @@
+using System;
+using System.Buffers.Binary;
+using System.Collections.Generic;
+using System.IO;
+using System.Xml.Linq;
+using SharpCompress.Common.Arc;
+
+namespace SharpCompress.Common.Ace.Headers
+{
+    /// <summary>
+    /// ACE file entry header
+    /// </summary>
+    public sealed class AceFileHeader : AceHeader
+    {
+        public long DataStartPosition { get; private set; }
+        public long PackedSize { get; set; }
+        public long OriginalSize { get; set; }
+        public DateTime DateTime { get; set; }
+        public int Attributes { get; set; }
+        public uint Crc32 { get; set; }
+        public CompressionType CompressionType { get; set; }
+        public CompressionQuality CompressionQuality { get; set; }
+        public ushort Parameters { get; set; }
+        public string Filename { get; set; } = string.Empty;
+        public List<string> Comment { get; set; } = new();
+
+        /// <summary>
+        /// File data offset in the archive
+        /// </summary>
+        public ulong DataOffset { get; set; }
+
+        public bool IsDirectory => (Attributes & 0x10) != 0;
+
+        public bool IsContinuedFromPrev =>
+            (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_PREV) != 0;
+
+        public bool IsContinuedToNext =>
+            (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_NEXT) != 0;
+
+        public int DictionarySize
+        {
+            get
+            {
+                int bits = Parameters & 0x0F;
+                return bits < 10 ? 1024 : 1 << bits;
+            }
+        }
+
+        public AceFileHeader(ArchiveEncoding archiveEncoding)
+            : base(archiveEncoding, AceHeaderType.FILE) { }
+
+        /// <summary>
+        /// Reads the next file entry header from the stream.
+        /// Returns null if no more entries or end of archive.
+        /// Supports both ACE 1.0 and ACE 2.0 formats.
+        /// </summary>
+        public override AceHeader? Read(Stream stream)
+        {
+            var headerData = ReadHeader(stream);
+            if (headerData.Length == 0)
+            {
+                return null;
+            }
+            int offset = 0;
+
+            // Header type (1 byte)
+            HeaderType = headerData[offset++];
+
+            // Skip recovery record headers (ACE 2.0 feature)
+            if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
+            {
+                // Skip to next header
+                return null;
+            }
+
+            if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
+            {
+                // Unknown header type - skip
+                return null;
+            }
+
+            // Header flags (2 bytes)
+            HeaderFlags = BitConverter.ToUInt16(headerData, offset);
+            offset += 2;
+
+            // Packed size (4 bytes)
+            PackedSize = BitConverter.ToUInt32(headerData, offset);
+            offset += 4;
+
+            // Original size (4 bytes)
+            OriginalSize = BitConverter.ToUInt32(headerData, offset);
+            offset += 4;
+
+            // File date/time in DOS format (4 bytes)
+            var dosDateTime = BitConverter.ToUInt32(headerData, offset);
+            DateTime = ConvertDosDateTime(dosDateTime);
+            offset += 4;
+
+            // File attributes (4 bytes)
+            Attributes = (int)BitConverter.ToUInt32(headerData, offset);
+            offset += 4;
+
+            // CRC32 (4 bytes)
+            Crc32 = BitConverter.ToUInt32(headerData, offset);
+            offset += 4;
+
+            // Compression type (1 byte)
+            byte compressionType = headerData[offset++];
+            CompressionType = GetCompressionType(compressionType);
+
+            // Compression quality/parameter (1 byte)
+            byte compressionQuality = headerData[offset++];
+            CompressionQuality = GetCompressionQuality(compressionQuality);
+
+            // Parameters (2 bytes)
+            Parameters = BitConverter.ToUInt16(headerData, offset);
+            offset += 2;
+
+            // Reserved (2 bytes) - skip
+            offset += 2;
+
+            // Filename length (2 bytes)
+            var filenameLength = BitConverter.ToUInt16(headerData, offset);
+            offset += 2;
+
+            // Filename
+            if (offset + filenameLength <= headerData.Length)
+            {
+                Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
+                offset += filenameLength;
+            }
+
+            // Handle comment if present
+            if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
+            {
+                // Comment length (2 bytes)
+                if (offset + 2 <= headerData.Length)
+                {
+                    ushort commentLength = BitConverter.ToUInt16(headerData, offset);
+                    offset += 2 + commentLength; // Skip comment
+                }
+            }
+
+            // Store the data start position
+            DataStartPosition = stream.Position;
+
+            return this;
+        }
+
+        public CompressionType GetCompressionType(byte value) =>
+            value switch
+            {
+                0 => CompressionType.Stored,
+                1 => CompressionType.Lz77,
+                2 => CompressionType.Blocked,
+                _ => CompressionType.Unknown,
+            };
+
+        public CompressionQuality GetCompressionQuality(byte value) =>
+            value switch
+            {
+                0 => CompressionQuality.None,
+                1 => CompressionQuality.Fastest,
+                2 => CompressionQuality.Fast,
+                3 => CompressionQuality.Normal,
+                4 => CompressionQuality.Good,
+                5 => CompressionQuality.Best,
+                _ => CompressionQuality.Unknown,
+            };
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/Headers/AceHeader.cs b/src/SharpCompress/Common/Ace/Headers/AceHeader.cs
new file mode 100644
index 000000000..0fa816e5b
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/Headers/AceHeader.cs
@@ -0,0 +1,153 @@
+using System;
+using System.IO;
+using SharpCompress.Common.Arj.Headers;
+using SharpCompress.Crypto;
+
+namespace SharpCompress.Common.Ace.Headers
+{
+    /// <summary>
+    /// Header type constants
+    /// </summary>
+    public enum AceHeaderType
+    {
+        MAIN = 0,
+        FILE = 1,
+        RECOVERY32 = 2,
+        RECOVERY64A = 3,
+        RECOVERY64B = 4,
+    }
+
+    public abstract class AceHeader
+    {
+        // ACE signature: bytes at offset 7 should be "**ACE**"
+        private static readonly byte[] AceSignature =
+        [
+            (byte)'*',
+            (byte)'*',
+            (byte)'A',
+            (byte)'C',
+            (byte)'E',
+            (byte)'*',
+            (byte)'*',
+        ];
+
+        public AceHeader(ArchiveEncoding archiveEncoding, AceHeaderType type)
+        {
+            AceHeaderType = type;
+            ArchiveEncoding = archiveEncoding;
+        }
+
+        public ArchiveEncoding ArchiveEncoding { get; }
+        public AceHeaderType AceHeaderType { get; }
+
+        public ushort HeaderFlags { get; set; }
+        public ushort HeaderCrc { get; set; }
+        public ushort HeaderSize { get; set; }
+        public byte HeaderType { get; set; }
+
+        public bool IsFileEncrypted =>
+            (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.FILE_ENCRYPTED) != 0;
+        public bool Is64Bit =>
+            (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MEMORY_64BIT) != 0;
+
+        public bool IsSolid =>
+            (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.SOLID_MAIN) != 0;
+
+        public bool IsMultiVolume =>
+            (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MULTIVOLUME) != 0;
+
+        public abstract AceHeader? Read(Stream reader);
+
+        public byte[] ReadHeader(Stream stream)
+        {
+            // Read header CRC (2 bytes) and header size (2 bytes)
+            var headerBytes = new byte[4];
+            if (stream.Read(headerBytes, 0, 4) != 4)
+            {
+                return Array.Empty<byte>();
+            }
+
+            HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
+            HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
+            if (HeaderSize == 0)
+            {
+                return Array.Empty<byte>();
+            }
+
+            // Read the header data
+            var body = new byte[HeaderSize];
+            if (stream.Read(body, 0, HeaderSize) != HeaderSize)
+            {
+                return Array.Empty<byte>();
+            }
+
+            // Verify crc
+            var checksum = AceCrc.AceCrc16(body);
+            if (checksum != HeaderCrc)
+            {
+                throw new InvalidDataException("Header checksum is invalid");
+            }
+            return body;
+        }
+
+        public static bool IsArchive(Stream stream)
+        {
+            // ACE files have a specific signature
+            // First two bytes are typically 0x60 0xEA (signature bytes)
+            // At offset 7, there should be "**ACE**" (7 bytes)
+            var bytes = new byte[14];
+            if (stream.Read(bytes, 0, 14) != 14)
+            {
+                return false;
+            }
+
+            // Check for "**ACE**" at offset 7
+            return CheckMagicBytes(bytes, 7);
+        }
+
+        protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
+        {
+            // Check for "**ACE**" at specified offset
+            for (int i = 0; i < AceSignature.Length; i++)
+            {
+                if (headerBytes[offset + i] != AceSignature[i])
+                {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        protected DateTime ConvertDosDateTime(uint dosDateTime)
+        {
+            try
+            {
+                int second = (int)(dosDateTime & 0x1F) * 2;
+                int minute = (int)((dosDateTime >> 5) & 0x3F);
+                int hour = (int)((dosDateTime >> 11) & 0x1F);
+                int day = (int)((dosDateTime >> 16) & 0x1F);
+                int month = (int)((dosDateTime >> 21) & 0x0F);
+                int year = (int)((dosDateTime >> 25) & 0x7F) + 1980;
+
+                if (
+                    day < 1
+                    || day > 31
+                    || month < 1
+                    || month > 12
+                    || hour > 23
+                    || minute > 59
+                    || second > 59
+                )
+                {
+                    return DateTime.MinValue;
+                }
+
+                return new DateTime(year, month, day, hour, minute, second);
+            }
+            catch
+            {
+                return DateTime.MinValue;
+            }
+        }
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs b/src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs
new file mode 100644
index 000000000..c2fc68159
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs
@@ -0,0 +1,97 @@
+using System;
+using System.Buffers.Binary;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Common.Ace.Headers;
+using SharpCompress.Common.Zip.Headers;
+using SharpCompress.Crypto;
+
+namespace SharpCompress.Common.Ace.Headers
+{
+    /// <summary>
+    /// ACE main archive header
+    /// </summary>
+    public sealed class AceMainHeader : AceHeader
+    {
+        public byte ExtractVersion { get; set; }
+        public byte CreatorVersion { get; set; }
+        public HostOS HostOS { get; set; }
+        public byte VolumeNumber { get; set; }
+        public DateTime DateTime { get; set; }
+        public string Advert { get; set; } = string.Empty;
+        public List<string> Comment { get; set; } = new();
+        public byte AceVersion { get; private set; }
+
+        public AceMainHeader(ArchiveEncoding archiveEncoding)
+            : base(archiveEncoding, AceHeaderType.MAIN) { }
+
+        /// <summary>
+        /// Reads the main archive header from the stream.
+        /// Returns header if this is a valid ACE archive.
+        /// Supports both ACE 1.0 and ACE 2.0 formats.
+        /// </summary>
+        public override AceHeader? Read(Stream stream)
+        {
+            var headerData = ReadHeader(stream);
+            if (headerData.Length == 0)
+            {
+                return null;
+            }
+            int offset = 0;
+
+            // Header type should be 0 for main header
+            if (headerData[offset++] != HeaderType)
+            {
+                return null;
+            }
+
+            // Header flags (2 bytes)
+            HeaderFlags = BitConverter.ToUInt16(headerData, offset);
+            offset += 2;
+
+            // Skip signature "**ACE**" (7 bytes)
+            if (!CheckMagicBytes(headerData, offset))
+            {
+                throw new InvalidDataException("Invalid ACE archive signature.");
+            }
+            offset += 7;
+
+            // ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
+            AceVersion = headerData[offset++];
+            ExtractVersion = headerData[offset++];
+
+            // Host OS (1 byte)
+            if (offset < headerData.Length)
+            {
+                var hostOsByte = headerData[offset++];
+                HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
+            }
+            // Volume number (1 byte)
+            VolumeNumber = headerData[offset++];
+
+            // Creation date/time (4 bytes)
+            var dosDateTime = BitConverter.ToUInt32(headerData, offset);
+            DateTime = ConvertDosDateTime(dosDateTime);
+            offset += 4;
+
+            // Reserved fields (8 bytes)
+            if (offset + 8 <= headerData.Length)
+            {
+                offset += 8;
+            }
+
+            // Skip additional fields based on flags
+            // Handle comment if present
+            if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
+            {
+                if (offset + 2 <= headerData.Length)
+                {
+                    ushort commentLength = BitConverter.ToUInt16(headerData, offset);
+                    offset += 2 + commentLength;
+                }
+            }
+
+            return this;
+        }
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/Headers/CompressionQuality.cs b/src/SharpCompress/Common/Ace/Headers/CompressionQuality.cs
new file mode 100644
index 000000000..57017b55f
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/Headers/CompressionQuality.cs
@@ -0,0 +1,16 @@
+namespace SharpCompress.Common.Ace.Headers
+{
+    /// <summary>
+    /// Compression quality
+    /// </summary>
+    public enum CompressionQuality
+    {
+        None,
+        Fastest,
+        Fast,
+        Normal,
+        Good,
+        Best,
+        Unknown,
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/Headers/CompressionType.cs b/src/SharpCompress/Common/Ace/Headers/CompressionType.cs
new file mode 100644
index 000000000..799e7929b
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/Headers/CompressionType.cs
@@ -0,0 +1,13 @@
+namespace SharpCompress.Common.Ace.Headers
+{
+    /// <summary>
+    /// Compression types
+    /// </summary>
+    public enum CompressionType
+    {
+        Stored,
+        Lz77,
+        Blocked,
+        Unknown,
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/Headers/HeaderFlags.cs b/src/SharpCompress/Common/Ace/Headers/HeaderFlags.cs
new file mode 100644
index 000000000..6a5f8926f
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/Headers/HeaderFlags.cs
@@ -0,0 +1,33 @@
+namespace SharpCompress.Common.Ace.Headers
+{
+    /// <summary>
+    /// Header flags (main + file, overlapping meanings)
+    /// </summary>
+    public static class HeaderFlags
+    {
+        // Shared / low bits
+        public const ushort ADDSIZE = 0x0001; // extra size field present
+        public const ushort COMMENT = 0x0002; // comment present
+        public const ushort MEMORY_64BIT = 0x0004;
+        public const ushort AV_STRING = 0x0008; // AV string present
+        public const ushort SOLID = 0x0010; // solid file
+        public const ushort LOCKED = 0x0020;
+        public const ushort PROTECTED = 0x0040;
+
+        // Main header specific
+        public const ushort V20FORMAT = 0x0100;
+        public const ushort SFX = 0x0200;
+        public const ushort LIMITSFXJR = 0x0400;
+        public const ushort MULTIVOLUME = 0x0800;
+        public const ushort ADVERT = 0x1000;
+        public const ushort RECOVERY = 0x2000;
+        public const ushort LOCKED_MAIN = 0x4000;
+        public const ushort SOLID_MAIN = 0x8000;
+
+        // File header specific (same bits, different meaning)
+        public const ushort NTSECURITY = 0x0400;
+        public const ushort CONTINUED_PREV = 0x1000;
+        public const ushort CONTINUED_NEXT = 0x2000;
+        public const ushort FILE_ENCRYPTED = 0x4000; // file encrypted (file header)
+    }
+}
diff --git a/src/SharpCompress/Common/Ace/Headers/HostOS.cs b/src/SharpCompress/Common/Ace/Headers/HostOS.cs
new file mode 100644
index 000000000..173d56eb8
--- /dev/null
+++ b/src/SharpCompress/Common/Ace/Headers/HostOS.cs
@@ -0,0 +1,22 @@
+namespace SharpCompress.Common.Ace.Headers
+{
+    /// <summary>
+    /// Host OS type
+    /// </summary>
+    public enum HostOS
+    {
+        MsDos = 0,
+        Os2,
+        Windows,
+        Unix,
+        MacOs,
+        WinNt,
+        Primos,
+        AppleGs,
+        Atari,
+        Vax,
+        Amiga,
+        Next,
+        Unknown,
+    }
+}
diff --git a/src/SharpCompress/Common/ArchiveType.cs b/src/SharpCompress/Common/ArchiveType.cs
index 9d9438955..5952f6459 100644
--- a/src/SharpCompress/Common/ArchiveType.cs
+++ b/src/SharpCompress/Common/ArchiveType.cs
@@ -9,4 +9,5 @@ public enum ArchiveType
GZip,
Arc,
Arj,
+ Ace,
}
diff --git a/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs b/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs
index 138f75e72..142aca5bb 100644
--- a/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs
+++ b/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs
@@ -34,14 +34,13 @@ public ArjHeader(ArjHeaderType type)
         public byte[] ReadHeader(Stream stream)
         {
             // check for magic bytes
-            Span<byte> magic = stackalloc byte[2];
+            var magic = new byte[2];
             if (stream.Read(magic) != 2)
             {
                 return Array.Empty<byte>();
             }
-            var magicValue = (ushort)(magic[0] | magic[1] << 8);
-            if (magicValue != ARJ_MAGIC)
+            if (!CheckMagicBytes(magic))
             {
                 throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
             }
@@ -138,5 +137,22 @@ public static FileType FileTypeFromByte(byte value)
                 ? (FileType)value
                 : Headers.FileType.Unknown;
         }
+
+        public static bool IsArchive(Stream stream)
+        {
+            var bytes = new byte[2];
+            if (stream.Read(bytes, 0, 2) != 2)
+            {
+                return false;
+            }
+
+            return CheckMagicBytes(bytes);
+        }
+
+        protected static bool CheckMagicBytes(byte[] headerBytes)
+        {
+            var magicValue = (ushort)(headerBytes[0] | headerBytes[1] << 8);
+            return magicValue == ARJ_MAGIC;
+        }
     }
 }
diff --git a/src/SharpCompress/Common/CompressionType.cs b/src/SharpCompress/Common/CompressionType.cs
index 7c50758a0..595834233 100644
--- a/src/SharpCompress/Common/CompressionType.cs
+++ b/src/SharpCompress/Common/CompressionType.cs
@@ -30,4 +30,5 @@ public enum CompressionType
Distilled,
ZStandard,
ArjLZ77,
+ AceLZ77,
}
diff --git a/src/SharpCompress/Factories/AceFactory.cs b/src/SharpCompress/Factories/AceFactory.cs
new file mode 100644
index 000000000..5b80ae24f
--- /dev/null
+++ b/src/SharpCompress/Factories/AceFactory.cs
@@ -0,0 +1,37 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using SharpCompress.Common;
+using SharpCompress.Common.Ace.Headers;
+using SharpCompress.Readers;
+using SharpCompress.Readers.Ace;
+
+namespace SharpCompress.Factories
+{
+    public class AceFactory : Factory, IReaderFactory
+    {
+        public override string Name => "Ace";
+
+        public override ArchiveType? KnownArchiveType => ArchiveType.Ace;
+
+        public override IEnumerable<string> GetSupportedExtensions()
+        {
+            yield return "ace";
+        }
+
+        public override bool IsArchive(
+            Stream stream,
+            string? password = null,
+            int bufferSize = ReaderOptions.DefaultBufferSize
+        )
+        {
+            return AceHeader.IsArchive(stream);
+        }
+
+        public IReader OpenReader(Stream stream, ReaderOptions? options) =>
+            AceReader.Open(stream, options);
+    }
+}
diff --git a/src/SharpCompress/Factories/ArjFactory.cs b/src/SharpCompress/Factories/ArjFactory.cs
index 7499946be..f6f7a3934 100644
--- a/src/SharpCompress/Factories/ArjFactory.cs
+++ b/src/SharpCompress/Factories/ArjFactory.cs
@@ -28,12 +28,7 @@ public override bool IsArchive(
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
- var arjHeader = new ArjMainHeader(new ArchiveEncoding());
- if (arjHeader.Read(stream) == null)
- {
- return false;
- }
- return true;
+ return ArjHeader.IsArchive(stream);
}
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
diff --git a/src/SharpCompress/Factories/Factory.cs b/src/SharpCompress/Factories/Factory.cs
index dba201777..4651ccb22 100644
--- a/src/SharpCompress/Factories/Factory.cs
+++ b/src/SharpCompress/Factories/Factory.cs
@@ -19,6 +19,7 @@ static Factory()
RegisterFactory(new TarFactory());
RegisterFactory(new ArcFactory());
RegisterFactory(new ArjFactory());
+ RegisterFactory(new AceFactory());
}
private static readonly HashSet _factories = new();
diff --git a/src/SharpCompress/Readers/Ace/AceReader.cs b/src/SharpCompress/Readers/Ace/AceReader.cs
new file mode 100644
index 000000000..2252a276e
--- /dev/null
+++ b/src/SharpCompress/Readers/Ace/AceReader.cs
@@ -0,0 +1,115 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using SharpCompress.Common;
+using SharpCompress.Common.Ace;
+using SharpCompress.Common.Ace.Headers;
+using SharpCompress.Common.Arj;
+
+namespace SharpCompress.Readers.Ace
+{
+    /// <summary>
+    /// Reader for ACE archives.
+    /// ACE is a proprietary archive format. This implementation supports both ACE 1.0 and ACE 2.0 formats
+    /// and can read archive metadata and extract uncompressed (stored) entries.
+    /// Compressed entries require proprietary decompression algorithms that are not publicly documented.
+    /// </summary>
+    /// <remarks>
+    /// ACE 2.0 additions over ACE 1.0:
+    /// - Improved LZ77 compression (compression type 2)
+    /// - Recovery record support
+    /// - Additional header flags
+    /// </remarks>
+    public abstract class AceReader : AbstractReader<AceEntry, AceVolume>
+    {
+        private readonly ArchiveEncoding _archiveEncoding;
+
+        internal AceReader(ReaderOptions options)
+            : base(options, ArchiveType.Ace)
+        {
+            _archiveEncoding = Options.ArchiveEncoding;
+        }
+
+        private AceReader(Stream stream, ReaderOptions options)
+            : this(options) { }
+
+        /// <summary>
+        /// Derived class must create or manage the Volume itself.
+        /// AbstractReader.Volume is get-only, so it cannot be set here.
+        /// </summary>
+        public override AceVolume? Volume => _volume;
+
+        private AceVolume? _volume;
+
+        /// <summary>
+        /// Opens an AceReader for non-seeking usage with a single volume.
+        /// </summary>
+        /// <param name="stream">The stream containing the ACE archive.</param>
+        /// <param name="options">Reader options.</param>
+        /// <returns>An AceReader instance.</returns>
+        public static AceReader Open(Stream stream, ReaderOptions? options = null)
+        {
+            stream.NotNull(nameof(stream));
+            return new SingleVolumeAceReader(stream, options ?? new ReaderOptions());
+        }
+
+        /// <summary>
+        /// Opens an AceReader for Non-seeking usage with multiple volumes
+        /// </summary>
+        /// <param name="streams"></param>
+        /// <param name="options"></param>
+        /// <returns></returns>
+        public static AceReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
+        {
+            streams.NotNull(nameof(streams));
+            return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
+        }
+
+        protected abstract void ValidateArchive(AceVolume archive);
+
+        protected override IEnumerable<AceEntry> GetEntries(Stream stream)
+        {
+            var mainHeaderReader = new AceMainHeader(_archiveEncoding);
+            var mainHeader = mainHeaderReader.Read(stream);
+            if (mainHeader == null)
+            {
+                yield break;
+            }
+
+            if (mainHeader?.IsMultiVolume == true)
+            {
+                throw new MultiVolumeExtractionException(
+                    "Multi volumes are currently not supported"
+                );
+            }
+
+            if (_volume == null)
+            {
+                _volume = new AceVolume(stream, Options, 0);
+                ValidateArchive(_volume);
+            }
+
+            var localHeaderReader = new AceFileHeader(_archiveEncoding);
+            while (true)
+            {
+                var localHeader = localHeaderReader.Read(stream);
+                if (localHeader?.IsFileEncrypted == true)
+                {
+                    throw new CryptographicException(
+                        "Password protected archives are currently not supported"
+                    );
+                }
+                if (localHeader == null)
+                    break;
+
+                yield return new AceEntry(new AceFilePart((AceFileHeader)localHeader, stream));
+            }
+        }
+
+        protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
+            Entry.Parts;
+    }
+}
diff --git a/src/SharpCompress/Readers/Ace/MultiVolumeAceReader.cs b/src/SharpCompress/Readers/Ace/MultiVolumeAceReader.cs
new file mode 100644
index 000000000..a729ab3fd
--- /dev/null
+++ b/src/SharpCompress/Readers/Ace/MultiVolumeAceReader.cs
@@ -0,0 +1,117 @@
+#nullable disable
+
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using SharpCompress.Common;
+using SharpCompress.Common.Ace;
+
+namespace SharpCompress.Readers.Ace
+{
+    internal class MultiVolumeAceReader : AceReader
+    {
+        private readonly IEnumerator<Stream> streams;
+        private Stream tempStream;
+
+        internal MultiVolumeAceReader(IEnumerable<Stream> streams, ReaderOptions options)
+            : base(options) => this.streams = streams.GetEnumerator();
+
+        protected override void ValidateArchive(AceVolume archive) { }
+
+        protected override Stream RequestInitialStream()
+        {
+            if (streams.MoveNext())
+            {
+                return streams.Current;
+            }
+            throw new MultiVolumeExtractionException(
+                "No stream provided when requested by MultiVolumeAceReader"
+            );
+        }
+
+        internal override bool NextEntryForCurrentStream()
+        {
+            if (!base.NextEntryForCurrentStream())
+            {
+                // if we've got another stream to try to process then do so
+                return streams.MoveNext() && LoadStreamForReading(streams.Current);
+            }
+            return true;
+        }
+
+        protected override IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry()
+        {
+            var enumerator = new MultiVolumeStreamEnumerator(this, streams, tempStream);
+            tempStream = null;
+            return enumerator;
+        }
+
+        private class MultiVolumeStreamEnumerator : IEnumerable<FilePart>, IEnumerator<FilePart>
+        {
+            private readonly MultiVolumeAceReader reader;
+            private readonly IEnumerator<Stream> nextReadableStreams;
+            private Stream tempStream;
+            private bool isFirst = true;
+
+            internal MultiVolumeStreamEnumerator(
+                MultiVolumeAceReader r,
+                IEnumerator<Stream> nextReadableStreams,
+                Stream tempStream
+            )
+            {
+                reader = r;
+                this.nextReadableStreams = nextReadableStreams;
+                this.tempStream = tempStream;
+            }
+
+            public IEnumerator<FilePart> GetEnumerator() => this;
+
+            IEnumerator IEnumerable.GetEnumerator() => this;
+
+            public FilePart Current { get; private set; }
+
+            public void Dispose() { }
+
+            object IEnumerator.Current => Current;
+
+            public bool MoveNext()
+            {
+                if (isFirst)
+                {
+                    Current = reader.Entry.Parts.First();
+                    isFirst = false; //first stream already to go
+                    return true;
+                }
+
+                if (!reader.Entry.IsSplitAfter)
+                {
+                    return false;
+                }
+                if (tempStream != null)
+                {
+                    reader.LoadStreamForReading(tempStream);
+                    tempStream = null;
+                }
+                else if (!nextReadableStreams.MoveNext())
+                {
+                    throw new MultiVolumeExtractionException(
+                        "No stream provided when requested by MultiVolumeAceReader"
+                    );
+                }
+                else
+                {
+                    reader.LoadStreamForReading(nextReadableStreams.Current);
+                }
+
+                Current = reader.Entry.Parts.First();
+                return true;
+            }
+
+            public void Reset() { }
+        }
+    }
+}
diff --git a/src/SharpCompress/Readers/Ace/SingleVolumeAceReader.cs b/src/SharpCompress/Readers/Ace/SingleVolumeAceReader.cs
new file mode 100644
index 000000000..61182d66a
--- /dev/null
+++ b/src/SharpCompress/Readers/Ace/SingleVolumeAceReader.cs
@@ -0,0 +1,31 @@
+using System;
+using System.IO;
+using SharpCompress.Common;
+using SharpCompress.Common.Ace;
+
+namespace SharpCompress.Readers.Ace
+{
+    internal class SingleVolumeAceReader : AceReader
+    {
+        private readonly Stream _stream;
+
+        internal SingleVolumeAceReader(Stream stream, ReaderOptions options)
+            : base(options)
+        {
+            stream.NotNull(nameof(stream));
+            _stream = stream;
+        }
+
+        protected override Stream RequestInitialStream() => _stream;
+
+        protected override void ValidateArchive(AceVolume archive)
+        {
+            if (archive.IsMultiVolume)
+            {
+                throw new MultiVolumeExtractionException(
+                    "Streamed archive is a Multi-volume archive. Use a different AceReader method to extract."
+                );
+            }
+        }
+    }
+}
diff --git a/src/SharpCompress/Readers/ReaderFactory.cs b/src/SharpCompress/Readers/ReaderFactory.cs
index 80995e41e..b8b022b04 100644
--- a/src/SharpCompress/Readers/ReaderFactory.cs
+++ b/src/SharpCompress/Readers/ReaderFactory.cs
@@ -70,7 +70,7 @@ public static IReader Open(Stream stream, ReaderOptions? options = null)
}
throw new InvalidFormatException(
- "Cannot determine compressed stream type. Supported Reader Formats: Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
+ "Cannot determine compressed stream type. Supported Reader Formats: Ace, Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
);
}
}
diff --git a/tests/SharpCompress.Test/Ace/AceReaderTests.cs b/tests/SharpCompress.Test/Ace/AceReaderTests.cs
new file mode 100644
index 000000000..b6cf97f2a
--- /dev/null
+++ b/tests/SharpCompress.Test/Ace/AceReaderTests.cs
@@ -0,0 +1,61 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using SharpCompress.Common;
+using SharpCompress.Readers;
+using SharpCompress.Readers.Ace;
+using Xunit;
+
+namespace SharpCompress.Test.Ace
+{
+ public class AceReaderTests : ReaderTests
+ {
+ public AceReaderTests()
+ {
+ UseExtensionInsteadOfNameToVerify = true;
+ UseCaseInsensitiveToVerify = true;
+ }
+
+ [Fact]
+ public void Ace_Uncompressed_Read() => Read("Ace.store.ace", CompressionType.None);
+
+ [Fact]
+ public void Ace_Encrypted_Read()
+ {
+ var exception = Assert.Throws<CryptographicException>(() => Read("Ace.encrypted.ace"));
+ }
+
+ [Theory]
+ [InlineData("Ace.method1.ace", CompressionType.AceLZ77)]
+ [InlineData("Ace.method1-solid.ace", CompressionType.AceLZ77)]
+ [InlineData("Ace.method2.ace", CompressionType.AceLZ77)]
+ [InlineData("Ace.method2-solid.ace", CompressionType.AceLZ77)]
+ public void Ace_Unsupported_ShouldThrow(string fileName, CompressionType compressionType)
+ {
+ var exception = Assert.Throws<NotSupportedException>(() =>
+ Read(fileName, compressionType)
+ );
+ }
+
+ [Theory]
+ [InlineData("Ace.store.largefile.ace", CompressionType.None)]
+ public void Ace_LargeFileTest_Read(string fileName, CompressionType compressionType)
+ {
+ ReadForBufferBoundaryCheck(fileName, compressionType);
+ }
+
+ [Fact]
+ public void Ace_Multi_Reader()
+ {
+ var exception = Assert.Throws<MultiVolumeExtractionException>(() =>
+ DoMultiReader(
+ ["Ace.store.split.ace", "Ace.store.split.c01"],
+ streams => AceReader.Open(streams)
+ )
+ );
+ }
+ }
+}
diff --git a/tests/SharpCompress.Test/Arj/ArjReaderTests.cs b/tests/SharpCompress.Test/Arj/ArjReaderTests.cs
index ec8b422d7..bad98ec80 100644
--- a/tests/SharpCompress.Test/Arj/ArjReaderTests.cs
+++ b/tests/SharpCompress.Test/Arj/ArjReaderTests.cs
@@ -45,14 +45,17 @@ public void Arj_Encrypted_Read()
public void Arj_Multi_Reader()
{
var exception = Assert.Throws<MultiVolumeExtractionException>(() =>
- DoArj_Multi_Reader([
- "Arj.store.split.arj",
- "Arj.store.split.a01",
- "Arj.store.split.a02",
- "Arj.store.split.a03",
- "Arj.store.split.a04",
- "Arj.store.split.a05",
- ])
+ DoMultiReader(
+ [
+ "Arj.store.split.arj",
+ "Arj.store.split.a01",
+ "Arj.store.split.a02",
+ "Arj.store.split.a03",
+ "Arj.store.split.a04",
+ "Arj.store.split.a05",
+ ],
+ streams => ArjReader.Open(streams)
+ )
);
}
@@ -74,26 +77,5 @@ public void Arj_LargeFileTest_Read(string fileName, CompressionType compressionT
{
ReadForBufferBoundaryCheck(fileName, compressionType);
}
-
- private void DoArj_Multi_Reader(string[] archives)
- {
- using (
- var reader = ArjReader.Open(
- archives
- .Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
- .Select(p => File.OpenRead(p))
- )
- )
- {
- while (reader.MoveToNextEntry())
- {
- reader.WriteEntryToDirectory(
- SCRATCH_FILES_PATH,
- new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
- );
- }
- }
- VerifyFiles();
- }
}
}
diff --git a/tests/SharpCompress.Test/ReaderTests.cs b/tests/SharpCompress.Test/ReaderTests.cs
index cc5a75a76..897e6b329 100644
--- a/tests/SharpCompress.Test/ReaderTests.cs
+++ b/tests/SharpCompress.Test/ReaderTests.cs
@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
+using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
@@ -220,4 +221,26 @@ protected void Iterate(
Assert.Equal(expected.Pop(), reader.Entry.Key);
}
}
+
+ protected void DoMultiReader(
+ string[] archives,
+ Func<IEnumerable<Stream>, IDisposable> readerFactory
+ )
+ {
+ using var reader = readerFactory(
+ archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead)
+ );
+
+ dynamic dynReader = reader;
+
+ while (dynReader.MoveToNextEntry())
+ {
+ dynReader.WriteEntryToDirectory(
+ SCRATCH_FILES_PATH,
+ new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
+ );
+ }
+
+ VerifyFiles();
+ }
}
diff --git a/tests/TestArchives/Archives/Ace.encrypted.ace b/tests/TestArchives/Archives/Ace.encrypted.ace
new file mode 100644
index 000000000..d0b86b4ad
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.encrypted.ace differ
diff --git a/tests/TestArchives/Archives/Ace.method1-solid.ace b/tests/TestArchives/Archives/Ace.method1-solid.ace
new file mode 100644
index 000000000..9cc4145e0
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.method1-solid.ace differ
diff --git a/tests/TestArchives/Archives/Ace.method1.ace b/tests/TestArchives/Archives/Ace.method1.ace
new file mode 100644
index 000000000..5a8abc30f
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.method1.ace differ
diff --git a/tests/TestArchives/Archives/Ace.method2-solid.ace b/tests/TestArchives/Archives/Ace.method2-solid.ace
new file mode 100644
index 000000000..1b991acd2
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.method2-solid.ace differ
diff --git a/tests/TestArchives/Archives/Ace.method2.ace b/tests/TestArchives/Archives/Ace.method2.ace
new file mode 100644
index 000000000..7d093e97e
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.method2.ace differ
diff --git a/tests/TestArchives/Archives/Ace.store.ace b/tests/TestArchives/Archives/Ace.store.ace
new file mode 100644
index 000000000..8503b8720
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.store.ace differ
diff --git a/tests/TestArchives/Archives/Ace.store.largefile.ace b/tests/TestArchives/Archives/Ace.store.largefile.ace
new file mode 100644
index 000000000..8df2e7435
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.store.largefile.ace differ
diff --git a/tests/TestArchives/Archives/Ace.store.split.ace b/tests/TestArchives/Archives/Ace.store.split.ace
new file mode 100644
index 000000000..df3e52554
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.store.split.ace differ
diff --git a/tests/TestArchives/Archives/Ace.store.split.c00 b/tests/TestArchives/Archives/Ace.store.split.c00
new file mode 100644
index 000000000..4f1d89a44
Binary files /dev/null and b/tests/TestArchives/Archives/Ace.store.split.c00 differ