diff --git a/src/SharpCompress/Archives/AbstractWritableArchive.cs b/src/SharpCompress/Archives/AbstractWritableArchive.cs index 082b96312..b72c2dff7 100644 --- a/src/SharpCompress/Archives/AbstractWritableArchive.cs +++ b/src/SharpCompress/Archives/AbstractWritableArchive.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.IO; using SharpCompress.Writers; @@ -141,6 +143,18 @@ public void SaveTo(Stream stream, WriterOptions options) SaveTo(stream, options, OldEntries, newEntries); } + public async Task SaveToAsync( + Stream stream, + WriterOptions options, + CancellationToken cancellationToken = default + ) + { + //reset streams of new entries + newEntries.Cast().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin)); + await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken) + .ConfigureAwait(false); + } + protected TEntry CreateEntry( string key, Stream source, @@ -173,6 +187,14 @@ protected abstract void SaveTo( IEnumerable newEntries ); + protected abstract Task SaveToAsync( + Stream stream, + WriterOptions options, + IEnumerable oldEntries, + IEnumerable newEntries, + CancellationToken cancellationToken = default + ); + public override void Dispose() { base.Dispose(); diff --git a/src/SharpCompress/Archives/GZip/GZipArchive.cs b/src/SharpCompress/Archives/GZip/GZipArchive.cs index a0345cdf9..f4eb2805d 100644 --- a/src/SharpCompress/Archives/GZip/GZipArchive.cs +++ b/src/SharpCompress/Archives/GZip/GZipArchive.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.GZip; using SharpCompress.IO; @@ -136,6 +138,16 @@ public void SaveTo(FileInfo fileInfo) SaveTo(stream, new WriterOptions(CompressionType.GZip)); } + public Task SaveToAsync(string filePath, 
CancellationToken cancellationToken = default) => + SaveToAsync(new FileInfo(filePath), cancellationToken); + + public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default) + { + using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write); + await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken) + .ConfigureAwait(false); + } + public static bool IsGZipFile(Stream stream) { // read the header on the first read @@ -196,6 +208,28 @@ IEnumerable newEntries } } + protected override async Task SaveToAsync( + Stream stream, + WriterOptions options, + IEnumerable oldEntries, + IEnumerable newEntries, + CancellationToken cancellationToken = default + ) + { + if (Entries.Count > 1) + { + throw new InvalidFormatException("Only one entry is allowed in a GZip Archive"); + } + using var writer = new GZipWriter(stream, new GZipWriterOptions(options)); + foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory)) + { + using var entryStream = entry.OpenEntryStream(); + await writer + .WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken) + .ConfigureAwait(false); + } + } + protected override IEnumerable LoadEntries(IEnumerable volumes) { var stream = volumes.Single().Stream; diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs index bda1e3580..6fc17e315 100644 --- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs +++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs @@ -1,4 +1,6 @@ using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.IO; @@ -30,6 +32,34 @@ public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWrite streamListener.FireEntryExtractionEnd(archiveEntry); } + public static async Task WriteToAsync( + this IArchiveEntry archiveEntry, + Stream streamToWriteTo, + CancellationToken 
cancellationToken = default + ) + { + if (archiveEntry.IsDirectory) + { + throw new ExtractionException("Entry is a file directory and cannot be extracted."); + } + + var streamListener = (IArchiveExtractionListener)archiveEntry.Archive; + streamListener.EnsureEntriesLoaded(); + streamListener.FireEntryExtractionBegin(archiveEntry); + streamListener.FireFilePartExtractionBegin( + archiveEntry.Key ?? "Key", + archiveEntry.Size, + archiveEntry.CompressedSize + ); + var entryStream = archiveEntry.OpenEntryStream(); + using (entryStream) + { + using Stream s = new ListeningStream(streamListener, entryStream); + await s.CopyToAsync(streamToWriteTo, 81920, cancellationToken).ConfigureAwait(false); + } + streamListener.FireEntryExtractionEnd(archiveEntry); + } + /// /// Extract to specific directory, retaining filename /// @@ -45,6 +75,23 @@ public static void WriteToDirectory( entry.WriteToFile ); + /// + /// Extract to specific directory asynchronously, retaining filename + /// + public static Task WriteToDirectoryAsync( + this IArchiveEntry entry, + string destinationDirectory, + ExtractionOptions? options = null, + CancellationToken cancellationToken = default + ) => + ExtractionMethods.WriteEntryToDirectoryAsync( + entry, + destinationDirectory, + options, + (x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken), + cancellationToken + ); + /// /// Extract to specific file /// @@ -63,4 +110,24 @@ public static void WriteToFile( entry.WriteTo(fs); } ); + + /// + /// Extract to specific file asynchronously + /// + public static Task WriteToFileAsync( + this IArchiveEntry entry, + string destinationFileName, + ExtractionOptions? 
options = null, + CancellationToken cancellationToken = default + ) => + ExtractionMethods.WriteEntryToFileAsync( + entry, + destinationFileName, + options, + async (x, fm) => + { + using var fs = File.Open(destinationFileName, fm); + await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false); + } + ); } diff --git a/src/SharpCompress/Archives/IWritableArchive.cs b/src/SharpCompress/Archives/IWritableArchive.cs index 37b84aa03..5529ec3b6 100644 --- a/src/SharpCompress/Archives/IWritableArchive.cs +++ b/src/SharpCompress/Archives/IWritableArchive.cs @@ -1,5 +1,7 @@ using System; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Writers; namespace SharpCompress.Archives; @@ -18,6 +20,12 @@ IArchiveEntry AddEntry( void SaveTo(Stream stream, WriterOptions options); + Task SaveToAsync( + Stream stream, + WriterOptions options, + CancellationToken cancellationToken = default + ); + /// /// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended. 
/// diff --git a/src/SharpCompress/Archives/IWritableArchiveExtensions.cs b/src/SharpCompress/Archives/IWritableArchiveExtensions.cs index 8f531d41a..4defe6049 100644 --- a/src/SharpCompress/Archives/IWritableArchiveExtensions.cs +++ b/src/SharpCompress/Archives/IWritableArchiveExtensions.cs @@ -1,5 +1,7 @@ using System; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Writers; namespace SharpCompress.Archives; @@ -42,6 +44,24 @@ WriterOptions options writableArchive.SaveTo(stream, options); } + public static Task SaveToAsync( + this IWritableArchive writableArchive, + string filePath, + WriterOptions options, + CancellationToken cancellationToken = default + ) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken); + + public static async Task SaveToAsync( + this IWritableArchive writableArchive, + FileInfo fileInfo, + WriterOptions options, + CancellationToken cancellationToken = default + ) + { + using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write); + await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false); + } + public static void AddAllFromDirectory( this IWritableArchive writableArchive, string filePath, diff --git a/src/SharpCompress/Archives/Tar/TarArchive.cs b/src/SharpCompress/Archives/Tar/TarArchive.cs index 39f0fce6a..05e74dbea 100644 --- a/src/SharpCompress/Archives/Tar/TarArchive.cs +++ b/src/SharpCompress/Archives/Tar/TarArchive.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.Tar; using SharpCompress.Common.Tar.Headers; @@ -242,6 +244,30 @@ IEnumerable newEntries } } + protected override async Task SaveToAsync( + Stream stream, + WriterOptions options, + IEnumerable oldEntries, + IEnumerable newEntries, + CancellationToken cancellationToken = default + ) + { + using var writer = 
new TarWriter(stream, new TarWriterOptions(options)); + foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory)) + { + using var entryStream = entry.OpenEntryStream(); + await writer + .WriteAsync( + entry.Key.NotNull("Entry Key is null"), + entryStream, + entry.LastModifiedTime, + entry.Size, + cancellationToken + ) + .ConfigureAwait(false); + } + } + protected override IReader CreateReaderForSolidExtraction() { var stream = Volumes.Single().Stream; diff --git a/src/SharpCompress/Archives/Zip/ZipArchive.cs b/src/SharpCompress/Archives/Zip/ZipArchive.cs index 35b8e0cb1..955e96498 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchive.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchive.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.Zip; using SharpCompress.Common.Zip.Headers; @@ -317,6 +319,24 @@ IEnumerable newEntries } } + protected override async Task SaveToAsync( + Stream stream, + WriterOptions options, + IEnumerable oldEntries, + IEnumerable newEntries, + CancellationToken cancellationToken = default + ) + { + using var writer = new ZipWriter(stream, new ZipWriterOptions(options)); + foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory)) + { + using var entryStream = entry.OpenEntryStream(); + await writer + .WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken) + .ConfigureAwait(false); + } + } + protected override ZipArchiveEntry CreateEntryInternal( string filePath, Stream source, diff --git a/src/SharpCompress/Writers/Tar/TarWriter.cs b/src/SharpCompress/Writers/Tar/TarWriter.cs index e4b207862..38565c576 100644 --- a/src/SharpCompress/Writers/Tar/TarWriter.cs +++ b/src/SharpCompress/Writers/Tar/TarWriter.cs @@ -1,5 +1,7 @@ using System; using System.IO; +using System.Threading; +using System.Threading.Tasks; using 
SharpCompress.Common; using SharpCompress.Common.Tar.Headers; using SharpCompress.Compressors; @@ -91,6 +93,40 @@ public void Write(string filename, Stream source, DateTime? modificationTime, lo PadTo512(size.Value); } + public override async Task WriteAsync( + string filename, + Stream source, + DateTime? modificationTime, + CancellationToken cancellationToken = default + ) => await WriteAsync(filename, source, modificationTime, null, cancellationToken); + + public async Task WriteAsync( + string filename, + Stream source, + DateTime? modificationTime, + long? size, + CancellationToken cancellationToken = default + ) + { + if (!source.CanSeek && size is null) + { + throw new ArgumentException("Seekable stream is required if no size is given."); + } + + var realSize = size ?? source.Length; + + var header = new TarHeader(WriterOptions.ArchiveEncoding); + + header.LastModifiedTime = modificationTime ?? TarHeader.EPOCH; + header.Name = NormalizeFilename(filename); + header.Size = realSize; + header.Write(OutputStream); + var written = await source + .TransferToAsync(OutputStream, realSize, cancellationToken) + .ConfigureAwait(false); + PadTo512(written); + } + private void PadTo512(long size) { var zeros = unchecked((int)(((size + 511L) & ~511L) - size)); diff --git a/tests/SharpCompress.Test/ArchiveTests.cs b/tests/SharpCompress.Test/ArchiveTests.cs index 9c483c605..2849a71fc 100644 --- a/tests/SharpCompress.Test/ArchiveTests.cs +++ b/tests/SharpCompress.Test/ArchiveTests.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading.Tasks; using SharpCompress.Archives; using SharpCompress.Common; using SharpCompress.Compressors.Xz; @@ -569,4 +570,56 @@ string entryName return (extractedData, crc); } + + protected async Task ArchiveStreamReadAsync( + string testArchive, + ReaderOptions? 
readerOptions = null + ) + { + testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); + await ArchiveStreamReadAsync( + ArchiveFactory.AutoFactory, + readerOptions, + new[] { testArchive } + ); + } + + protected async Task ArchiveStreamReadAsync( + IArchiveFactory archiveFactory, + ReaderOptions? readerOptions, + IEnumerable testArchives + ) + { + foreach (var path in testArchives) + { + using ( + var stream = SharpCompressStream.Create( + File.OpenRead(path), + leaveOpen: true, + throwOnDispose: true + ) + ) + using (var archive = archiveFactory.Open(stream, readerOptions)) + { + try + { + foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory)) + { + await entry.WriteToDirectoryAsync( + SCRATCH_FILES_PATH, + new ExtractionOptions { ExtractFullPath = true, Overwrite = true } + ); + } + } + catch (IndexOutOfRangeException) + { + //SevenZipArchive_BZip2_Split test needs this + stream.ThrowOnDispose = false; + throw; + } + stream.ThrowOnDispose = false; + } + VerifyFiles(); + } + } } diff --git a/tests/SharpCompress.Test/GZip/GZipArchiveAsyncTests.cs b/tests/SharpCompress.Test/GZip/GZipArchiveAsyncTests.cs new file mode 100644 index 000000000..016c6fc0b --- /dev/null +++ b/tests/SharpCompress.Test/GZip/GZipArchiveAsyncTests.cs @@ -0,0 +1,127 @@ +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using SharpCompress.Archives; +using SharpCompress.Archives.GZip; +using SharpCompress.Archives.Tar; +using SharpCompress.Common; +using Xunit; + +namespace SharpCompress.Test.GZip; + +public class GZipArchiveAsyncTests : ArchiveTests +{ + public GZipArchiveAsyncTests() => UseExtensionInsteadOfNameToVerify = true; + + [Fact] + public async Task GZip_Archive_Generic_Async() + { + using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) + using (var archive = ArchiveFactory.Open(stream)) + { + var entry = archive.Entries.First(); + await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, 
entry.Key.NotNull())); + + var size = entry.Size; + var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar")); + var test = new FileInfo(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")); + + Assert.Equal(size, scratch.Length); + Assert.Equal(size, test.Length); + } + CompareArchivesByPath( + Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"), + Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar") + ); + } + + [Fact] + public async Task GZip_Archive_Async() + { + using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) + using (var archive = GZipArchive.Open(stream)) + { + var entry = archive.Entries.First(); + await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull())); + + var size = entry.Size; + var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar")); + var test = new FileInfo(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")); + + Assert.Equal(size, scratch.Length); + Assert.Equal(size, test.Length); + } + CompareArchivesByPath( + Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"), + Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar") + ); + } + + [Fact] + public async Task GZip_Archive_NoAdd_Async() + { + var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg"); + using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")); + using var archive = GZipArchive.Open(stream); + Assert.Throws(() => archive.AddEntry("jpg\\test.jpg", jpg)); + await archive.SaveToAsync(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz")); + } + + [Fact] + public async Task GZip_Archive_Multiple_Reads_Async() + { + var inputStream = new MemoryStream(); + using (var fileStream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) + { + await fileStream.CopyToAsync(inputStream); + inputStream.Position = 0; + } + using var archive = GZipArchive.Open(inputStream); + var archiveEntry = archive.Entries.First(); + + MemoryStream tarStream; + using (var entryStream = archiveEntry.OpenEntryStream()) + { + tarStream = 
new MemoryStream(); + await entryStream.CopyToAsync(tarStream); + } + var size = tarStream.Length; + using (var entryStream = archiveEntry.OpenEntryStream()) + { + tarStream = new MemoryStream(); + await entryStream.CopyToAsync(tarStream); + } + Assert.Equal(size, tarStream.Length); + using (var entryStream = archiveEntry.OpenEntryStream()) + { + var result = TarArchive.IsTarFile(entryStream); + Assert.True(result); + } + Assert.Equal(size, tarStream.Length); + using (var entryStream = archiveEntry.OpenEntryStream()) + { + tarStream = new MemoryStream(); + await entryStream.CopyToAsync(tarStream); + } + Assert.Equal(size, tarStream.Length); + } + + [Fact] + public void TestGzCrcWithMostSignificantBitNotNegative_Async() + { + using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")); + using var archive = GZipArchive.Open(stream); + foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory)) + { + Assert.InRange(entry.Crc, 0L, 0xFFFFFFFFL); + } + } + + [Fact] + public void TestGzArchiveTypeGzip_Async() + { + using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")); + using var archive = GZipArchive.Open(stream); + Assert.Equal(archive.Type, ArchiveType.GZip); + } +} diff --git a/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs b/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs new file mode 100644 index 000000000..ead377edc --- /dev/null +++ b/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs @@ -0,0 +1,83 @@ +using System.IO; +using System.Threading.Tasks; +using SharpCompress.Common; +using SharpCompress.Writers; +using SharpCompress.Writers.GZip; +using Xunit; + +namespace SharpCompress.Test.GZip; + +public class GZipWriterAsyncTests : WriterTests +{ + public GZipWriterAsyncTests() + : base(ArchiveType.GZip) => UseExtensionInsteadOfNameToVerify = true; + + [Fact] + public async Task GZip_Writer_Generic_Async() + { + using ( + Stream stream = File.Open( + Path.Combine(SCRATCH_FILES_PATH, 
"Tar.tar.gz"), + FileMode.OpenOrCreate, + FileAccess.Write + ) + ) + using (var writer = WriterFactory.Open(stream, ArchiveType.GZip, CompressionType.GZip)) + { + await writer.WriteAsync("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")); + } + CompareArchivesByPath( + Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"), + Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz") + ); + } + + [Fact] + public async Task GZip_Writer_Async() + { + using ( + Stream stream = File.Open( + Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"), + FileMode.OpenOrCreate, + FileAccess.Write + ) + ) + using (var writer = new GZipWriter(stream)) + { + await writer.WriteAsync("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")); + } + CompareArchivesByPath( + Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"), + Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz") + ); + } + + [Fact] + public void GZip_Writer_Generic_Bad_Compression_Async() => + Assert.Throws(() => + { + using Stream stream = File.OpenWrite(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz")); + using var writer = WriterFactory.Open(stream, ArchiveType.GZip, CompressionType.BZip2); + }); + + [Fact] + public async Task GZip_Writer_Entry_Path_With_Dir_Async() + { + using ( + Stream stream = File.Open( + Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"), + FileMode.OpenOrCreate, + FileAccess.Write + ) + ) + using (var writer = new GZipWriter(stream)) + { + var path = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"); + await writer.WriteAsync(path, path); + } + CompareArchivesByPath( + Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz"), + Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz") + ); + } +} diff --git a/tests/SharpCompress.Test/ReaderTests.cs b/tests/SharpCompress.Test/ReaderTests.cs index 0bc4dba9e..b51fd8049 100644 --- a/tests/SharpCompress.Test/ReaderTests.cs +++ b/tests/SharpCompress.Test/ReaderTests.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using 
SharpCompress.Common; using SharpCompress.IO; using SharpCompress.Readers; @@ -72,6 +74,72 @@ public void UseReader(IReader reader, CompressionType expectedCompression) } } + protected async Task ReadAsync( + string testArchive, + CompressionType expectedCompression, + ReaderOptions? options = null, + CancellationToken cancellationToken = default + ) + { + testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); + + options ??= new ReaderOptions() { BufferSize = 0x20000 }; + + options.LeaveStreamOpen = true; + await ReadImplAsync(testArchive, expectedCompression, options, cancellationToken); + + options.LeaveStreamOpen = false; + await ReadImplAsync(testArchive, expectedCompression, options, cancellationToken); + VerifyFiles(); + } + + private async Task ReadImplAsync( + string testArchive, + CompressionType expectedCompression, + ReaderOptions options, + CancellationToken cancellationToken = default + ) + { + using var file = File.OpenRead(testArchive); + using var protectedStream = SharpCompressStream.Create( + new ForwardOnlyStream(file, options.BufferSize), + leaveOpen: true, + throwOnDispose: true, + bufferSize: options.BufferSize + ); + using var testStream = new TestStream(protectedStream); + using (var reader = ReaderFactory.Open(testStream, options)) + { + await UseReaderAsync(reader, expectedCompression, cancellationToken); + protectedStream.ThrowOnDispose = false; + Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed"); + } + + var message = + $"{nameof(options.LeaveStreamOpen)} is set to '{options.LeaveStreamOpen}', so {nameof(testStream.IsDisposed)} should be set to '{!testStream.IsDisposed}', but is set to {testStream.IsDisposed}"; + Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message); + } + + public async Task UseReaderAsync( + IReader reader, + CompressionType expectedCompression, + CancellationToken cancellationToken = default + ) + { + while (reader.MoveToNextEntry()) + { + if 
(!reader.Entry.IsDirectory) + { + Assert.Equal(expectedCompression, reader.Entry.CompressionType); + await reader.WriteEntryToDirectoryAsync( + SCRATCH_FILES_PATH, + new ExtractionOptions { ExtractFullPath = true, Overwrite = true }, + cancellationToken + ); + } + } + } + protected void Iterate( string testArchive, string fileOrder, diff --git a/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs b/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs new file mode 100644 index 000000000..280d37b2d --- /dev/null +++ b/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs @@ -0,0 +1,223 @@ +using System; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using SharpCompress.Archives; +using SharpCompress.Archives.Tar; +using SharpCompress.Common; +using SharpCompress.Readers; +using SharpCompress.Readers.Tar; +using SharpCompress.Writers; +using SharpCompress.Writers.Tar; +using Xunit; + +namespace SharpCompress.Test.Tar; + +public class TarArchiveAsyncTests : ArchiveTests +{ + public TarArchiveAsyncTests() => UseExtensionInsteadOfNameToVerify = true; + + [Fact] + public async Task TarArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Tar.tar"); + + [Fact] + public async Task Tar_FileName_Exactly_100_Characters_Async() + { + var archive = "Tar_FileName_Exactly_100_Characters.tar"; + + // create the 100 char filename + var filename = + "filename_with_exactly_100_characters_______________________________________________________________X"; + + // Step 1: create a tar file containing a file with the test name + using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive))) + using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None)) + using (Stream inputStream = new MemoryStream()) + { + var sw = new StreamWriter(inputStream); + await sw.WriteAsync("dummy filecontent"); + await sw.FlushAsync(); + + inputStream.Position = 0; + await writer.WriteAsync(filename, inputStream, 
null); + } + + // Step 2: check if the written tar file can be read correctly + var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive); + using (var archive2 = TarArchive.Open(unmodified)) + { + Assert.Equal(1, archive2.Entries.Count); + Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key)); + + foreach (var entry in archive2.Entries) + { + Assert.Equal( + "dummy filecontent", + await new StreamReader(entry.OpenEntryStream()).ReadLineAsync() + ); + } + } + } + + [Fact] + public async Task Tar_VeryLongFilepathReadback_Async() + { + var archive = "Tar_VeryLongFilepathReadback.tar"; + + // create a very long filename + var longFilename = ""; + for (var i = 0; i < 600; i = longFilename.Length) + { + longFilename += i.ToString("D10") + "-"; + } + + longFilename += ".txt"; + + // Step 1: create a tar file containing a file with a long name + using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive))) + using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.None)) + using (Stream inputStream = new MemoryStream()) + { + var sw = new StreamWriter(inputStream); + await sw.WriteAsync("dummy filecontent"); + await sw.FlushAsync(); + + inputStream.Position = 0; + await writer.WriteAsync(longFilename, inputStream, null); + } + + // Step 2: check if the written tar file can be read correctly + var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive); + using (var archive2 = TarArchive.Open(unmodified)) + { + Assert.Equal(1, archive2.Entries.Count); + Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key)); + + foreach (var entry in archive2.Entries) + { + Assert.Equal( + "dummy filecontent", + await new StreamReader(entry.OpenEntryStream()).ReadLineAsync() + ); + } + } + } + + [Fact] + public async Task Tar_Create_New_Async() + { + var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"); + var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar"); + + using (var 
archive = TarArchive.Create()) + { + archive.AddAllFromDirectory(ORIGINAL_FILES_PATH); + var twopt = new TarWriterOptions(CompressionType.None, true); + twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) }; + await archive.SaveToAsync(scratchPath, twopt); + } + CompareArchivesByPath(unmodified, scratchPath); + } + + [Fact] + public async Task Tar_Random_Write_Add_Async() + { + var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg"); + var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar"); + var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar"); + var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar"); + + using (var archive = TarArchive.Open(unmodified)) + { + archive.AddEntry("jpg\\test.jpg", jpg); + await archive.SaveToAsync(scratchPath, new WriterOptions(CompressionType.None)); + } + CompareArchivesByPath(modified, scratchPath); + } + + [Fact] + public async Task Tar_Random_Write_Remove_Async() + { + var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar"); + var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar"); + var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar"); + + using (var archive = TarArchive.Open(unmodified)) + { + var entry = archive.Entries.Single(x => + x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + ); + archive.RemoveEntry(entry); + await archive.SaveToAsync(scratchPath, new WriterOptions(CompressionType.None)); + } + CompareArchivesByPath(modified, scratchPath); + } + + [Theory] + [InlineData(10)] + [InlineData(128)] + public async Task Tar_Japanese_Name_Async(int length) + { + using var mstm = new MemoryStream(); + var enc = new ArchiveEncoding { Default = Encoding.UTF8 }; + var twopt = new TarWriterOptions(CompressionType.None, true); + twopt.ArchiveEncoding = enc; + var fname = new string((char)0x3042, length); + using (var tw = new TarWriter(mstm, twopt)) + using (var input = new MemoryStream(new 
byte[32])) + { + await tw.WriteAsync(fname, input, null); + } + using (var inputMemory = new MemoryStream(mstm.ToArray())) + { + var tropt = new ReaderOptions { ArchiveEncoding = enc }; + using (var tr = TarReader.Open(inputMemory, tropt)) + { + while (tr.MoveToNextEntry()) + { + Assert.Equal(fname, tr.Entry.Key); + } + } + } + } + + [Fact] + public async Task Tar_Read_One_At_A_Time_Async() + { + var archiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 }; + var tarWriterOptions = new TarWriterOptions(CompressionType.None, true) + { + ArchiveEncoding = archiveEncoding, + }; + var testBytes = Encoding.UTF8.GetBytes("This is a test."); + + using var memoryStream = new MemoryStream(); + using (var tarWriter = new TarWriter(memoryStream, tarWriterOptions)) + using (var testFileStream = new MemoryStream(testBytes)) + { + await tarWriter.WriteAsync("test1.txt", testFileStream, null); + testFileStream.Position = 0; + await tarWriter.WriteAsync("test2.txt", testFileStream, null); + } + + memoryStream.Position = 0; + + var numberOfEntries = 0; + + using (var archiveFactory = TarArchive.Open(memoryStream)) + { + foreach (var entry in archiveFactory.Entries) + { + ++numberOfEntries; + + using var tarEntryStream = entry.OpenEntryStream(); + using var testFileStream = new MemoryStream(); + await tarEntryStream.CopyToAsync(testFileStream); + Assert.Equal(testBytes.Length, testFileStream.Length); + } + } + + Assert.Equal(2, numberOfEntries); + } +} diff --git a/tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs b/tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs new file mode 100644 index 000000000..7d7ed4306 --- /dev/null +++ b/tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs @@ -0,0 +1,272 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; +using SharpCompress.Common; +using SharpCompress.Readers; +using SharpCompress.Readers.Tar; +using SharpCompress.Test.Mocks; +using Xunit; + +namespace 
SharpCompress.Test.Tar;

// Async counterparts of the synchronous TarReader tests: archives are read and
// extracted through the *Async reader APIs (WriteEntryToDirectoryAsync,
// WriteEntryToAsync, CopyToAsync) instead of the blocking ones.
public class TarReaderAsyncTests : ReaderTests
{
    // Scratch output is verified by extension rather than by exact file name.
    public TarReaderAsyncTests() => UseExtensionInsteadOfNameToVerify = true;

    [Fact]
    public async Task Tar_Reader_Async() => await ReadAsync("Tar.tar", CompressionType.None);

    // Extracts only every second file through a forward-only stream, proving the
    // reader can skip entries it never opens.
    [Fact]
    public async Task Tar_Skip_Async()
    {
        using Stream stream = new ForwardOnlyStream(
            File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
        );
        using var reader = ReaderFactory.Open(stream);
        var x = 0;
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                x++;
                if (x % 2 == 0)
                {
                    await reader.WriteEntryToDirectoryAsync(
                        SCRATCH_FILES_PATH,
                        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                    );
                }
            }
        }
    }

    [Fact]
    public async Task Tar_Z_Reader_Async() => await ReadAsync("Tar.tar.Z", CompressionType.Lzw);

    [Fact]
    public async Task Tar_BZip2_Reader_Async() =>
        await ReadAsync("Tar.tar.bz2", CompressionType.BZip2);

    [Fact]
    public async Task Tar_GZip_Reader_Async() =>
        await ReadAsync("Tar.tar.gz", CompressionType.GZip);

    [Fact]
    public async Task Tar_ZStandard_Reader_Async() =>
        await ReadAsync("Tar.tar.zst", CompressionType.ZStandard);

    [Fact]
    public async Task Tar_LZip_Reader_Async() =>
        await ReadAsync("Tar.tar.lz", CompressionType.LZip);

    [Fact]
    public async Task Tar_Xz_Reader_Async() => await ReadAsync("Tar.tar.xz", CompressionType.Xz);

    [Fact]
    public async Task Tar_GZip_OldGnu_Reader_Async() =>
        await ReadAsync("Tar.oldgnu.tar.gz", CompressionType.GZip);

    // Copies every entry's stream out by hand (CopyToAsync) instead of using the
    // reader's extraction helpers, then verifies the files on disk.
    [Fact]
    public async Task Tar_BZip2_Entry_Stream_Async()
    {
        using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
        using (var reader = TarReader.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
                    using var entryStream = reader.OpenEntryStream();
                    var file = Path.GetFileName(reader.Entry.Key);
                    var folder =
                        Path.GetDirectoryName(reader.Entry.Key)
                        ?? throw new ArgumentNullException();
                    var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
                    if (!Directory.Exists(destdir))
                    {
                        Directory.CreateDirectory(destdir);
                    }
                    var destinationFileName = Path.Combine(destdir, file.NotNull());

                    using var fs = File.OpenWrite(destinationFileName);
                    await entryStream.CopyToAsync(fs);
                }
            }
        }
        VerifyFiles();
    }

    [Fact]
    public void Tar_LongNamesWithLongNameExtension_Async()
    {
        // FIX: the generic argument was stripped in transit; the entry keys
        // collected below are strings.
        var filePaths = new List<string>();

        using (
            Stream stream = File.OpenRead(
                Path.Combine(TEST_ARCHIVES_PATH, "Tar.LongPathsWithLongNameExtension.tar")
            )
        )
        using (var reader = TarReader.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null"));
                }
            }
        }

        Assert.Equal(3, filePaths.Count);
        Assert.Contains("a.txt", filePaths);
        Assert.Contains(
            "wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Bar.php",
            filePaths
        );
        Assert.Contains(
            "wp-content/plugins/gravityformsextend/lib/Aws/Symfony/Component/ClassLoader/Tests/Fixtures/Apc/beta/Apc/ApcPrefixCollision/A/B/Foo.php",
            filePaths
        );
    }

    // Opens each entry stream but skips it instead of reading, to exercise
    // SkipEntry on a compressed solid stream.
    [Fact]
    public void Tar_BZip2_Skip_Entry_Stream_Async()
    {
        using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
        using var reader = TarReader.Open(stream);
        // FIX: generic argument stripped in transit; keys are strings.
        var names = new List<string>();
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
                using var entryStream = reader.OpenEntryStream();
                entryStream.SkipEntry();
                names.Add(reader.Entry.Key.NotNull());
            }
        }
        Assert.Equal(3, names.Count);
    }

    // A tar that CONTAINS a rar must still be detected as tar by the factory.
    [Fact]
    public void Tar_Containing_Rar_Reader_Async()
    {
        var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsRar.tar");
        using Stream stream = File.OpenRead(archiveFullPath);
        using var reader = ReaderFactory.Open(stream);
        Assert.True(reader.ArchiveType == ArchiveType.Tar);
    }

    [Fact]
    public void Tar_With_TarGz_With_Flushed_EntryStream_Async()
    {
        var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.ContainsTarGz.tar");
        using Stream stream = File.OpenRead(archiveFullPath);
        using var reader = ReaderFactory.Open(stream);
        Assert.True(reader.MoveToNextEntry());
        Assert.Equal("inner.tar.gz", reader.Entry.Key);

        using var entryStream = reader.OpenEntryStream();
        using var flushingStream = new FlushOnDisposeStream(entryStream);

        // Extract inner.tar.gz
        using var innerReader = ReaderFactory.Open(flushingStream);
        Assert.True(innerReader.MoveToNextEntry());
        Assert.Equal("test", innerReader.Entry.Key);
    }

    [Fact]
    public async Task Tar_Broken_Stream_Async()
    {
        var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
        using Stream stream = File.OpenRead(archiveFullPath);
        using var reader = ReaderFactory.Open(stream);
        var memoryStream = new MemoryStream();

        Assert.True(reader.MoveToNextEntry());
        Assert.True(reader.MoveToNextEntry());
        await reader.WriteEntryToAsync(memoryStream);
        stream.Close();
        // FIX: Assert.Throws requires the expected exception type; the generic
        // argument was stripped in transit. Advancing after the underlying stream
        // is closed is expected to fail mid-archive.
        // NOTE(review): confirm the exact type against the synchronous twin test.
        Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
    }

    [Fact]
    public async Task Tar_Corrupted_Async()
    {
        var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar");
        using Stream stream = File.OpenRead(archiveFullPath);
        using var reader = ReaderFactory.Open(stream);
        var memoryStream = new MemoryStream();

        Assert.True(reader.MoveToNextEntry());
        Assert.True(reader.MoveToNextEntry());
        await reader.WriteEntryToAsync(memoryStream);
        stream.Close();
        // FIX: generic argument stripped in transit (see Tar_Broken_Stream_Async).
        // NOTE(review): confirm the exact type against the synchronous twin test.
        Assert.Throws<IncompleteArchiveException>(() => reader.MoveToNextEntry());
    }

#if !NETFRAMEWORK
    // Extracts an archive containing symlink entries, recreating the links via
    // Mono.Unix on non-Windows and verifying each link's resolved target.
    [Fact]
    public async Task Tar_GZip_With_Symlink_Entries_Async()
    {
        var isWindows = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
            System.Runtime.InteropServices.OSPlatform.Windows
        );

        using Stream stream = File.OpenRead(
            Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz")
        );
        using var reader = TarReader.Open(stream);
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.IsDirectory)
            {
                continue;
            }
            await reader.WriteEntryToDirectoryAsync(
                SCRATCH_FILES_PATH,
                new ExtractionOptions
                {
                    ExtractFullPath = true,
                    Overwrite = true,
                    WriteSymbolicLink = (sourcePath, targetPath) =>
                    {
                        if (!isWindows)
                        {
                            var link = new Mono.Unix.UnixSymbolicLinkInfo(sourcePath);
                            if (File.Exists(sourcePath))
                            {
                                link.Delete(); // equivalent to ln -s -f
                            }
                            link.CreateSymbolicLinkTo(targetPath);
                        }
                    },
                }
            );
            if (!isWindows)
            {
                if (reader.Entry.LinkTarget != null)
                {
                    var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key.NotNull());
                    var link = new Mono.Unix.UnixSymbolicLinkInfo(path);
                    if (link.HasContents)
                    {
                        // need to convert the link to an absolute path for comparison
                        var target = reader.Entry.LinkTarget;
                        var realTarget = Path.GetFullPath(
                            Path.Combine($"{Path.GetDirectoryName(path)}", target)
                        );

                        Assert.Equal(realTarget, link.GetContents().ToString());
                    }
                    else
                    {
                        Assert.True(false, "Symlink has no target");
                    }
                }
            }
        }
    }
#endif
}
diff --git a/tests/SharpCompress.Test/Tar/TarWriterAsyncTests.cs b/tests/SharpCompress.Test/Tar/TarWriterAsyncTests.cs
new file mode 100644
index 000000000..f94a39373
--- /dev/null
+++ b/tests/SharpCompress.Test/Tar/TarWriterAsyncTests.cs
@@ -0,0 +1,83 @@
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers.Tar;
using Xunit;

namespace SharpCompress.Test.Tar;

// Async counterparts of the TarWriter tests, driving WriteAsync/WriteAllAsync.
public class TarWriterAsyncTests : WriterTests
{
    static TarWriterAsyncTests()
    {
#if !NETFRAMEWORK
        //fix issue where these tests could not be ran in isolation
        System.Text.Encoding.RegisterProvider(System.Text.CodePagesEncodingProvider.Instance);
#endif
    }

    public TarWriterAsyncTests()
        :
base(ArchiveType.Tar) => UseExtensionInsteadOfNameToVerify = true; + + [Fact] + public async Task Tar_Writer_Async() => + await WriteAsync( + CompressionType.None, + "Tar.noEmptyDirs.tar", + "Tar.noEmptyDirs.tar", + Encoding.GetEncoding(866) + ); + + [Fact] + public async Task Tar_BZip2_Writer_Async() => + await WriteAsync( + CompressionType.BZip2, + "Tar.noEmptyDirs.tar.bz2", + "Tar.noEmptyDirs.tar.bz2", + Encoding.GetEncoding(866) + ); + + [Fact] + public async Task Tar_LZip_Writer_Async() => + await WriteAsync( + CompressionType.LZip, + "Tar.noEmptyDirs.tar.lz", + "Tar.noEmptyDirs.tar.lz", + Encoding.GetEncoding(866) + ); + + [Fact] + public async Task Tar_Rar_Write_Async() => + await Assert.ThrowsAsync(async () => + await WriteAsync( + CompressionType.Rar, + "Zip.ppmd.noEmptyDirs.zip", + "Zip.ppmd.noEmptyDirs.zip" + ) + ); + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task Tar_Finalize_Archive_Async(bool finalizeArchive) + { + using var stream = new MemoryStream(); + using Stream content = File.OpenRead(Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg")); + using ( + var writer = new TarWriter( + stream, + new TarWriterOptions(CompressionType.None, finalizeArchive) + ) + ) + { + await writer.WriteAsync("doesn't matter", content, null); + } + + var paddedContentWithHeader = (content.Length / 512 * 512) + 512 + 512; + var expectedStreamLength = finalizeArchive + ? 
paddedContentWithHeader + (512 * 2) + : paddedContentWithHeader; + Assert.Equal(expectedStreamLength, stream.Length); + } +} diff --git a/tests/SharpCompress.Test/WriterTests.cs b/tests/SharpCompress.Test/WriterTests.cs index 7a367e6e1..5212fab5d 100644 --- a/tests/SharpCompress.Test/WriterTests.cs +++ b/tests/SharpCompress.Test/WriterTests.cs @@ -1,5 +1,7 @@ using System.IO; using System.Text; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.IO; using SharpCompress.Readers; @@ -51,4 +53,49 @@ protected void Write( } VerifyFiles(); } + + protected async Task WriteAsync( + CompressionType compressionType, + string archive, + string archiveToVerifyAgainst, + Encoding? encoding = null, + CancellationToken cancellationToken = default + ) + { + using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive))) + { + var writerOptions = new WriterOptions(compressionType) { LeaveStreamOpen = true }; + + writerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default; + + using var writer = WriterFactory.Open(stream, _type, writerOptions); + await writer.WriteAllAsync( + ORIGINAL_FILES_PATH, + "*", + SearchOption.AllDirectories, + cancellationToken + ); + } + CompareArchivesByPath( + Path.Combine(SCRATCH2_FILES_PATH, archive), + Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst) + ); + + using (Stream stream = File.OpenRead(Path.Combine(SCRATCH2_FILES_PATH, archive))) + { + var readerOptions = new ReaderOptions(); + + readerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default; + + using var reader = ReaderFactory.Open( + SharpCompressStream.Create(stream, leaveOpen: true), + readerOptions + ); + reader.WriteAllToDirectory( + SCRATCH_FILES_PATH, + new ExtractionOptions { ExtractFullPath = true } + ); + } + VerifyFiles(); + } }