Merge pull request #1077 from wabbajack-tools/7zip-improvements

7zip improvements and extraction rework
Timothy Baldridge 2020-09-09 06:27:41 -06:00 committed by GitHub
commit 4f39eaca17
50 changed files with 1950 additions and 770 deletions

View File

@ -83,8 +83,7 @@ namespace Compression.BSA.Test
var folder = _bsaFolder.Combine(game.ToString(), modid.ToString());
await folder.DeleteDirectory();
folder.CreateDirectory();
await using var files = await FileExtractor.ExtractAll(Queue, filename);
await files.MoveAllTo(folder);
await FileExtractor2.ExtractAll(filename, folder);
foreach (var bsa in folder.EnumerateFiles().Where(f => Consts.SupportedBSAs.Contains(f.Extension)))
{
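
The test change above captures the core API shift in this PR: the old two-step pattern (extract to a temporary location, then MoveAllTo the destination) becomes a single FileExtractor2.ExtractAll call that writes straight into the target folder. A minimal sketch of the new call, assuming the FileExtractor2 and AbsolutePath types from this diff; the wrapper method name is hypothetical:

using System.Threading.Tasks;
using Wabbajack.Common;
using Wabbajack.VirtualFileSystem;

public static class ExtractAllSketch
{
    // Hypothetical helper: extracts one archive directly into a folder.
    public static async Task ExtractOneArchive(AbsolutePath archive, AbsolutePath destination)
    {
        // New API from this PR: no staging folder and no MoveAllTo step,
        // every entry is written under the destination as it is extracted.
        await FileExtractor2.ExtractAll(archive, destination);
    }
}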

View File

@ -27,7 +27,6 @@ namespace Compression.BSA
public class BA2Reader : IBSAReader
{
internal AbsolutePath _filename;
private Stream _stream;
internal BinaryReader _rdr;
internal uint _version;
@ -35,15 +34,16 @@ namespace Compression.BSA
internal EntryType _type;
internal uint _numFiles;
internal ulong _nameTableOffset;
public IStreamFactory _streamFactory;
public bool UseATIFourCC { get; set; } = false;
public bool HasNameTable => _nameTableOffset > 0;
public static async Task<BA2Reader> Load(AbsolutePath filename)
public static async Task<BA2Reader> Load(IStreamFactory streamFactory)
{
var rdr = new BA2Reader(await filename.OpenShared()) {_filename = filename};
var rdr = new BA2Reader(await streamFactory.GetStream()) {_streamFactory = streamFactory};
await rdr.LoadHeaders();
return rdr;
}
@ -206,7 +206,7 @@ namespace Compression.BSA
WriteHeader(bw);
await using var fs = await _bsa._filename.OpenRead();
await using var fs = await _bsa._streamFactory.GetStream();
using var br = new BinaryReader(fs);
foreach (var chunk in _chunks)
{
@ -344,6 +344,14 @@ namespace Compression.BSA
break;
}
}
public async ValueTask<IStreamFactory> GetStreamFactory()
{
var ms = new MemoryStream();
await CopyDataTo(ms);
ms.Position = 0;
return new MemoryStreamFactory(ms, Path);
}
}
[JsonName("BA2DX10Entry")]
@ -483,7 +491,7 @@ namespace Compression.BSA
public async ValueTask CopyDataTo(Stream output)
{
await using var fs = await _bsa._filename.OpenRead();
await using var fs = await _bsa._streamFactory.GetStream();
fs.Seek((long) _offset, SeekOrigin.Begin);
uint len = Compressed ? _size : _realSize;
@ -503,6 +511,14 @@ namespace Compression.BSA
await output.WriteAsync(uncompressed, 0, uncompressed.Length);
}
}
public async ValueTask<IStreamFactory> GetStreamFactory()
{
var ms = new MemoryStream();
await CopyDataTo(ms);
ms.Position = 0;
return new MemoryStreamFactory(ms, Path);
}
}
[JsonName("BA2FileEntryState")]

View File

@ -16,7 +16,6 @@ namespace Compression.BSA
public const int HeaderLength = 0x24;
internal uint _fileCount;
internal AbsolutePath _fileName;
internal uint _folderCount;
internal uint _folderRecordOffset;
private Lazy<FolderRecord[]> _folders = null!;
@ -24,6 +23,7 @@ namespace Compression.BSA
internal string _magic = string.Empty;
internal uint _totalFileNameLength;
internal uint _totalFolderNameLength;
public IStreamFactory _streamFactory = new NativeFileStreamFactory(default);
public VersionType HeaderType { get; private set; }
@ -56,7 +56,7 @@ namespace Compression.BSA
public void Dump(Action<string> print)
{
print($"File Name: {_fileName}");
print($"File Name: {_streamFactory.Name}");
print($"File Count: {_fileCount}");
print($"Magic: {_magic}");
@ -67,18 +67,19 @@ namespace Compression.BSA
}
}
public static async ValueTask<BSAReader> LoadAsync(AbsolutePath filename)
public static async ValueTask<BSAReader> LoadAsync(IStreamFactory factory)
{
using var stream = await filename.OpenRead().ConfigureAwait(false);
await using var stream = await factory.GetStream().ConfigureAwait(false);
using var br = new BinaryReader(stream);
var bsa = new BSAReader { _fileName = filename };
var bsa = new BSAReader { _streamFactory = factory };
bsa.LoadHeaders(br);
return bsa;
}
public static BSAReader Load(AbsolutePath filename)
{
var bsa = new BSAReader { _fileName = filename };
var bsa = new BSAReader { _streamFactory = new NativeFileStreamFactory(filename)};
using var rdr = bsa.GetStream();
bsa.LoadHeaders(rdr);
return bsa;
@ -86,7 +87,7 @@ namespace Compression.BSA
internal BinaryReader GetStream()
{
return new BinaryReader(File.Open(_fileName.ToString(), FileMode.Open, FileAccess.Read, FileShare.Read));
return new BinaryReader(_streamFactory.GetStream().Result);
}
private void LoadHeaders(BinaryReader rdr)

View File

@ -74,7 +74,7 @@ namespace Compression.BSA
public async ValueTask CopyDataTo(Stream output)
{
await using var in_file = await BSA._fileName.OpenRead().ConfigureAwait(false);
await using var in_file = await BSA._streamFactory.GetStream().ConfigureAwait(false);
using var rdr = new BinaryReader(in_file);
rdr.BaseStream.Position = Offset;
@ -165,5 +165,13 @@ namespace Compression.BSA
print($"Raw Size: {RawSize}");
print($"Index: {_index}");
}
public async ValueTask<IStreamFactory> GetStreamFactory()
{
var ms = new MemoryStream();
await CopyDataTo(ms);
ms.Position = 0;
return new MemoryStreamFactory(ms, Path);
}
}
}

View File

@ -1,4 +1,6 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading.Tasks;
@ -13,17 +15,33 @@ namespace Compression.BSA
{
return await BSASignatures.MatchesAsync(filename) switch
{
Definitions.FileType.TES3 => await TES3Reader.Load(filename),
Definitions.FileType.BSA => await BSAReader.LoadAsync(filename),
Definitions.FileType.BA2 => await BA2Reader.Load(filename),
Definitions.FileType.TES3 => await TES3Reader.Load(new NativeFileStreamFactory(filename)),
Definitions.FileType.BSA => await BSAReader.LoadAsync(new NativeFileStreamFactory(filename)),
Definitions.FileType.BA2 => await BA2Reader.Load(new NativeFileStreamFactory(filename)),
_ => throw new InvalidDataException("Filename is not a .bsa or .ba2")
};
}
private static SignatureChecker BSASignatures = new SignatureChecker(Definitions.FileType.BSA, Definitions.FileType.BA2, Definitions.FileType.TES3);
public static async ValueTask<bool> MightBeBSA(AbsolutePath filename)
{
return await BSASignatures.MatchesAsync(filename) != null;
}
public static async ValueTask<IBSAReader> OpenRead(IStreamFactory sFn, Definitions.FileType sig)
{
switch(sig)
{
case Definitions.FileType.TES3:
return await TES3Reader.Load(sFn);
case Definitions.FileType.BSA:
return await BSAReader.LoadAsync(sFn);
case Definitions.FileType.BA2:
return await BA2Reader.Load(sFn);
default:
throw new Exception($"Bad archive format for {sFn.Name}");
}
}
}
}

View File

@ -32,5 +32,6 @@ namespace Compression.BSA
ValueTask CopyDataTo(Stream output);
void Dump(Action<string> print);
ValueTask<IStreamFactory> GetStreamFactory();
}
}

View File

@ -0,0 +1,45 @@
using System;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Compression.BSA
{
public class MemoryStreamFactory : IStreamFactory
{
private readonly MemoryStream _data;
public MemoryStreamFactory(MemoryStream data, IPath path)
{
_data = data;
Name = path;
}
public async ValueTask<Stream> GetStream()
{
return new MemoryStream(_data.GetBuffer(), 0, (int)_data.Length);
}
public DateTime LastModifiedUtc => DateTime.UtcNow;
public IPath Name { get; }
}
public class MemoryBufferFactory : IStreamFactory
{
private readonly byte[] _data;
private int _size;
public MemoryBufferFactory(byte[] data, int size, IPath path)
{
_data = data;
_size = size;
Name = path;
}
public async ValueTask<Stream> GetStream()
{
return new MemoryStream(_data, 0, _size);
}
public DateTime LastModifiedUtc => DateTime.UtcNow;
public IPath Name { get; }
}
}
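
These two factories let data that already lives in memory flow through the same IStreamFactory pipeline as on-disk files; the BSA readers above use MemoryStreamFactory for exactly that in their GetStreamFactory methods. A short consumption sketch, assuming the MemoryBufferFactory constructor shown above; the payload and relative path are illustrative only:

using System.IO;
using System.Text;
using System.Threading.Tasks;
using Compression.BSA;
using Wabbajack.Common;

public static class MemoryFactorySketch
{
    public static async Task<string> ReadBack()
    {
        // Illustrative payload; in this PR it is typically BSA entry data.
        var payload = Encoding.UTF8.GetBytes("hello world");
        IStreamFactory factory =
            new MemoryBufferFactory(payload, payload.Length, (RelativePath)"example.txt");

        // Each GetStream() call hands back a fresh read-only view of the buffer.
        await using var stream = await factory.GetStream();
        using var reader = new StreamReader(stream);
        return await reader.ReadToEndAsync();
    }
}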

View File

@ -0,0 +1,82 @@
using System;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Compression.BSA
{
public class StreamView : Stream
{
private Stream _base;
private long _startPos;
private long _length;
public StreamView(Stream baseStream, long startPos, long length)
{
_base = baseStream;
_startPos = startPos;
_length = length;
}
public override void Flush()
{
throw new System.NotImplementedException();
}
public override int Read(byte[] buffer, int offset, int count)
{
var realCount = Math.Min(count, Length - Position);
return _base.Read(buffer, offset, (int)realCount);
}
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
return Position;
case SeekOrigin.End:
Position = _length - offset;
return Position;
case SeekOrigin.Current:
Position += offset;
return Position;
default:
throw new ArgumentOutOfRangeException(nameof(origin), origin, null);
}
}
public override void SetLength(long value)
{
throw new System.NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new System.NotImplementedException();
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => false;
public override long Length => _length;
public override long Position
{
get
{
return _base.Position - _startPos;
}
set
{
_base.Position = _startPos + value;
}
}
public override async ValueTask DisposeAsync()
{
await _base.DisposeAsync();
}
}
}
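
StreamView exposes a fixed window of a larger stream without copying it, which is how the extraction code passes sub-ranges of an archive around. A minimal sketch, assuming the constructor shown above; the backing data is illustrative. Note that the view does not reposition the base stream on construction, so the caller seeks to its logical start first:

using System.IO;
using System.Text;
using Compression.BSA;

public static class StreamViewSketch
{
    public static string ReadMiddle()
    {
        // Backing stream holds "0123456789"; the view covers the four bytes "2345".
        var backing = new MemoryStream(Encoding.ASCII.GetBytes("0123456789"));
        var view = new StreamView(backing, startPos: 2, length: 4);

        // Position the view at its own offset 0 (base offset 2) before reading.
        view.Seek(0, SeekOrigin.Begin);
        var buffer = new byte[4];
        var read = view.Read(buffer, 0, buffer.Length);
        return Encoding.ASCII.GetString(buffer, 0, read); // "2345"
    }
}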

View File

@ -17,15 +17,15 @@ namespace Compression.BSA
private uint _fileCount;
private TES3FileEntry[] _files;
internal long _dataOffset;
internal AbsolutePath _filename;
public IStreamFactory _streamFactory;
public static async ValueTask<TES3Reader> Load(AbsolutePath filename)
public static async ValueTask<TES3Reader> Load(IStreamFactory factory)
{
await using var fs = await filename.OpenRead();
await using var fs = await factory.GetStream();
using var br = new BinaryReader(fs);
var rdr = new TES3Reader
{
_filename = filename,
_streamFactory = factory,
_versionNumber = br.ReadUInt32(),
_hashTableOffset = br.ReadUInt32(),
_fileCount = br.ReadUInt32()
@ -125,16 +125,27 @@ namespace Compression.BSA
public async ValueTask CopyDataTo(Stream output)
{
await using var fs = await Archive._filename.OpenRead();
await using var fs = await Archive._streamFactory.GetStream();
fs.Position = Archive._dataOffset + Offset;
await fs.CopyToLimitAsync(output, (int)Size);
}
public async ValueTask<IStreamFactory> GetStreamFactory()
{
var ms = new MemoryStream();
await CopyDataTo(ms);
ms.Position = 0;
return new MemoryStreamFactory(ms, Path);
}
public void Dump(Action<string> print)
{
throw new NotImplementedException();
}
public uint Offset { get; set; }
public uint NameOffset { get; set; }
public uint Hash1 { get; set; }

View File

@ -1,4 +1,6 @@
Morrowind BSA,00 01 00 00,TES3, null, null, 0, null
TES 4-5 and FO 3 BSA,42 53 41 00,BSA
FO4 BSA,42 54 44 58,BA2
Relaxed RAR format,52 61 72 21,RAR
Relaxed RAR format,52 61 72 21,RAR
RAR5 or newer, 52 61 72 21 1A 07 01 00,RAR_NEW
RAR4 or older, 52 61 72 21 1A 07 00,RAR_OLD

View File

@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
@ -21,9 +22,14 @@ namespace Wabbajack.Common.FileSignatures
public async Task<Definitions.FileType?> MatchesAsync(AbsolutePath path)
{
await using var fs = await path.OpenShared();
return await MatchesAsync(fs);
}
public async Task<Definitions.FileType?> MatchesAsync(Stream stream)
{
var buffer = new byte[_maxLength];
fs.Position = 0;
await fs.ReadAsync(buffer);
stream.Position = 0;
await stream.ReadAsync(buffer);
foreach (var (fileType, signature) in _signatures)
{
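
The new MatchesAsync(Stream) overload makes signature detection work on any stream, not just files on disk, which is what lets the reworked extractor sniff nested archives it never writes out. A small sketch, assuming the RAR_OLD/RAR_NEW signatures added in this diff; the header bytes are illustrative:

using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common.FileSignatures;

public static class SignatureSketch
{
    public static async Task<Definitions.FileType?> SniffRar()
    {
        var checker = new SignatureChecker(
            Definitions.FileType.RAR_OLD, Definitions.FileType.RAR_NEW);

        // First bytes of a RAR4 archive ("Rar!" + 1A 07 00), illustrative only.
        var header = new byte[] {0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00, 0x00};
        await using var stream = new MemoryStream(header);

        // Returns RAR_OLD here; null when no registered signature matches.
        return await checker.MatchesAsync(stream);
    }
}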

View File

@ -307,6 +307,8 @@ namespace Wabbajack.Common.FileSignatures {
RA,
RAM,
RAR,
RAR_NEW,
RAR_OLD,
RBI,
RDATA,
REG,
@ -443,6 +445,12 @@ namespace Wabbajack.Common.FileSignatures {
// Relaxed RAR format
(FileType.RAR, new byte[] {0x52, 0x61, 0x72, 0x21}),
// RAR5 or newer
(FileType.RAR_NEW, new byte[] {0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00}),
// RAR4 or older
(FileType.RAR_OLD, new byte[] {0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00}),
// JPEG2000 image files
(FileType.JP2, new byte[] {0x00, 0x00, 0x00, 0x0C, 0x6A, 0x50, 0x20, 0x20}),

View File

@ -7,10 +7,12 @@
<#
byte[] StringToByteArray(string hex) {
return Enumerable.Range(0, hex.Length)
.Where(x => x % 2 == 0)
.Select(x => Convert.ToByte(hex.Substring(x, 2), 16))
byte[] StringToByteArray(string hex)
{
return Enumerable.Range(0, hex.Length)
.Where(x => x % 2 == 0)
.Select(x => Convert.ToByte(hex.Substring(x, 2), 16))
.ToArray();
}
var files = new string[] {"bsasigs.txt", "file_sigs_RAW.txt"};

View File

@ -0,0 +1,41 @@
using System;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Wabbajack.Common
{
public interface IStreamFactory
{
ValueTask<Stream> GetStream();
DateTime LastModifiedUtc { get; }
IPath Name { get; }
}
public class NativeFileStreamFactory : IStreamFactory
{
private AbsolutePath _file;
public NativeFileStreamFactory(AbsolutePath file, IPath path)
{
_file = file;
Name = path;
}
public NativeFileStreamFactory(AbsolutePath file)
{
_file = file;
Name = file;
}
public async ValueTask<Stream> GetStream()
{
return await _file.OpenRead();
}
public DateTime LastModifiedUtc => _file.LastModifiedUtc;
public IPath Name { get; }
}
}
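
IStreamFactory is the abstraction the whole rework hangs on: anything that can produce a fresh readable stream on demand (a file on disk, a memory buffer, a BSA entry) can flow through the same extraction and hashing code. A minimal sketch of consuming the interface, assuming the NativeFileStreamFactory shown above; the helper names are hypothetical:

using System.Threading.Tasks;
using Wabbajack.Common;

public static class StreamFactorySketch
{
    // Hypothetical helper: hashes whatever the factory produces, without caring
    // whether the bytes live on disk, in memory, or inside an archive.
    public static async Task<Hash> HashSource(IStreamFactory factory)
    {
        await using var stream = await factory.GetStream();
        return await stream.xxHashAsync();
    }

    public static async Task<Hash> HashFile(AbsolutePath file)
    {
        return await HashSource(new NativeFileStreamFactory(file));
    }
}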

View File

@ -50,7 +50,7 @@ namespace Wabbajack.Common
await patch.CopyToAsync(output);
}
public static async Task<long> CreatePatchCached(Stream srcStream, Hash srcHash, FileStream destStream, Hash destHash,
public static async Task<long> CreatePatchCached(Stream srcStream, Hash srcHash, Stream destStream, Hash destHash,
Stream? patchOutStream = null)
{
var key = PatchKey(srcHash, destHash);
@ -128,7 +128,7 @@ namespace Wabbajack.Common
public static Task CreatePatchCached(byte[] a, byte[] b, Stream output) =>
PatchCache.CreatePatchCached(a, b, output);
public static Task<long> CreatePatchCached(Stream srcStream, Hash srcHash, FileStream destStream, Hash destHash, Stream? patchOutStream = null) =>
public static Task<long> CreatePatchCached(Stream srcStream, Hash srcHash, Stream destStream, Hash destHash, Stream? patchOutStream = null) =>
PatchCache.CreatePatchCached(srcStream, srcHash, destStream, destHash, patchOutStream);
public static bool TryGetPatch(Hash foundHash, Hash fileHash, [MaybeNullWhen(false)] out byte[] ePatch) =>

View File

@ -57,5 +57,15 @@ namespace Wabbajack.Common
return await proc.Start() == 0;
}
}
public static async Task CompactFolder(this AbsolutePath folder, WorkQueue queue, Algorithm algorithm)
{
await folder.EnumerateFiles(true)
.PMap(queue, async path =>
{
Utils.Status($"Compacting {path.FileName}");
await path.Compact(algorithm);
});
}
}
}
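
CompactFolder fans per-file compaction out over a WorkQueue. A usage sketch matching the call the MO2 installer makes later in this diff when UseCompression is enabled; the folder and queue are assumed to come from the caller:

using System.Threading.Tasks;
using Wabbajack.Common;

public static class CompactSketch
{
    public static async Task CompactOutput(AbsolutePath outputFolder, WorkQueue queue)
    {
        // Same algorithm the installer uses for its output folder.
        await outputFolder.CompactFolder(queue, FileCompaction.Algorithm.XPRESS16K);
    }
}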

View File

@ -17,7 +17,7 @@ namespace Wabbajack.Common
_inner = fs;
_message = message;
_lastUpdate = DateTime.UnixEpoch;
_span = TimeSpan.FromMilliseconds(500);
_span = TimeSpan.FromMilliseconds(100);
}
public override void Flush()

View File

@ -18,6 +18,7 @@
<Version>2.2.1.2</Version>
<AssemblyVersion>2.2.1.2</AssemblyVersion>
<FileVersion>2.2.1.2</FileVersion>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<DocumentationFile>Wabbajack.Common.xml</DocumentationFile>

View File

@ -101,12 +101,18 @@ namespace Wabbajack.Lib
return id;
}
internal async Task<RelativePath> IncludeFile(AbsolutePath data)
internal async Task<RelativePath> IncludeFile(Stream data)
{
var id = IncludeId();
await data.CopyToAsync(ModListOutputFolder.Combine(id));
await ModListOutputFolder.Combine(id).WriteAllAsync(data);
return id;
}
internal async Task<RelativePath> IncludeFile(AbsolutePath data)
{
await using var stream = await data.OpenRead();
return await IncludeFile(stream);
}
internal async Task<(RelativePath, AbsolutePath)> IncludeString(string str)
@ -302,6 +308,28 @@ namespace Wabbajack.Lib
}
}
protected async Task InlineFiles()
{
var grouped = ModList.Directives.OfType<InlineFile>()
.Where(f => f.SourceDataID == default)
.GroupBy(f => f.SourceDataFile)
.ToDictionary(f => f.Key);
if (grouped.Count == 0) return;
await VFS.Extract(Queue, grouped.Keys.ToHashSet(), async (vf, sfn) =>
{
await using var stream = await sfn.GetStream();
var id = await IncludeFile(stream);
foreach (var file in grouped[vf])
{
file.SourceDataID = id;
file.SourceDataFile = null;
}
});
}
public bool CheckForNoMatchExit(ICollection<NoMatch> noMatches)
{
if (noMatches.Count > 0)

View File

@ -9,6 +9,7 @@ using Alphaleonis.Win32.Filesystem;
using Wabbajack.Common;
using Wabbajack.Lib.Downloaders;
using Wabbajack.VirtualFileSystem;
using Wabbajack.VirtualFileSystem.SevenZipExtractor;
using Directory = Alphaleonis.Win32.Filesystem.Directory;
using File = Alphaleonis.Win32.Filesystem.File;
using FileInfo = Alphaleonis.Win32.Filesystem.FileInfo;
@ -35,6 +36,8 @@ namespace Wabbajack.Lib
public bool UseCompression { get; set; }
public TempFolder? ExtractedModlistFolder { get; set; } = null;
public AInstaller(AbsolutePath archive, ModList modList, AbsolutePath outputFolder, AbsolutePath downloadFolder, SystemParameters? parameters, int steps, Game game)
: base(steps)
@ -45,12 +48,21 @@ namespace Wabbajack.Lib
DownloadFolder = downloadFolder;
SystemParameters = parameters;
Game = game.MetaData();
}
private ExtractedFiles? ExtractedModListFiles { get; set; } = null;
public async Task ExtractModlist()
{
ExtractedModListFiles = await FileExtractor.ExtractAll(Queue, ModListArchive);
ExtractedModlistFolder = await TempFolder.Create();
await FileExtractor2.GatheringExtract(new NativeFileStreamFactory(ModListArchive), _ => true,
async (path, sfn) =>
{
await using var s = await sfn.GetStream();
var fp = ExtractedModlistFolder.Dir.Combine(path);
fp.Parent.CreateDirectory();
await fp.WriteAllAsync(s);
return 0;
});
}
@ -73,8 +85,11 @@ namespace Wabbajack.Lib
public async Task<byte[]> LoadBytesFromPath(RelativePath path)
{
await using var e = await ExtractedModListFiles![path].OpenRead();
return await e.ReadAllAsync();
var fullPath = ExtractedModlistFolder!.Dir.Combine(path);
if (!fullPath.IsFile)
throw new Exception($"Cannot load inlined data {path} file does not exist");
return await fullPath.ReadAllBytesAsync();
}
public static ModList LoadFromFile(AbsolutePath path)
@ -113,113 +128,51 @@ namespace Wabbajack.Lib
public async Task InstallArchives()
{
Info("Installing Archives");
Info("Grouping Install Files");
var grouped = ModList.Directives
.OfType<FromArchive>()
.GroupBy(e => e.ArchiveHashPath.BaseHash)
.ToDictionary(k => k.Key);
var archives = ModList.Archives
.Select(a => new { Archive = a, AbsolutePath = HashedArchives.GetOrDefault(a.Hash) })
.Where(a => a.AbsolutePath != null)
.ToList();
.Select(a => new {VF = VFS.Index.FileForArchiveHashPath(a.ArchiveHashPath), Directive = a})
.GroupBy(a => a.VF)
.ToDictionary(a => a.Key);
Info("Installing Archives");
await archives.PMap(Queue, UpdateTracker,a => InstallArchive(Queue, a.Archive, a.AbsolutePath, grouped[a.Archive.Hash]));
}
private async Task InstallArchive(WorkQueue queue, Archive archive, AbsolutePath absolutePath, IGrouping<Hash, FromArchive> grouping)
{
Status($"Extracting {archive.Name}");
List<FromArchive> vFiles = grouping.Select(g =>
if (grouped.Count == 0) return;
await VFS.Extract(Queue, grouped.Keys.ToHashSet(), async (vf, sf) =>
{
var file = VFS.Index.FileForArchiveHashPath(g.ArchiveHashPath);
g.FromFile = file;
return g;
}).ToList();
var onFinish = await VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());
Status($"Copying files for {archive.Name}");
async ValueTask CopyFile(AbsolutePath from, AbsolutePath to)
{
if (to.Exists)
await using var s = await sf.GetStream();
foreach (var directive in grouped[vf])
{
if (to.IsReadOnly)
to.IsReadOnly = false;
await to.DeleteAsync();
}
var file = directive.Directive;
s.Position = 0;
if (from.Exists)
{
if (from.IsReadOnly)
from.IsReadOnly = false;
}
switch (file)
{
case PatchedFromArchive pfa:
{
var patchData = await LoadBytesFromPath(pfa.PatchID);
var toFile = file.To.RelativeTo(OutputFolder);
{
await using var os = await toFile.Create();
Utils.ApplyPatch(s, () => new MemoryStream(patchData), os);
}
await @from.CopyToAsync(to);
// If we don't do this, the file will use the last-modified date of the file when it was compressed
// into an archive, which isn't really what we want in the case of files installed archives
to.LastModified = DateTime.Now;
}
if (await VirusScanner.ShouldScan(toFile) &&
await ClientAPI.GetVirusScanResult(toFile) == VirusScanner.Result.Malware)
{
await toFile.DeleteAsync();
Utils.ErrorThrow(new Exception($"Virus scan of patched executable reported possible malware: {toFile.ToString()} ({(long)await toFile.FileHashCachedAsync()})"));
}
}
break;
foreach (var (idx, group) in vFiles.GroupBy(f => f.FromFile).Select((grp, i) => (i, grp)))
{
Utils.Status("Installing files", Percent.FactoryPutInRange(idx, vFiles.Count));
if (group.Key == null)
{
throw new ArgumentNullException("FromFile was null");
}
var firstDest = OutputFolder.Combine(group.First().To);
case FromArchive _:
await directive.Directive.To.RelativeTo(OutputFolder).WriteAllAsync(s, false);
break;
default:
throw new Exception($"No handler for {directive}");
if (group.Key.IsNative)
{
await group.Key.AbsoluteName.HardLinkIfOversize(firstDest);
}
else
{
await group.Key.StagedFile.MoveTo(firstDest);
}
foreach (var copy in group.Skip(1))
{
await CopyFile(firstDest, OutputFolder.Combine(copy.To));
}
foreach (var toPatch in group.OfType<PatchedFromArchive>())
{
await using var patchStream = new MemoryStream();
Status($"Patching {toPatch.To.FileName}");
// Read in the patch data
}
Status($"Verifying unpatched file {toPatch.To.FileName}");
var toFile = OutputFolder.Combine(toPatch.To);
byte[] patchData = await LoadBytesFromPath(toPatch.PatchID);
var oldData = new MemoryStream(await toFile.ReadAllBytesAsync());
// Remove the file we're about to patch
await toFile.DeleteAsync();
// Patch it
await using (var outStream = await toFile.Create())
{
Utils.ApplyPatch(oldData, () => new MemoryStream(patchData), outStream);
}
if (await VirusScanner.ShouldScan(toFile) &&
await ClientAPI.GetVirusScanResult(toFile) == VirusScanner.Result.Malware)
{
await toFile.DeleteAsync();
Utils.ErrorThrow(new Exception($"Virus scan of patched executable reported possible malware: {toFile.ToString()} ({(long)await toFile.FileHashCachedAsync()})"));
}
}
foreach (var file in group)
{
if (file is PatchedFromArchive)
{
await file.To.RelativeTo(OutputFolder).FileHashAsync();
@ -235,12 +188,7 @@ namespace Wabbajack.Lib
await file.To.RelativeTo(OutputFolder).Compact(FileCompaction.Algorithm.XPRESS16K);
}
}
}
Status("Unstaging files");
await onFinish();
});
}
public async Task DownloadArchives()

View File

@ -76,7 +76,7 @@ namespace Wabbajack.Lib.CompilationSteps
Func<Task>? _cleanup = null;
if (defaultInclude)
{
_cleanup = await source.File.Context.Stage(source.File.Children);
//_cleanup = await source.File.Context.Stage(source.File.Children);
}
var matches = await sourceFiles.PMap(_mo2Compiler.Queue, e => _mo2Compiler.RunStack(stack, new RawSourceFile(e, Consts.BSACreationDir.Combine((RelativePath)id, (RelativePath)e.Name))));

View File

@ -14,8 +14,7 @@ namespace Wabbajack.Lib.CompilationSteps
public override async ValueTask<Directive?> Run(RawSourceFile source)
{
var inline = source.EvolveTo<InlineFile>();
await using var file = await source.File.StagedFile.OpenRead();
inline.SourceDataID = await _compiler.IncludeFile(await file.ReadAllAsync());
inline.SourceDataFile = source.File;
return inline;
}
}

View File

@ -161,6 +161,9 @@ namespace Wabbajack.Lib
/// Data that will be written as-is to the destination location;
/// </summary>
public RelativePath SourceDataID { get; set; }
[JsonIgnore]
public VirtualFile? SourceDataFile { get; set; }
}
[JsonName("ArchiveMeta")]

View File

@ -6,10 +6,12 @@ using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using AngleSharp.Common;
using Wabbajack.Common;
using Wabbajack.Lib.CompilationSteps;
using Wabbajack.Lib.Downloaders;
using Wabbajack.Lib.Validation;
using Wabbajack.VirtualFileSystem;
using Path = Alphaleonis.Win32.Filesystem.Path;
namespace Wabbajack.Lib
@ -55,7 +57,7 @@ namespace Wabbajack.Lib
public HashSet<string> SelectedProfiles { get; set; } = new HashSet<string>();
public MO2Compiler(AbsolutePath mo2Folder, string mo2Profile, AbsolutePath outputFile)
: base(steps: 20)
: base(steps: 21)
{
MO2Folder = mo2Folder;
MO2Profile = mo2Profile;
@ -350,6 +352,9 @@ namespace Wabbajack.Lib
Version = ModlistVersion ?? new Version(1,0,0,0),
IsNSFW = ModlistIsNSFW
};
UpdateTracker.NextStep("Including required files");
await InlineFiles();
UpdateTracker.NextStep("Running Validation");
@ -368,7 +373,6 @@ namespace Wabbajack.Lib
return true;
}
public Dictionary<Game, HashSet<Hash>> GameHashes { get; set; } = new Dictionary<Game, HashSet<Hash>>();
public Dictionary<Hash, Game[]> GamesWithHashes { get; set; } = new Dictionary<Hash, Game[]>();
@ -495,17 +499,35 @@ namespace Wabbajack.Lib
.ToArray();
if (toBuild.Length == 0) return;
// Extract all the source files
var indexed = toBuild.GroupBy(f => (VFS.Index.FileForArchiveHashPath(f.ArchiveHashPath)))
.ToDictionary(f => f.Key);
await VFS.Extract(Queue, indexed.Keys.ToHashSet(),
async (vf, sf) =>
{
// For each, extract the destination
var matches = indexed[vf];
using var iqueue = new WorkQueue(1);
foreach (var match in matches)
{
var destFile = FindDestFile(match.To);
// Build the patch
await VFS.Extract(iqueue, new[] {destFile}.ToHashSet(),
async (destvf, destsfn) =>
{
Info($"Patching {match.To}");
Status($"Patching {match.To}");
await using var srcStream = await sf.GetStream();
await using var destStream = await destsfn.GetStream();
var patchSize = await Utils.CreatePatchCached(srcStream, vf.Hash, destStream, destvf.Hash);
Info($"Patch size {patchSize} for {match.To}");
});
}
});
var groups = toBuild
.Where(p => p.PatchID == default)
.GroupBy(p => p.ArchiveHashPath.BaseHash)
.ToList();
Info($"Patching building patches from {groups.Count} archives");
var absolutePaths = AllFiles.ToDictionary(e => e.Path, e => e.AbsolutePath);
await groups.PMap(Queue, group => BuildArchivePatches(group.Key, group, absolutePaths));
// Load in the patches
await InstallDirectives.OfType<PatchedFromArchive>()
.Where(p => p.PatchID == default)
.PMap(Queue, async pfa =>
@ -514,6 +536,7 @@ namespace Wabbajack.Lib
.Select(c => (Utils.TryGetPatch(c.Hash, pfa.Hash, out var data), data, c))
.ToArray();
// Pick the best patch
if (patches.All(p => p.Item1))
{
var (_, bytes, file) = IncludePatches.PickPatch(this, patches);
@ -529,42 +552,19 @@ namespace Wabbajack.Lib
Error($"Missing patches after generation, this should not happen. First failure: {firstFailedPatch.FullPath}");
}
private async Task BuildArchivePatches(Hash archiveSha, IEnumerable<PatchedFromArchive> group,
Dictionary<RelativePath, AbsolutePath> absolutePaths)
private VirtualFile FindDestFile(RelativePath to)
{
await using var files = await VFS.StageWith(@group.Select(g => VFS.Index.FileForArchiveHashPath(g.ArchiveHashPath)));
var byPath = files.GroupBy(f => string.Join("|", f.FilesInFullPath.Skip(1).Select(i => i.Name)))
.ToDictionary(f => f.Key, f => f.First());
// Now Create the patches
await @group.PMap(Queue, async entry =>
{
Info($"Patching {entry.To}");
Status($"Patching {entry.To}");
var srcFile = byPath[string.Join("|", entry.ArchiveHashPath.Paths)];
await using var srcStream = await srcFile.OpenRead();
await using var destStream = await LoadDataForTo(entry.To, absolutePaths);
var patchSize = await Utils.CreatePatchCached(srcStream, srcFile.Hash, destStream, entry.Hash);
Info($"Patch size {patchSize} for {entry.To}");
});
}
private async Task<FileStream> LoadDataForTo(RelativePath to, Dictionary<RelativePath, AbsolutePath> absolutePaths)
{
if (absolutePaths.TryGetValue(to, out var absolute))
return await absolute.OpenRead();
var abs = to.RelativeTo(MO2Folder);
if (abs.Exists)
return VFS.Index.ByRootPath[abs];
if (to.StartsWith(Consts.BSACreationDir))
{
var bsaId = (RelativePath)((string)to).Split('\\')[1];
var bsa = InstallDirectives.OfType<CreateBSA>().First(b => b.TempID == bsaId);
var a = await BSADispatch.OpenRead(MO2Folder.Combine(bsa.To));
var find = (RelativePath)Path.Combine(((string)to).Split('\\').Skip(2).ToArray());
var file = a.Files.First(e => e.Path == find);
var returnStream = new TempStream();
await file.CopyDataTo(returnStream);
returnStream.Position = 0;
return returnStream;
return VFS.Index.ByRootPath[MO2Folder.Combine(bsa.To)].Children.First(c => c.RelativeName == find);
}
throw new ArgumentException($"Couldn't load data for {to}");

View File

@ -40,7 +40,7 @@ namespace Wabbajack.Lib
outputFolder: outputFolder,
downloadFolder: downloadFolder,
parameters: parameters,
steps: 21,
steps: 22,
game: modList.GameType)
{
var gameExe = Consts.GameFolderFilesDir.Combine(modList.GameType.MetaData().MainExecutable!);
@ -178,13 +178,25 @@ namespace Wabbajack.Lib
UpdateTracker.NextStep("Updating System-specific ini settings");
SetScreenSizeInPrefs();
UpdateTracker.NextStep("Compacting files");
await CompactFiles();
UpdateTracker.NextStep("Installation complete! You may exit the program.");
await ExtractedModlistFolder!.DisposeAsync();
await Metrics.Send(Metrics.FinishInstall, ModList.Name);
return true;
}
private async Task CompactFiles()
{
if (this.UseCompression)
{
await OutputFolder.CompactFolder(Queue, FileCompaction.Algorithm.XPRESS16K);
}
}
private void CreateOutputMods()
{
OutputFolder.Combine("profiles")
@ -243,7 +255,8 @@ namespace Wabbajack.Lib
Status($"Writing included .meta file {directive.To}");
var outPath = DownloadFolder.Combine(directive.To);
if (outPath.IsFile) await outPath.DeleteAsync();
await outPath.WriteAllBytesAsync(await LoadBytesFromPath(directive.SourceDataID));
var bytes = await LoadBytesFromPath(directive.SourceDataID);
await outPath.WriteAllBytesAsync(bytes);
});
}

View File

@ -485,6 +485,7 @@ namespace Wabbajack.Test
Consts.TestMode = true;
}
/* Disabled, will be removed in the future
[Fact]
public async Task BethesdaNetDownload()
{
@ -510,7 +511,7 @@ namespace Wabbajack.Test
using var archive = new ZipArchive(fs);
var entries = archive.Entries.Select(e => e.FullName).ToList();
Assert.Equal(entries, new List<string> {@"Data\TestCK.esp", @"Data\TestCK.ini"});
}
}*/
/*
[Fact]

View File

@ -110,9 +110,8 @@ namespace Wabbajack.Test
var destFile = utils.DownloadsFolder.Combine(filename);
await src.CopyToAsync(destFile);
await using var dest = await FileExtractor.ExtractAll(Queue, src);
var modFolder = modName == null ? utils.MO2Folder : utils.ModsFolder.Combine(modName);
await dest.MoveAllTo(modFolder);
await FileExtractor2.ExtractAll(src, modFolder);
return (destFile, modFolder);
}
@ -147,9 +146,8 @@ namespace Wabbajack.Test
await src.CopyToAsync(dest);
var modFolder = utils.ModsFolder.Combine(modName);
await using var files = await FileExtractor.ExtractAll(Queue, src);
await files.MoveAllTo(modFolder);
await FileExtractor2.ExtractAll(src, modFolder);
await dest.WithExtension(Consts.MetaFileExtension).WriteAllTextAsync(ini);
return (dest, modFolder);
}

View File

@ -0,0 +1,136 @@
using System;
using System.IO.Compression;
using System.Linq;
using System.Threading.Tasks;
using Wabbajack.Common;
using Wabbajack.Lib.Downloaders;
using Wabbajack.Lib.NexusApi;
using Xunit;
using Xunit.Abstractions;
namespace Wabbajack.VirtualFileSystem.Test
{
public class FileExtractorTests : IAsyncLifetime
{
private ITestOutputHelper _helper;
private IDisposable _unsub;
public FileExtractorTests(ITestOutputHelper helper)
{
_helper = helper;
_unsub = Utils.LogMessages.Subscribe(f =>
{
try
{
_helper.WriteLine(f.ShortDescription);
}
catch (Exception _)
{
// ignored
}
});
}
public async Task InitializeAsync()
{
}
public async Task DisposeAsync()
{
_unsub.Dispose();
}
[Fact]
public async Task CanGatherDataFromZipFiles()
{
await using var temp = await TempFolder.Create();
await using var archive = new TempFile();
for (int i = 0; i < 10; i ++)
{
await WriteRandomData(temp.Dir.Combine($"{i}.bin"), _rng.Next(10, 1024));
}
await ZipUpFolder(temp.Dir, archive.Path, false);
var results = await FileExtractor2.GatheringExtract(new NativeFileStreamFactory(archive.Path),
_ => true,
async (path, sfn) =>
{
await using var s = await sfn.GetStream();
return await s.xxHashAsync();
});
Assert.Equal(10, results.Count);
foreach (var (path, hash) in results)
{
Assert.Equal(await temp.Dir.Combine(path).FileHashAsync(), hash);
}
}
private static Extension OMODExtension = new Extension(".omod");
private static Extension CRCExtension = new Extension(".crc");
[Fact]
public async Task CanGatherDataFromOMODFiles()
{
var src = await DownloadMod(Game.Oblivion, 18498);
await FileExtractor2.GatheringExtract(new NativeFileStreamFactory(src),
p => p.Extension == OMODExtension, async (path, sfn) =>
{
await FileExtractor2.GatheringExtract(sfn, _ => true, async (ipath, isfn) => {
// We shouldn't have any .crc files because this file should be recognized as a OMOD and extracted correctly
Assert.NotEqual(CRCExtension, ipath.Extension);
return 0;
});
return 0;
});
}
private static readonly Random _rng = new Random();
private static async Task WriteRandomData(AbsolutePath path, int size)
{
var buff = new byte[size];
_rng.NextBytes(buff);
await path.WriteAllBytesAsync(buff);
}
private static async Task AddFile(AbsolutePath filename, string text)
{
filename.Parent.CreateDirectory();
await filename.WriteAllTextAsync(text);
}
private static async Task ZipUpFolder(AbsolutePath folder, AbsolutePath output, bool deleteSource = true)
{
ZipFile.CreateFromDirectory((string)folder, (string)output);
if (deleteSource)
await folder.DeleteDirectory();
}
private static AbsolutePath _stagingFolder = ((RelativePath)"NexusDownloads").RelativeToEntryPoint();
private static async Task<AbsolutePath> DownloadMod(Game game, int mod)
{
using var client = await NexusApiClient.Get();
var results = await client.GetModFiles(game, mod);
var file = results.files.FirstOrDefault(f => f.is_primary) ??
results.files.OrderByDescending(f => f.uploaded_timestamp).First();
var src = _stagingFolder.Combine(file.file_name);
if (src.Exists) return src;
var state = new NexusDownloader.State
{
ModID = mod,
Game = game,
FileID = file.file_id
};
await state.Download(src);
return src;
}
}
}

View File

@ -19,12 +19,23 @@ namespace Wabbajack.VirtualFileSystem.Test
private Context context;
private readonly ITestOutputHelper _helper;
private IDisposable _unsub;
private WorkQueue Queue { get; } = new WorkQueue();
public VFSTests(ITestOutputHelper helper)
{
_helper = helper;
Utils.LogMessages.Subscribe(f => _helper.WriteLine(f.ShortDescription));
_unsub = Utils.LogMessages.Subscribe(f =>
{
try
{
_helper.WriteLine(f.ShortDescription);
}
catch (Exception ex)
{
// ignored
}
});
context = new Context(Queue);
}
@ -36,6 +47,7 @@ namespace Wabbajack.VirtualFileSystem.Test
public async Task DisposeAsync()
{
_unsub.Dispose();
await VFS_TEST_DIR.DeleteDirectory();
}
@ -139,15 +151,15 @@ namespace Wabbajack.VirtualFileSystem.Test
await AddTestRoot();
var res = new FullPath(TEST_ZIP, new[] {(RelativePath)"test.txt"});
var file = context.Index.ByFullPath[res];
var files = new [] {context.Index.ByFullPath[res]};
var cleanup = await context.Stage(new List<VirtualFile> {file});
await using var stream = await file.StagedFile.OpenRead();
Assert.Equal("This is a test", await stream.ReadAllTextAsync());
var queue = new WorkQueue();
await context.Extract(queue, files.ToHashSet(), async (file, factory) =>
{
await using var s = await factory.GetStream();
Assert.Equal("This is a test", await s.ReadAllTextAsync());
});
await cleanup();
}
[Fact]
@ -165,16 +177,13 @@ namespace Wabbajack.VirtualFileSystem.Test
var files = context.Index.ByHash[Hash.FromBase64("qX0GZvIaTKM=")];
var cleanup = await context.Stage(files);
foreach (var file in files)
var queue = new WorkQueue();
await context.Extract(queue, files.ToHashSet(), async (file, factory) =>
{
await using var stream = await file.StagedFile.OpenRead();
Assert.Equal("This is a test", await stream.ReadAllTextAsync());
}
await using var s = await factory.GetStream();
Assert.Equal("This is a test", await s.ReadAllTextAsync());
});
await cleanup();
}
private static async Task AddFile(AbsolutePath filename, string text)

View File

@ -15,6 +15,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Wabbajack.Lib\Wabbajack.Lib.csproj" />
<ProjectReference Include="..\Wabbajack.VirtualFileSystem\Wabbajack.VirtualFileSystem.csproj" />
</ItemGroup>

View File

@ -71,7 +71,7 @@ namespace Wabbajack.VirtualFileSystem
return found;
}
return await VirtualFile.Analyze(this, null, new RootDiskFile(f), f, 0);
return await VirtualFile.Analyze(this, null, new NativeFileStreamFactory(f), f, 0);
});
var newIndex = await IndexRoot.Empty.Integrate(filtered.Concat(allFiles).ToList());
@ -103,7 +103,7 @@ namespace Wabbajack.VirtualFileSystem
return found;
}
return await VirtualFile.Analyze(this, null, new RootDiskFile(f), f, 0);
return await VirtualFile.Analyze(this, null, new NativeFileStreamFactory(f), f, 0);
});
var newIndex = await IndexRoot.Empty.Integrate(filtered.Concat(allFiles).ToList());
@ -196,43 +196,40 @@ namespace Wabbajack.VirtualFileSystem
}
}
public async Task<Func<Task>> Stage(IEnumerable<VirtualFile> files)
/// <summary>
/// Extract the set of files and call the callback for each, handing it a stream factory and the virtual file,
/// top level archives (native archives) will be processed in parallel. Duplicate files will not be processed more than once.
/// </summary>
/// <param name="files"></param>
/// <param name="callback"></param>
/// <returns></returns>
public async Task Extract(WorkQueue queue, HashSet<VirtualFile> files, Func<VirtualFile, IStreamFactory, ValueTask> callback)
{
await _cleanupTask;
var grouped = files.SelectMany(f => f.FilesInFullPath)
var top = new VirtualFile();
var filesByParent = files.SelectMany(f => f.FilesInFullPath)
.Distinct()
.Where(f => f.Parent != null)
.GroupBy(f => f.Parent)
.OrderBy(f => f.Key?.NestingFactor ?? 0)
.ToList();
.GroupBy(f => f.Parent ?? top)
.ToDictionary(f => f.Key);
var paths = new List<IAsyncDisposable>();
foreach (var group in grouped)
async Task HandleFile(VirtualFile file, IStreamFactory sfn)
{
var only = group.Select(f => f.RelativeName);
var extracted = await group.Key.StagedFile.ExtractAll(Queue, only);
paths.Add(extracted);
foreach (var file in group)
file.StagedFile = extracted[file.RelativeName];
}
return async () =>
{
foreach (var p in paths)
if (files.Contains(file)) await callback(file, sfn);
if (filesByParent.TryGetValue(file, out var children))
{
await p.DisposeAsync();
var fileNames = children.ToDictionary(c => c.RelativeName);
await FileExtractor2.GatheringExtract(sfn,
r => fileNames.ContainsKey(r),
async (rel, csf) =>
{
await HandleFile(fileNames[rel], csf);
return 0;
});
}
};
}
public async Task<AsyncDisposableList<VirtualFile>> StageWith(IEnumerable<VirtualFile> files)
{
return new AsyncDisposableList<VirtualFile>(await Stage(files), files);
}
await filesByParent[top].PMap(queue, async file => await HandleFile(file, new NativeFileStreamFactory(file.AbsoluteName)));
}
#region KnownFiles
private List<HashRelativePath> _knownFiles = new List<HashRelativePath>();
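
Context.Extract replaces the old Stage/StageWith pattern: instead of materializing temp files and handing back a cleanup function, it invokes a callback with an IStreamFactory for each requested VirtualFile, recursing through nested archives and processing top-level archives in parallel. A usage sketch in the spirit of the VFS tests in this diff; the destination handling is illustrative:

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Wabbajack.Common;
using Wabbajack.VirtualFileSystem;

public static class VfsExtractSketch
{
    public static async Task DumpFiles(Context context, WorkQueue queue,
        IEnumerable<VirtualFile> wanted, AbsolutePath destination)
    {
        await context.Extract(queue, wanted.ToHashSet(), async (vf, factory) =>
        {
            // The factory may be backed by the file on disk, a memory buffer,
            // or a temp file, depending on where the data was found.
            await using var src = await factory.GetStream();
            var outPath = destination.Combine(vf.Name.FileName);
            outPath.Parent.CreateDirectory();
            await outPath.WriteAllAsync(src);
        });
    }
}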

View File

@ -1,51 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Compression.BSA;
using Wabbajack.Common;
namespace Wabbajack.VirtualFileSystem
{
public class ExtractedBSAFile : IExtractedFile
{
private readonly IFile _file;
public ExtractedBSAFile(IFile file)
{
_file = file;
}
public RelativePath Path => _file.Path;
public async Task<Hash> HashAsync()
{
await using var stream = await OpenRead();
return await stream.xxHashAsync();
}
public DateTime LastModifiedUtc => DateTime.UtcNow;
public long Size => _file.Size;
public async ValueTask<Stream> OpenRead()
{
var ms = new MemoryStream();
await _file.CopyDataTo(ms);
ms.Position = 0;
return ms;
}
public async Task<bool> CanExtract()
{
return false;
}
public Task<ExtractedFiles> ExtractAll(WorkQueue queue, IEnumerable<RelativePath> OnlyFiles, bool throwOnError)
{
throw new Exception("BSAs can't contain archives");
}
public async Task MoveTo(AbsolutePath path)
{
await using var fs = await path.Create();
await _file.CopyDataTo(fs);
}
}
}

View File

@ -1,53 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Wabbajack.VirtualFileSystem
{
public class ExtractedDiskFile : IExtractedFile
{
protected AbsolutePath _path;
public ExtractedDiskFile(AbsolutePath path)
{
if (path == default)
throw new InvalidDataException("Path cannot be empty");
_path = path;
}
public virtual async Task<Hash> HashAsync()
{
return await _path.FileHashAsync();
}
public DateTime LastModifiedUtc => _path.LastModifiedUtc;
public long Size => _path.Size;
public async ValueTask<Stream> OpenRead()
{
return await _path.OpenRead();
}
public async Task<bool> CanExtract()
{
return await FileExtractor.CanExtract(_path);
}
public Task<ExtractedFiles> ExtractAll(WorkQueue queue, IEnumerable<RelativePath> onlyFiles, bool throwOnError)
{
return FileExtractor.ExtractAll(queue, _path, onlyFiles, throwOnError);
}
public async Task MoveTo(AbsolutePath path)
{
if (FileExtractor.MightBeArchive(_path.Extension))
{
path.Parent.CreateDirectory();
await _path.CopyToAsync(path);
return;
}
await _path.MoveToAsync(path, true);
_path = path;
}
}
}

View File

@ -1,64 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Wabbajack.VirtualFileSystem
{
public class ExtractedFiles : IAsyncDisposable, IEnumerable<KeyValuePair<RelativePath, IExtractedFile>>
{
private Dictionary<RelativePath, IExtractedFile> _files;
private IAsyncDisposable _disposable;
private AbsolutePath _tempFolder;
public ExtractedFiles(Dictionary<RelativePath, IExtractedFile> files, IAsyncDisposable disposeOther = null)
{
_files = files;
_disposable = disposeOther;
}
public ExtractedFiles(TempFolder tempPath)
{
_files = tempPath.Dir.EnumerateFiles().ToDictionary(f => f.RelativeTo(tempPath.Dir),
f => (IExtractedFile)new ExtractedDiskFile(f));
_disposable = tempPath;
}
public async ValueTask DisposeAsync()
{
if (_disposable != null)
{
await _disposable.DisposeAsync();
_disposable = null;
}
}
public bool ContainsKey(RelativePath key)
{
return _files.ContainsKey(key);
}
public int Count => _files.Count;
public IExtractedFile this[RelativePath key] => _files[key];
public IEnumerator<KeyValuePair<RelativePath, IExtractedFile>> GetEnumerator()
{
return _files.GetEnumerator();
}
public async Task MoveAllTo(AbsolutePath folder)
{
foreach (var (key, value) in this)
{
await value.MoveTo(key.RelativeTo(folder));
}
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
}
}

View File

@ -1,263 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reactive.Linq;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using Compression.BSA;
using OMODFramework;
using Wabbajack.Common.StatusFeed;
using Wabbajack.Common.StatusFeed.Errors;
using Wabbajack.Common;
using Wabbajack.Common.FileSignatures;
using Utils = Wabbajack.Common.Utils;
namespace Wabbajack.VirtualFileSystem
{
public class FileExtractor
{
private static SignatureChecker archiveSigs = new SignatureChecker(Definitions.FileType.TES3,
Definitions.FileType.BSA,
Definitions.FileType.BA2,
Definitions.FileType.ZIP,
Definitions.FileType.EXE,
Definitions.FileType.RAR,
Definitions.FileType._7Z);
public static async Task<ExtractedFiles> ExtractAll(WorkQueue queue, AbsolutePath source, IEnumerable<RelativePath> OnlyFiles = null, bool throwOnError = true)
{
try
{
var sig = await archiveSigs.MatchesAsync(source);
if (source.Extension == Consts.OMOD)
return await ExtractAllWithOMOD(source);
switch (sig)
{
case Definitions.FileType.BSA:
case Definitions.FileType.TES3:
case Definitions.FileType.BA2:
return await ExtractAllWithBSA(queue, source);
case Definitions.FileType.EXE:
return await ExtractAllExe(source);
case Definitions.FileType._7Z:
case Definitions.FileType.ZIP:
case Definitions.FileType.RAR:
return await ExtractAllWith7Zip(source, OnlyFiles);
}
throw new Exception("Invalid archive format");
}
catch (Exception ex)
{
if (!throwOnError)
return new ExtractedFiles(await TempFolder.Create());
Utils.ErrorThrow(ex, $"Error while extracting {source}");
throw new Exception();
}
}
private static async Task<ExtractedFiles> ExtractAllExe(AbsolutePath source)
{
var isArchive = await TestWith7z(source);
if (isArchive)
{
return await ExtractAllWith7Zip(source, null);
}
var dest = await TempFolder.Create();
Utils.Log($"Extracting {(string)source.FileName}");
var process = new ProcessHelper
{
Path = @"Extractors\innounp.exe".RelativeTo(AbsolutePath.EntryPoint),
Arguments = new object[] {"-x", "-y", "-b", $"-d\"{dest.Dir}\"", source}
};
var result = process.Output.Where(d => d.Type == ProcessHelper.StreamType.Output)
.ForEachAsync(p =>
{
var (_, line) = p;
if (line == null)
return;
if (line.Length <= 4 || line[3] != '%')
return;
int.TryParse(line.Substring(0, 3), out var percentInt);
Utils.Status($"Extracting {source.FileName} - {line.Trim()}", Percent.FactoryPutInRange(percentInt / 100d));
});
await process.Start();
return new ExtractedFiles(dest);
}
private class OMODProgress : ICodeProgress
{
private long _total;
public void SetProgress(long inSize, long outSize)
{
Utils.Status("Extracting OMOD", Percent.FactoryPutInRange(inSize, _total));
}
public void Init(long totalSize, bool compressing)
{
_total = totalSize;
}
public void Dispose()
{
//
}
}
private static async Task<ExtractedFiles> ExtractAllWithOMOD(AbsolutePath source)
{
var dest = await TempFolder.Create();
Utils.Log($"Extracting {(string)source.FileName}");
Framework.Settings.TempPath = (string)dest.Dir;
Framework.Settings.CodeProgress = new OMODProgress();
var omod = new OMOD((string)source);
omod.GetDataFiles();
omod.GetPlugins();
return new ExtractedFiles(dest);
}
private static async Task<ExtractedFiles> ExtractAllWithBSA(WorkQueue queue, AbsolutePath source)
{
try
{
var arch = await BSADispatch.OpenRead(source);
var files = arch.Files.ToDictionary(f => f.Path, f => (IExtractedFile)new ExtractedBSAFile(f));
return new ExtractedFiles(files);
}
catch (Exception ex)
{
Utils.ErrorThrow(ex, $"While Extracting {source}");
throw new Exception();
}
}
private static async Task<ExtractedFiles> ExtractAllWith7Zip(AbsolutePath source, IEnumerable<RelativePath> onlyFiles)
{
TempFile tmpFile = null;
var dest = await TempFolder.Create();
Utils.Log(new GenericInfo($"Extracting {(string)source.FileName}", $"The contents of {(string)source.FileName} are being extracted to {(string)source.FileName} using 7zip.exe"));
var process = new ProcessHelper
{
Path = @"Extractors\7z.exe".RelativeTo(AbsolutePath.EntryPoint),
};
if (onlyFiles != null)
{
//It's stupid that we have to do this, but 7zip's file pattern matching isn't very fuzzy
IEnumerable<string> AllVariants(string input)
{
yield return $"\"{input}\"";
yield return $"\"\\{input}\"";
}
tmpFile = new TempFile();
await tmpFile.Path.WriteAllLinesAsync(onlyFiles.SelectMany(f => AllVariants((string)f)).ToArray());
process.Arguments = new object[]
{
"x", "-bsp1", "-y", $"-o\"{dest.Dir}\"", source, $"@\"{tmpFile.Path}\"", "-mmt=off"
};
}
else
{
process.Arguments = new object[] {"x", "-bsp1", "-y", $"-o\"{dest.Dir}\"", source, "-mmt=off"};
}
var result = process.Output.Where(d => d.Type == ProcessHelper.StreamType.Output)
.ForEachAsync(p =>
{
var (_, line) = p;
if (line == null)
return;
if (line.Length <= 4 || line[3] != '%') return;
int.TryParse(line.Substring(0, 3), out var percentInt);
Utils.Status($"Extracting {(string)source.FileName} - {line.Trim()}", Percent.FactoryPutInRange(percentInt / 100d));
});
var exitCode = await process.Start();
if (exitCode != 0)
{
Utils.ErrorThrow(new _7zipReturnError(exitCode, source, dest.Dir, ""));
}
else
{
Utils.Status($"Extracting {source.FileName} - done", Percent.One, alsoLog: true);
}
if (tmpFile != null)
{
await tmpFile.DisposeAsync();
}
return new ExtractedFiles(dest);
}
/// <summary>
/// Returns true if the given extension type can be extracted
/// </summary>
/// <param name="v"></param>
/// <returns></returns>
public static async Task<bool> CanExtract(AbsolutePath v)
{
var found = await archiveSigs.MatchesAsync(v);
switch (found)
{
case null:
return false;
case Definitions.FileType.EXE:
{
var process = new ProcessHelper
{
Path = @"Extractors\innounp.exe".RelativeTo(AbsolutePath.EntryPoint),
Arguments = new object[] {"-t", v},
};
return await process.Start() == 0;
}
default:
return true;
}
}
public static async Task<bool> TestWith7z(AbsolutePath file)
{
var process = new ProcessHelper()
{
Path = @"Extractors\7z.exe".RelativeTo(AbsolutePath.EntryPoint),
Arguments = new object[] {"t", file},
};
return await process.Start() == 0;
}
private static Extension _exeExtension = new Extension(".exe");
public static bool MightBeArchive(Extension ext)
{
return ext == _exeExtension || Consts.SupportedArchives.Contains(ext) || Consts.SupportedBSAs.Contains(ext);
}
}
}

View File

@ -0,0 +1,153 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Compression.BSA;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using OMODFramework;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Readers;
using Wabbajack.Common;
using Wabbajack.Common.FileSignatures;
using Wabbajack.VirtualFileSystem.SevenZipExtractor;
using Utils = Wabbajack.Common.Utils;
namespace Wabbajack.VirtualFileSystem
{
public static class FileExtractor2
{
public static readonly SignatureChecker ArchiveSigs = new SignatureChecker(Definitions.FileType.TES3,
Definitions.FileType.BSA,
Definitions.FileType.BA2,
Definitions.FileType.ZIP,
//Definitions.FileType.EXE,
Definitions.FileType.RAR_OLD,
Definitions.FileType.RAR_NEW,
Definitions.FileType._7Z);
private static Extension OMODExtension = new Extension(".omod");
public static async Task<Dictionary<RelativePath, T>> GatheringExtract<T>(IStreamFactory sFn,
Predicate<RelativePath> shouldExtract, Func<RelativePath, IStreamFactory, ValueTask<T>> mapfn)
{
if (sFn is NativeFileStreamFactory)
{
Utils.Log($"Extracting {sFn.Name}");
}
await using var archive = await sFn.GetStream();
var sig = await ArchiveSigs.MatchesAsync(archive);
archive.Position = 0;
switch (sig)
{
case Definitions.FileType.RAR_OLD:
case Definitions.FileType.RAR_NEW:
case Definitions.FileType._7Z:
case Definitions.FileType.ZIP:
{
if (sFn.Name.FileName.Extension == OMODExtension)
{
return await GatheringExtractWithOMOD(archive, shouldExtract, mapfn);
}
else
{
return await GatheringExtractWith7Zip<T>(archive, (Definitions.FileType)sig, shouldExtract,
mapfn);
}
}
case Definitions.FileType.TES3:
case Definitions.FileType.BSA:
case Definitions.FileType.BA2:
return await GatheringExtractWithBSA(sFn, (Definitions.FileType)sig, shouldExtract, mapfn);
default:
throw new Exception($"Invalid file format {sFn.Name}");
}
}
private static async Task<Dictionary<RelativePath,T>> GatheringExtractWithOMOD<T>(Stream archive, Predicate<RelativePath> shouldExtract, Func<RelativePath,IStreamFactory,ValueTask<T>> mapfn)
{
var tmpFile = new TempFile();
await tmpFile.Path.WriteAllAsync(archive);
var dest = await TempFolder.Create();
Utils.Log($"Extracting {(string)tmpFile.Path}");
Framework.Settings.TempPath = (string)dest.Dir;
Framework.Settings.CodeProgress = new OMODProgress();
var omod = new OMOD((string)tmpFile.Path);
omod.GetDataFiles();
omod.GetPlugins();
var results = new Dictionary<RelativePath, T>();
foreach (var file in dest.Dir.EnumerateFiles())
{
var path = file.RelativeTo(dest.Dir);
if (!shouldExtract(path)) continue;
var result = await mapfn(path, new NativeFileStreamFactory(file, path));
results.Add(path, result);
}
return results;
}
private class OMODProgress : ICodeProgress
{
private long _total;
public void SetProgress(long inSize, long outSize)
{
Utils.Status("Extracting OMOD", Percent.FactoryPutInRange(inSize, _total));
}
public void Init(long totalSize, bool compressing)
{
_total = totalSize;
}
public void Dispose()
{
//
}
}
private static async Task<Dictionary<RelativePath,T>> GatheringExtractWithBSA<T>(IStreamFactory sFn, Definitions.FileType sig, Predicate<RelativePath> shouldExtract, Func<RelativePath,IStreamFactory,ValueTask<T>> mapfn)
{
var archive = await BSADispatch.OpenRead(sFn, sig);
var results = new Dictionary<RelativePath, T>();
foreach (var entry in archive.Files)
{
if (!shouldExtract(entry.Path))
continue;
var result = await mapfn(entry.Path, await entry.GetStreamFactory());
results.Add(entry.Path, result);
}
return results;
}
private static async Task<Dictionary<RelativePath,T>> GatheringExtractWith7Zip<T>(Stream stream, Definitions.FileType sig, Predicate<RelativePath> shouldExtract, Func<RelativePath,IStreamFactory,ValueTask<T>> mapfn)
{
return await new GatheringExtractor<T>(stream, sig, shouldExtract, mapfn).Extract();
}
public static async Task ExtractAll(AbsolutePath src, AbsolutePath dest)
{
await GatheringExtract(new NativeFileStreamFactory(src), _ => true, async (path, factory) =>
{
var abs = path.RelativeTo(dest);
abs.Parent.CreateDirectory();
await using var stream = await factory.GetStream();
await abs.WriteAllAsync(stream);
return 0;
});
}
}
}
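
GatheringExtract is the general entry point of the new extractor: it sniffs the stream's signature, dispatches to the 7zip, OMOD, or BSA path, calls mapfn only for entries the predicate accepts, and returns the mapped results keyed by relative path. A sketch that collects the extracted size of every .esp in an archive, assuming the API shown above; the extension filter is illustrative:

using System.Collections.Generic;
using System.Threading.Tasks;
using Wabbajack.Common;
using Wabbajack.VirtualFileSystem;

public static class GatheringSketch
{
    private static readonly Extension Esp = new Extension(".esp");

    public static async Task<Dictionary<RelativePath, long>> EspSizes(AbsolutePath archive)
    {
        return await FileExtractor2.GatheringExtract(
            new NativeFileStreamFactory(archive),
            path => path.Extension == Esp,          // only plugin files are extracted
            async (path, factory) =>
            {
                await using var stream = await factory.GetStream();
                return stream.Length;               // map each entry to its byte length
            });
    }
}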

View File

@ -0,0 +1,230 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Compression.BSA;
using Wabbajack.Common;
using Wabbajack.Common.FileSignatures;
using Wabbajack.VirtualFileSystem.SevenZipExtractor;
namespace Wabbajack.VirtualFileSystem
{
public class GatheringExtractor<T> : IArchiveExtractCallback
{
private ArchiveFile _archive;
private Predicate<RelativePath> _shouldExtract;
private Func<RelativePath, IStreamFactory, ValueTask<T>> _mapFn;
private Dictionary<RelativePath, T> _results;
private Dictionary<uint, (RelativePath, ulong)> _indexes;
private Stream _stream;
private Definitions.FileType _sig;
private Exception _killException;
public GatheringExtractor(Stream stream, Definitions.FileType sig, Predicate<RelativePath> shouldExtract, Func<RelativePath,IStreamFactory, ValueTask<T>> mapfn)
{
_shouldExtract = shouldExtract;
_mapFn = mapfn;
_results = new Dictionary<RelativePath, T>();
_stream = stream;
_sig = sig;
}
public async Task<Dictionary<RelativePath, T>> Extract()
{
var source = new TaskCompletionSource<bool>();
var th = new Thread(() =>
{
try
{
_archive = ArchiveFile.Open(_stream, _sig).Result;
_indexes = _archive.Entries
.Select((entry, idx) => (entry, (uint)idx))
.Where(f => !f.entry.IsFolder)
.Select(t => ((RelativePath)t.entry.FileName, t.Item2, t.entry.Size))
.Where(t => _shouldExtract(t.Item1))
.ToDictionary(t => t.Item2, t => (t.Item1, t.Size));
_archive._archive.Extract(null, 0xFFFFFFFF, 0, this);
_archive.Dispose();
if (_killException != null)
{
source.SetException(_killException);
}
else
{
source.SetResult(true);
}
}
catch (Exception ex)
{
source.SetException(ex);
}
}) {Priority = ThreadPriority.BelowNormal, Name = "7Zip Extraction Worker Thread"};
th.Start();
await source.Task;
return _results;
}
public void SetTotal(ulong total)
{
}
public void SetCompleted(ref ulong completeValue)
{
}
public int GetStream(uint index, out ISequentialOutStream outStream, AskMode askExtractMode)
{
if (_indexes.ContainsKey(index))
{
outStream = new GatheringExtractorStream<T>(this, index);
return 0;
}
outStream = null;
return 0;
}
public void PrepareOperation(AskMode askExtractMode)
{
}
public void SetOperationResult(OperationResult resultEOperationResult)
{
}
private class GatheringExtractorStream<T> : ISequentialOutStream, IOutStream
{
private GatheringExtractor<T> _extractor;
private uint _index;
private bool _written;
private ulong _totalSize;
private Stream _tmpStream;
private TempFile _tmpFile;
private IStreamFactory _factory;
private bool _diskCached;
public GatheringExtractorStream(GatheringExtractor<T> extractor, uint index)
{
_extractor = extractor;
_index = index;
_totalSize = extractor._indexes[index].Item2;
_diskCached = _totalSize >= 500_000_000;
}
private IPath GetPath()
{
return _extractor._indexes[_index].Item1;
}
public int Write(byte[] data, uint size, IntPtr processedSize)
{
try
{
if (size == _totalSize)
WriteSingleCall(data, size);
else if (_diskCached)
WriteDiskCached(data, size);
else
WriteMemoryCached(data, size);
if (processedSize != IntPtr.Zero)
{
Marshal.WriteInt32(processedSize, (int)size);
}
return 0;
}
catch (Exception ex)
{
Utils.Log($"Error during extraction {ex}");
_extractor.Kill(ex);
return 1;
}
}
private void WriteSingleCall(byte[] data, in uint size)
{
var result = _extractor._mapFn(_extractor._indexes[_index].Item1, new MemoryBufferFactory(data, (int)size, GetPath())).Result;
AddResult(result);
Cleanup();
}
private void Cleanup()
{
_tmpStream?.Dispose();
_tmpFile?.DisposeAsync().AsTask().Wait();
}
private void AddResult(T result)
{
_extractor._results.Add(_extractor._indexes[_index].Item1, result);
}
private void WriteMemoryCached(byte[] data, in uint size)
{
if (_tmpStream == null)
_tmpStream = new MemoryStream();
_tmpStream.Write(data, 0, (int)size);
if (_tmpStream.Length != (long)_totalSize) return;
_tmpStream.Flush();
_tmpStream.Position = 0;
var result = _extractor._mapFn(_extractor._indexes[_index].Item1, new MemoryStreamFactory((MemoryStream)_tmpStream, GetPath())).Result;
AddResult(result);
Cleanup();
}
private void WriteDiskCached(byte[] data, in uint size)
{
if (_tmpFile == null)
{
_tmpFile = new TempFile();
_tmpStream = _tmpFile.Path.Create().Result;
}
_tmpStream.Write(data, 0, (int)size);
if (_tmpStream.Length != (long)_totalSize) return;
_tmpStream.Flush();
_tmpStream.Close();
var result = _extractor._mapFn(_extractor._indexes[_index].Item1, new NativeFileStreamFactory(_tmpFile.Path, GetPath())).Result;
AddResult(result);
Cleanup();
}
public void Seek(long offset, uint seekOrigin, IntPtr newPosition)
{
}
public int SetSize(long newSize)
{
return 0;
}
}
private void Kill(Exception ex)
{
_killException = ex;
}
}
}
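The extractor above is driven entirely through the 7-Zip callback interface, so a caller only supplies the predicate and the map function. A minimal caller sketch (not part of this PR) that hashes every entry of an archive in a single pass might look like the following; FileExtractor2.ArchiveSigs, OpenShared and xxHashAsync are assumed from elsewhere in the codebase, and HashArchiveContents is a hypothetical name:

public static async Task<Dictionary<RelativePath, Hash>> HashArchiveContents(AbsolutePath archive)
{
    await using var stream = await archive.OpenShared();
    var sig = await FileExtractor2.ArchiveSigs.MatchesAsync(stream);
    stream.Position = 0;
    if (sig == null)
        throw new Exception($"Not a recognized archive: {archive}");

    var extractor = new GatheringExtractor<Hash>(
        stream,
        sig.Value,
        _ => true,                               // extract every entry
        async (path, factory) =>
        {
            await using var data = await factory.GetStream();
            return await data.xxHashAsync();     // hash from memory or the temp file
        });

    return await extractor.Extract();
}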

View File

@ -0,0 +1,32 @@
using System;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Wabbajack.VirtualFileSystem
{
public class UnmanagedStreamFactory : IStreamFactory
{
private readonly unsafe byte* _data;
private readonly long _size;
public unsafe UnmanagedStreamFactory(byte* data, long size)
{
_data = data;
_size = size;
}
public async ValueTask<Stream> GetStream()
{
unsafe
{
return new UnmanagedMemoryStream(_data, _size);
}
}
public DateTime LastModifiedUtc => DateTime.UtcNow;
public IPath Name => (RelativePath)"Unmanaged Memory Stream";
}
}
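This factory does not own the memory it points at, so whatever allocates or pins the buffer must outlive every stream it hands out. A sketch of one way a caller might feed managed bytes through it; the pinning approach and method name are assumptions, and GCHandle needs System.Runtime.InteropServices:

public static async Task<Hash> HashPinnedBuffer(byte[] buffer)
{
    var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    try
    {
        UnmanagedStreamFactory factory;
        unsafe
        {
            factory = new UnmanagedStreamFactory((byte*)handle.AddrOfPinnedObject(), buffer.Length);
        }
        await using var stream = await factory.GetStream();
        return await stream.xxHashAsync();
    }
    finally
    {
        handle.Free();   // unpin only after the stream is done
    }
}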

View File

@ -1,24 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Wabbajack.VirtualFileSystem
{
public interface IExtractedFile
{
public Task<Hash> HashAsync();
public DateTime LastModifiedUtc { get; }
public long Size { get; }
public ValueTask<Stream> OpenRead();
public Task<bool> CanExtract();
public Task<ExtractedFiles> ExtractAll(WorkQueue queue, IEnumerable<RelativePath> Only = null, bool throwOnError = false);
public Task MoveTo(AbsolutePath path);
}
}

View File

@ -1,20 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Wabbajack.VirtualFileSystem
{
public class RootDiskFile : ExtractedDiskFile
{
public RootDiskFile(AbsolutePath path) : base(path)
{
}
public override async Task<Hash> HashAsync()
{
return await _path.FileHashCachedAsync();
}
}
}

View File

@ -0,0 +1,176 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using Wabbajack.Common;
using Wabbajack.Common.FileSignatures;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
public class ArchiveFile : IDisposable
{
private SevenZipHandle _sevenZipHandle;
internal IInArchive _archive;
private InStreamWrapper _archiveStream;
private IList<Entry> _entries;
private static readonly AbsolutePath LibraryFilePath = @"Extractors\7z.dll".RelativeTo(AbsolutePath.EntryPoint);
private static SignatureChecker _checker = new SignatureChecker(Formats.FileTypeGuidMapping.Keys.ToArray());
public static async Task<ArchiveFile> Open(Stream archiveStream, Definitions.FileType format)
{
var self = new ArchiveFile();
self.InitializeAndValidateLibrary();
self._archive = self._sevenZipHandle.CreateInArchive(Formats.FileTypeGuidMapping[format]);
self._archiveStream = new InStreamWrapper(archiveStream);
return self;
}
public IList<Entry> Entries
{
get
{
if (this._entries != null)
{
return this._entries;
}
ulong checkPos = 32 * 1024;
int open = this._archive.Open(this._archiveStream, ref checkPos, null);
if (open != 0)
{
throw new Exception("Unable to open archive");
}
uint itemsCount = this._archive.GetNumberOfItems();
this._entries = new List<Entry>();
for (uint fileIndex = 0; fileIndex < itemsCount; fileIndex++)
{
string fileName = this.GetProperty<string>(fileIndex, ItemPropId.kpidPath);
bool isFolder = this.GetProperty<bool>(fileIndex, ItemPropId.kpidIsFolder);
bool isEncrypted = this.GetProperty<bool>(fileIndex, ItemPropId.kpidEncrypted);
ulong size = this.GetProperty<ulong>(fileIndex, ItemPropId.kpidSize);
ulong packedSize = this.GetProperty<ulong>(fileIndex, ItemPropId.kpidPackedSize);
DateTime creationTime = this.GetPropertySafe<DateTime>(fileIndex, ItemPropId.kpidCreationTime);
DateTime lastWriteTime = this.GetPropertySafe<DateTime>(fileIndex, ItemPropId.kpidLastWriteTime);
DateTime lastAccessTime = this.GetPropertySafe<DateTime>(fileIndex, ItemPropId.kpidLastAccessTime);
uint crc = this.GetPropertySafe<uint>(fileIndex, ItemPropId.kpidCRC);
uint attributes = this.GetPropertySafe<uint>(fileIndex, ItemPropId.kpidAttributes);
string comment = this.GetPropertySafe<string>(fileIndex, ItemPropId.kpidComment);
string hostOS = this.GetPropertySafe<string>(fileIndex, ItemPropId.kpidHostOS);
string method = this.GetPropertySafe<string>(fileIndex, ItemPropId.kpidMethod);
bool isSplitBefore = this.GetPropertySafe<bool>(fileIndex, ItemPropId.kpidSplitBefore);
bool isSplitAfter = this.GetPropertySafe<bool>(fileIndex, ItemPropId.kpidSplitAfter);
this._entries.Add(new Entry(this._archive, fileIndex)
{
FileName = fileName,
IsFolder = isFolder,
IsEncrypted = isEncrypted,
Size = size,
PackedSize = packedSize,
CreationTime = creationTime,
LastWriteTime = lastWriteTime,
LastAccessTime = lastAccessTime,
CRC = crc,
Attributes = attributes,
Comment = comment,
HostOS = hostOS,
Method = method,
IsSplitBefore = isSplitBefore,
IsSplitAfter = isSplitAfter
});
}
return this._entries;
}
}
private T GetPropertySafe<T>(uint fileIndex, ItemPropId name)
{
try
{
return this.GetProperty<T>(fileIndex, name);
}
catch (InvalidCastException)
{
return default(T);
}
}
private T GetProperty<T>(uint fileIndex, ItemPropId name)
{
PropVariant propVariant = new PropVariant();
this._archive.GetProperty(fileIndex, name, ref propVariant);
object value = propVariant.GetObject();
if (propVariant.VarType == VarEnum.VT_EMPTY)
{
propVariant.Clear();
return default(T);
}
propVariant.Clear();
if (value == null)
{
return default(T);
}
Type type = typeof(T);
bool isNullable = type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>);
Type underlyingType = isNullable ? Nullable.GetUnderlyingType(type) : type;
T result = (T)Convert.ChangeType(value.ToString(), underlyingType);
return result;
}
private void InitializeAndValidateLibrary()
{
try
{
this._sevenZipHandle = new SevenZipHandle((string)LibraryFilePath);
}
catch (Exception e)
{
throw new Exception("Unable to initialize SevenZipHandle", e);
}
}
~ArchiveFile()
{
this.Dispose(false);
}
protected void Dispose(bool disposing)
{
if (this._archiveStream != null)
{
this._archiveStream.Dispose();
}
if (this._archive != null)
{
Marshal.ReleaseComObject(this._archive);
}
if (this._sevenZipHandle != null)
{
this._sevenZipHandle.Dispose();
}
}
public void Dispose()
{
this.Dispose(true);
GC.SuppressFinalize(this);
}
}
}
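A usage sketch, not taken from the PR: open a stream directly with ArchiveFile and list its entries. The signature lookup via FileExtractor2.ArchiveSigs is assumed from elsewhere in the codebase, error handling is omitted, and the bundled Extractors\7z.dll must sit next to the executable:

public static async Task ListEntries(AbsolutePath archivePath)
{
    await using var stream = await archivePath.OpenShared();
    var sig = await FileExtractor2.ArchiveSigs.MatchesAsync(stream);
    stream.Position = 0;

    using var archive = await ArchiveFile.Open(stream, sig.Value);
    foreach (var entry in archive.Entries.Where(e => !e.IsFolder))
        Utils.Log($"{entry.FileName} ({entry.Size} bytes, method: {entry.Method})");
}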

View File

@ -0,0 +1,89 @@
using System;
using System.IO;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
public class Entry
{
private readonly IInArchive archive;
private readonly uint index;
internal Entry(IInArchive archive, uint index)
{
this.archive = archive;
this.index = index;
}
/// <summary>
/// Name of the file with its relative path within the archive
/// </summary>
public string FileName { get; internal set; }
/// <summary>
/// True if entry is a folder, false if it is a file
/// </summary>
public bool IsFolder { get; internal set; }
/// <summary>
/// Original entry size
/// </summary>
public ulong Size { get; internal set; }
/// <summary>
/// Entry size in an archived state
/// </summary>
public ulong PackedSize { get; internal set; }
/// <summary>
/// Date and time of the file (entry) creation
/// </summary>
public DateTime CreationTime { get; internal set; }
/// <summary>
/// Date and time of the last change of the file (entry)
/// </summary>
public DateTime LastWriteTime { get; internal set; }
/// <summary>
/// Date and time of the last access of the file (entry)
/// </summary>
public DateTime LastAccessTime { get; internal set; }
/// <summary>
/// CRC hash of the entry
/// </summary>
public UInt32 CRC { get; internal set; }
/// <summary>
/// Attributes of the entry
/// </summary>
public UInt32 Attributes { get; internal set; }
/// <summary>
/// True if entry is encrypted, otherwise false
/// </summary>
public bool IsEncrypted { get; internal set; }
/// <summary>
/// Comment of the entry
/// </summary>
public string Comment { get; internal set; }
/// <summary>
/// Compression method of the entry
/// </summary>
public string Method { get; internal set; }
/// <summary>
/// Host operating system of the entry
/// </summary>
public string HostOS { get; internal set; }
/// <summary>
/// True if there are parts of this file in previous split archive parts
/// </summary>
public bool IsSplitBefore { get; set; }
/// <summary>
/// True if there are parts of this file in next split archive parts
/// </summary>
public bool IsSplitAfter { get; set; }
}
}

View File

@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using Wabbajack.Common.FileSignatures;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
public class Formats
{
internal static Dictionary<Definitions.FileType, Guid> FileTypeGuidMapping = new Dictionary<Definitions.FileType, Guid>
{
{Definitions.FileType._7Z, new Guid("23170f69-40c1-278a-1000-000110070000")},
{Definitions.FileType.BZ2, new Guid("23170f69-40c1-278a-1000-000110020000")},
{Definitions.FileType.RAR_OLD, new Guid("23170f69-40c1-278a-1000-000110030000")},
{Definitions.FileType.RAR_NEW, new Guid("23170f69-40c1-278a-1000-000110CC0000")},
{Definitions.FileType.ZIP, new Guid("23170f69-40c1-278a-1000-000110010000")},
};
}
}
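This table is what ArchiveFile.Open uses to turn a detected file type into the 7-Zip handler CLSID passed to CreateInArchive. A tiny helper sketch for the same lookup (the helper name is hypothetical):

internal static Guid HandlerFor(Definitions.FileType type)
{
    if (!Formats.FileTypeGuidMapping.TryGetValue(type, out var handlerId))
        throw new NotSupportedException($"No 7-Zip handler registered for {type}");
    return handlerId;
}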

View File

@ -0,0 +1,23 @@
using System.Runtime.InteropServices;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
[ComImport]
[Guid("23170F69-40C1-278A-0000-000600200000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IArchiveExtractCallback //: IProgress
{
void SetTotal(ulong total);
void SetCompleted([In] ref ulong completeValue);
[PreserveSig]
int GetStream(
uint index,
[MarshalAs(UnmanagedType.Interface)] out ISequentialOutStream outStream,
AskMode askExtractMode);
// GetStream OUT: S_OK - OK, S_FALSE - skip this file
void PrepareOperation(AskMode askExtractMode);
void SetOperationResult(OperationResult resultEOperationResult);
}
}

View File

@ -0,0 +1,20 @@
using System;
using System.Runtime.InteropServices;
using System.Security;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
internal static class Kernel32Dll
{
[DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)]
internal static extern SafeLibraryHandle LoadLibrary([MarshalAs(UnmanagedType.LPTStr)] string lpFileName);
[DllImport("kernel32.dll", CharSet = CharSet.Ansi, SetLastError = true)]
internal static extern IntPtr GetProcAddress(SafeLibraryHandle hModule, [MarshalAs(UnmanagedType.LPStr)] string procName);
[SuppressUnmanagedCodeSecurity]
[DllImport("kernel32.dll")]
[return: MarshalAs(UnmanagedType.Bool)]
internal static extern bool FreeLibrary(IntPtr hModule);
}
}

View File

@ -0,0 +1,21 @@
using System;
using System.Runtime.ConstrainedExecution;
using Microsoft.Win32.SafeHandles;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
internal sealed class SafeLibraryHandle : SafeHandleZeroOrMinusOneIsInvalid
{
public SafeLibraryHandle() : base(true)
{
}
/// <summary>Release library handle</summary>
/// <returns>true if the handle was released</returns>
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
protected override bool ReleaseHandle()
{
return Kernel32Dll.FreeLibrary(this.handle);
}
}
}

View File

@ -0,0 +1,68 @@
using System;
using System.ComponentModel;
using System.Runtime.InteropServices;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
internal class SevenZipHandle : IDisposable
{
private SafeLibraryHandle sevenZipSafeHandle;
public SevenZipHandle(string sevenZipLibPath)
{
this.sevenZipSafeHandle = Kernel32Dll.LoadLibrary(sevenZipLibPath);
if (this.sevenZipSafeHandle.IsInvalid)
{
throw new Win32Exception();
}
IntPtr functionPtr = Kernel32Dll.GetProcAddress(this.sevenZipSafeHandle, "GetHandlerProperty");
// Not valid dll
if (functionPtr == IntPtr.Zero)
{
this.sevenZipSafeHandle.Close();
throw new ArgumentException();
}
}
~SevenZipHandle()
{
this.Dispose(false);
}
protected void Dispose(bool disposing)
{
if ((this.sevenZipSafeHandle != null) && !this.sevenZipSafeHandle.IsClosed)
{
this.sevenZipSafeHandle.Close();
}
this.sevenZipSafeHandle = null;
}
public void Dispose()
{
this.Dispose(true);
GC.SuppressFinalize(this);
}
public IInArchive CreateInArchive(Guid classId)
{
if (this.sevenZipSafeHandle == null)
{
throw new ObjectDisposedException("SevenZipHandle");
}
IntPtr procAddress = Kernel32Dll.GetProcAddress(this.sevenZipSafeHandle, "CreateObject");
CreateObjectDelegate createObject = (CreateObjectDelegate) Marshal.GetDelegateForFunctionPointer(procAddress, typeof (CreateObjectDelegate));
object result;
Guid interfaceId = typeof (IInArchive).GUID;
createObject(ref classId, ref interfaceId, out result);
return result as IInArchive;
}
}
}

View File

@ -0,0 +1,461 @@
// Version 1.5
using System;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Permissions;
using System.Threading;
namespace Wabbajack.VirtualFileSystem.SevenZipExtractor
{
[StructLayout(LayoutKind.Sequential)]
internal struct PropArray
{
uint length;
IntPtr pointerValues;
}
[StructLayout(LayoutKind.Explicit)]
internal struct PropVariant
{
[DllImport("ole32.dll")]
private static extern int PropVariantClear(ref PropVariant pvar);
[FieldOffset(0)] public ushort vt;
[FieldOffset(8)] public IntPtr pointerValue;
[FieldOffset(8)] public byte byteValue;
[FieldOffset(8)] public long longValue;
[FieldOffset(8)] public System.Runtime.InteropServices.ComTypes.FILETIME filetime;
[FieldOffset(8)] public PropArray propArray;
public VarEnum VarType
{
get
{
return (VarEnum) this.vt;
}
}
public void Clear()
{
switch (this.VarType)
{
case VarEnum.VT_EMPTY:
break;
case VarEnum.VT_NULL:
case VarEnum.VT_I2:
case VarEnum.VT_I4:
case VarEnum.VT_R4:
case VarEnum.VT_R8:
case VarEnum.VT_CY:
case VarEnum.VT_DATE:
case VarEnum.VT_ERROR:
case VarEnum.VT_BOOL:
//case VarEnum.VT_DECIMAL:
case VarEnum.VT_I1:
case VarEnum.VT_UI1:
case VarEnum.VT_UI2:
case VarEnum.VT_UI4:
case VarEnum.VT_I8:
case VarEnum.VT_UI8:
case VarEnum.VT_INT:
case VarEnum.VT_UINT:
case VarEnum.VT_HRESULT:
case VarEnum.VT_FILETIME:
this.vt = 0;
break;
default:
PropVariantClear(ref this);
break;
}
}
public object GetObject()
{
switch (this.VarType)
{
case VarEnum.VT_EMPTY:
return null;
case VarEnum.VT_FILETIME:
return DateTime.FromFileTime(this.longValue);
default:
GCHandle PropHandle = GCHandle.Alloc(this, GCHandleType.Pinned);
try
{
return Marshal.GetObjectForNativeVariant(PropHandle.AddrOfPinnedObject());
}
finally
{
PropHandle.Free();
}
}
}
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000000050000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IProgress
{
void SetTotal(ulong total);
void SetCompleted([In] ref ulong completeValue);
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000600100000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IArchiveOpenCallback
{
// ref ulong replaced with IntPtr because handlers often pass a null value;
// read the actual value with Marshal.ReadInt64
void SetTotal(
IntPtr files, // [In] ref ulong files, can use 'ulong* files' but it is unsafe
IntPtr bytes); // [In] ref ulong bytes
void SetCompleted(
IntPtr files, // [In] ref ulong files
IntPtr bytes); // [In] ref ulong bytes
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000500100000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface ICryptoGetTextPassword
{
[PreserveSig]
int CryptoGetTextPassword(
[MarshalAs(UnmanagedType.BStr)] out string password);
//[return : MarshalAs(UnmanagedType.BStr)]
//string CryptoGetTextPassword();
}
public enum AskMode : int
{
kExtract = 0,
kTest,
kSkip
}
public enum OperationResult : int
{
kOK = 0,
kUnSupportedMethod,
kDataError,
kCRCError
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000600300000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IArchiveOpenVolumeCallback
{
void GetProperty(
ItemPropId propID, // PROPID
IntPtr value); // PROPVARIANT
[PreserveSig]
int GetStream(
[MarshalAs(UnmanagedType.LPWStr)] string name,
[MarshalAs(UnmanagedType.Interface)] out IInStream inStream);
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000600400000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IInArchiveGetStream
{
[return: MarshalAs(UnmanagedType.Interface)]
ISequentialInStream GetStream(uint index);
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000300010000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface ISequentialInStream
{
//[PreserveSig]
//int Read(
// [Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] byte[] data,
// uint size,
// IntPtr processedSize); // ref uint processedSize
uint Read(
[Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] byte[] data,
uint size);
/*
Out: if size != 0, return_value = S_OK and (*processedSize == 0),
then there are no more bytes in the stream.
If (size > 0) and there are bytes left in the stream,
this function must read at least 1 byte.
It may read fewer bytes than remain in the stream, so call Read in a loop
if you need an exact amount of data.
*/
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000300020000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface ISequentialOutStream
{
[PreserveSig]
int Write(
[In, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)]byte[] data,
uint size,
IntPtr processedSize); // ref uint processedSize
/*
If (size > 0) this function must write at least 1 byte.
It may write less than "size" bytes, so call Write in a loop
if you need to write an exact amount of data.
*/
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000300030000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IInStream //: ISequentialInStream
{
//[PreserveSig]
//int Read(
// [Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] byte[] data,
// uint size,
// IntPtr processedSize); // ref uint processedSize
uint Read(
[Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] byte[] data,
uint size);
//[PreserveSig]
void Seek(
long offset,
uint seekOrigin,
IntPtr newPosition); // ref long newPosition
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000300040000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IOutStream //: ISequentialOutStream
{
[PreserveSig]
int Write(
[Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] byte[] data,
uint size,
IntPtr processedSize); // ref uint processedSize
//[PreserveSig]
void Seek(
long offset,
uint seekOrigin,
IntPtr newPosition); // ref long newPosition
[PreserveSig]
int SetSize(long newSize);
}
internal enum ItemPropId : uint
{
kpidNoProperty = 0,
kpidHandlerItemIndex = 2,
kpidPath,
kpidName,
kpidExtension,
kpidIsFolder,
kpidSize,
kpidPackedSize,
kpidAttributes,
kpidCreationTime,
kpidLastAccessTime,
kpidLastWriteTime,
kpidSolid,
kpidCommented,
kpidEncrypted,
kpidSplitBefore,
kpidSplitAfter,
kpidDictionarySize,
kpidCRC,
kpidType,
kpidIsAnti,
kpidMethod,
kpidHostOS,
kpidFileSystem,
kpidUser,
kpidGroup,
kpidBlock,
kpidComment,
kpidPosition,
kpidPrefix,
kpidTotalSize = 0x1100,
kpidFreeSpace,
kpidClusterSize,
kpidVolumeName,
kpidLocalName = 0x1200,
kpidProvider,
kpidUserDefined = 0x10000
}
[ComImport]
[Guid("23170F69-40C1-278A-0000-000600600000")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
//[AutomationProxy(true)]
internal interface IInArchive
{
[PreserveSig]
int Open(
IInStream stream,
/*[MarshalAs(UnmanagedType.U8)]*/ [In] ref ulong maxCheckStartPosition,
[MarshalAs(UnmanagedType.Interface)] IArchiveOpenCallback openArchiveCallback);
void Close();
//void GetNumberOfItems([In] ref uint numItem);
uint GetNumberOfItems();
void GetProperty(
uint index,
ItemPropId propID, // PROPID
ref PropVariant value); // PROPVARIANT
[PreserveSig]
int Extract(
[MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] uint[] indices, //[In] ref uint indices,
uint numItems,
int testMode,
[MarshalAs(UnmanagedType.Interface)] IArchiveExtractCallback extractCallback);
// indices must be sorted
// numItems = 0xFFFFFFFF means all files
// testMode != 0 means "test files operation"
void GetArchiveProperty(
uint propID, // PROPID
ref PropVariant value); // PROPVARIANT
//void GetNumberOfProperties([In] ref uint numProperties);
uint GetNumberOfProperties();
void GetPropertyInfo(
uint index,
[MarshalAs(UnmanagedType.BStr)] out string name,
out ItemPropId propID, // PROPID
out ushort varType); //VARTYPE
//void GetNumberOfArchiveProperties([In] ref uint numProperties);
uint GetNumberOfArchiveProperties();
void GetArchivePropertyInfo(
uint index,
[MarshalAs(UnmanagedType.BStr)] string name,
ref uint propID, // PROPID
ref ushort varType); //VARTYPE
}
internal enum ArchivePropId : uint
{
kName = 0,
kClassID,
kExtension,
kAddExtension,
kUpdate,
kKeepName,
kStartSignature,
kFinishSignature,
kAssociate
}
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate int CreateObjectDelegate(
[In] ref Guid classID,
[In] ref Guid interfaceID,
//out IntPtr outObject);
[MarshalAs(UnmanagedType.Interface)] out object outObject);
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate int GetHandlerPropertyDelegate(
ArchivePropId propID,
ref PropVariant value); // PROPVARIANT
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate int GetNumberOfFormatsDelegate(out uint numFormats);
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate int GetHandlerProperty2Delegate(
uint formatIndex,
ArchivePropId propID,
ref PropVariant value); // PROPVARIANT
internal class StreamWrapper : IDisposable
{
protected Stream BaseStream;
protected StreamWrapper(Stream baseStream)
{
this.BaseStream = baseStream;
}
public void Dispose()
{
this.BaseStream.Close();
}
public virtual void Seek(long offset, uint seekOrigin, IntPtr newPosition)
{
// Do not truncate the position to uint here; archives larger than 4 GB would
// otherwise report a wrong position back to 7-Zip.
long position = this.BaseStream.Seek(offset, (SeekOrigin) seekOrigin);
if (newPosition != IntPtr.Zero)
{
Marshal.WriteInt64(newPosition, position);
}
}
}
internal class InStreamWrapper : StreamWrapper, ISequentialInStream, IInStream
{
public InStreamWrapper(Stream baseStream) : base(baseStream)
{
}
public uint Read(byte[] data, uint size)
{
return (uint) this.BaseStream.Read(data, 0, (int) size);
}
}
internal class OutStreamWrapper : StreamWrapper, ISequentialOutStream, IOutStream
{
public OutStreamWrapper(Stream baseStream) : base(baseStream)
{
}
public int SetSize(long newSize)
{
this.BaseStream.SetLength(newSize);
return 0;
}
public int Write(byte[] data, uint size, IntPtr processedSize)
{
// Writing through this wrapper is intentionally unsupported; extraction output
// goes through GatheringExtractorStream instead.
throw new NotImplementedException();
/*
this.BaseStream.Write(data, 0, (int) size);
if (processedSize != IntPtr.Zero)
{
Marshal.WriteInt32(processedSize, (int) size);
}
return 0;
*/
}
}
}
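Following the comments on IInArchive.Extract above, a selective extract passes a sorted index array, while passing (null, 0xFFFFFFFF), as GatheringExtractor does, extracts everything. Sketch only; archive and callback stand for an opened IInArchive and any IArchiveExtractCallback implementation:

internal static void ExtractSelected(IInArchive archive, IArchiveExtractCallback callback)
{
    uint[] indices = { 2, 5, 9 };                   // hypothetical, must be sorted ascending
    int hr = archive.Extract(indices, (uint)indices.Length, 0, callback);
    if (hr != 0)
        throw new Exception($"7-Zip Extract returned 0x{hr:X8}");
}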

View File

@ -5,6 +5,7 @@ using System.IO;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using K4os.Hash.Crc;
using RocksDbSharp;
using Wabbajack.Common;
@ -48,23 +49,6 @@ namespace Wabbajack.VirtualFileSystem
public Context Context { get; set; }
private IExtractedFile _stagedFile = null;
public IExtractedFile StagedFile
{
get
{
if (IsNative) return new ExtractedDiskFile(AbsoluteName);
if (_stagedFile == null)
throw new InvalidDataException("File is unstaged");
return _stagedFile;
}
set
{
_stagedFile = value;
}
}
/// <summary>
/// Returns the nesting factor for this file. Native files will have a nesting of 1, the factor
/// goes up for each nesting of a file in an archive.
@ -141,7 +125,7 @@ namespace Wabbajack.VirtualFileSystem
itm.ThisAndAllChildrenReduced(fn);
}
private static VirtualFile ConvertFromIndexedFile(Context context, IndexedVirtualFile file, IPath path, VirtualFile vparent, IExtractedFile extractedFile)
private static VirtualFile ConvertFromIndexedFile(Context context, IndexedVirtualFile file, IPath path, VirtualFile vparent, IStreamFactory extractedFile)
{
var vself = new VirtualFile
{
@ -161,7 +145,7 @@ namespace Wabbajack.VirtualFileSystem
return vself;
}
private static bool TryGetFromCache(Context context, VirtualFile parent, IPath path, IExtractedFile extractedFile, Hash hash, out VirtualFile found)
private static bool TryGetFromCache(Context context, VirtualFile parent, IPath path, IStreamFactory extractedFile, Hash hash, out VirtualFile found)
{
var result = _vfsCache.Get(hash.ToArray());
if (result == null)
@ -190,34 +174,25 @@ namespace Wabbajack.VirtualFileSystem
}
public static async Task<VirtualFile> Analyze(Context context, VirtualFile parent, IExtractedFile extractedFile,
public static async Task<VirtualFile> Analyze(Context context, VirtualFile parent, IStreamFactory extractedFile,
IPath relPath, int depth = 0)
{
var hash = await extractedFile.HashAsync();
await using var stream = await extractedFile.GetStream();
var hash = await stream.xxHashAsync();
stream.Position = 0;
if (!context.UseExtendedHashes && FileExtractor.MightBeArchive(relPath.FileName.Extension))
{
// Disabled because it isn't enabled on the server
IndexedVirtualFile result = null; //await TryGetContentsFromServer(hash);
if (result != null)
{
Utils.Log($"Downloaded VFS data for {relPath.FileName}");
return ConvertFromIndexedFile(context, result, relPath, parent, extractedFile);
}
}
if (TryGetFromCache(context, parent, relPath, extractedFile, hash, out var vself))
var sig = await FileExtractor2.ArchiveSigs.MatchesAsync(stream);
stream.Position = 0;
if (sig.HasValue && TryGetFromCache(context, parent, relPath, extractedFile, hash, out var vself))
return vself;
var self = new VirtualFile
{
Context = context,
Name = relPath,
Parent = parent,
Size = extractedFile.Size,
Size = stream.Length,
LastModified = extractedFile.LastModifiedUtc.AsUnixTime(),
LastAnalyzed = DateTime.Now.AsUnixTime(),
Hash = hash
@ -226,20 +201,23 @@ namespace Wabbajack.VirtualFileSystem
self.FillFullPath(depth);
if (context.UseExtendedHashes)
self.ExtendedHashes = await ExtendedHashes.FromFile(extractedFile);
self.ExtendedHashes = await ExtendedHashes.FromStream(stream);
if (!await extractedFile.CanExtract()) return self;
// Can't extract, so return
if (!sig.HasValue) return self;
try
{
await using var extracted = await extractedFile.ExtractAll(context.Queue, throwOnError:false);
var list = await FileExtractor2.GatheringExtract(extractedFile,
_ => true,
async (path, sfactory) => await Analyze(context, self, sfactory, path, depth + 1));
var list = await extracted
.PMap(context.Queue,
file => Analyze(context, self, file.Value, file.Key, depth + 1));
self.Children = list.ToImmutableList();
self.Children = list.Values.ToImmutableList();
}
catch (EndOfStreamException)
{
return self;
}
catch (Exception ex)
{
@ -391,11 +369,6 @@ namespace Wabbajack.VirtualFileSystem
var path = new HashRelativePath(FilesInFullPath.First().Hash, paths);
return path;
}
public async ValueTask<Stream> OpenRead()
{
return await StagedFile.OpenRead();
}
}
public class ExtendedHashes
@ -405,10 +378,10 @@ namespace Wabbajack.VirtualFileSystem
public string MD5 { get; set; }
public string CRC { get; set; }
public static async ValueTask<ExtendedHashes> FromFile(IExtractedFile file)
public static async ValueTask<ExtendedHashes> FromStream(Stream stream)
{
var hashes = new ExtendedHashes();
await using var stream = await file.OpenRead();
stream.Position = 0;
hashes.SHA256 = System.Security.Cryptography.SHA256.Create().ComputeHash(stream).ToHex();
stream.Position = 0;
hashes.SHA1 = System.Security.Cryptography.SHA1.Create().ComputeHash(stream).ToHex();
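With the staging API gone, a top-level caller hands Analyze an IStreamFactory instead of an IExtractedFile. A hypothetical call indexing a file on disk might look like this (the path and method name are made up; context is an existing VFS Context):

public static async Task IndexOneFile(Context context)
{
    var path = @"mods\SomeMod.7z".RelativeTo(AbsolutePath.EntryPoint);   // hypothetical path
    var root = await VirtualFile.Analyze(context, null, new NativeFileStreamFactory(path), path);
    Utils.Log($"Indexed {path.FileName}: {root.Hash}, {root.Size} bytes");
}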

View File

@ -4,6 +4,7 @@
<TargetFramework>netstandard2.1</TargetFramework>
<Platforms>x64</Platforms>
<RuntimeIdentifier>win10-x64</RuntimeIdentifier>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<DocumentationFile>Wabbajack.VirtualFileSystem.xml</DocumentationFile>
@ -16,7 +17,8 @@
<PackageReference Include="Genbox.AlphaFS" Version="2.2.2.1" />
<PackageReference Include="K4os.Hash.Crc" Version="1.1.4" />
<PackageReference Include="OMODFramework" Version="2.0.1" />
<PackageReference Include="System.Collections.Immutable" Version="5.0.0-preview.8.20407.11" />
<PackageReference Include="SharpCompress" Version="0.26.0" />
<PackageReference Include="System.Collections.Immutable" Version="5.0.0-preview.6.20305.11" />
</ItemGroup>
<ItemGroup>
<None Update="Extractors\7z.dll">
@ -29,4 +31,7 @@
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<Folder Include="Extractors\OMOD" />
</ItemGroup>
</Project>