Merge pull request #177 from wabbajack-tools/make-it-sync

Make it sync
Timothy Baldridge 2019-11-15 17:41:17 -07:00 committed by GitHub
commit 2d3d92068a
21 changed files with 308 additions and 317 deletions
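The conversion applied across these files is mechanical: async Task members become void (or plain-value) members, awaited calls become direct calls, and the UnorderedParallelDo/channel pipelines collapse into PMap calls over plain collections. FileExtractor.ExtractAll (changed below, condensed here to a single branch) is the smallest instance of the pattern:

// Before: every caller must await (or .Wait(), as several call sites did).
public static async Task ExtractAll(string source, string dest)
{
    await ExtractAllWith7Zip(source, dest);
}

// After: a direct call; parallelism moves up into PMap over file collections.
public static void ExtractAll(string source, string dest)
{
    ExtractAllWith7Zip(source, dest);
}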

View File

@ -2,12 +2,10 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Newtonsoft.Json;
using Wabbajack.Common;
using Wabbajack.Common.CSP;
using Wabbajack.Lib.Downloaders;
using Wabbajack.Lib.NexusApi;
using Directory = Alphaleonis.Win32.Filesystem.Directory;
@ -28,7 +26,7 @@ namespace Compression.BSA.Test
public TestContext TestContext { get; set; }
[ClassInitialize]
public static async Task Setup(TestContext TestContext)
public static void Setup(TestContext TestContext)
{
Utils.LogMessages.Subscribe(f => TestContext.WriteLine(f));
@ -52,7 +50,7 @@ namespace Compression.BSA.Test
var folder = Path.Combine(BSAFolder, info.Item1.ToString(), info.Item2.ToString());
if (!Directory.Exists(folder))
Directory.CreateDirectory(folder);
await FileExtractor.ExtractAll(filename, folder);
FileExtractor.ExtractAll(filename, folder);
}
}
@ -88,7 +86,7 @@ namespace Compression.BSA.Test
[TestMethod]
[DataTestMethod]
[DynamicData(nameof(BSAs), DynamicDataSourceType.Method)]
public async Task BSACompressionRecompression(string bsa)
public void BSACompressionRecompression(string bsa)
{
TestContext.WriteLine($"From {bsa}");
TestContext.WriteLine("Cleaning Output Dir");
@ -97,9 +95,10 @@ namespace Compression.BSA.Test
Directory.CreateDirectory(TempDir);
TestContext.WriteLine($"Reading {bsa}");
using (var a = await BSADispatch.OpenRead(bsa))
string TempFile = Path.Combine("tmp.bsa");
using (var a = BSADispatch.OpenRead(bsa))
{
await a.Files.UnorderedParallelDo(async file =>
a.Files.PMap(file =>
{
var abs_name = Path.Combine(TempDir, file.Path);
ViaJson(file.State);
@ -110,30 +109,29 @@ namespace Compression.BSA.Test
using (var fs = File.OpenWrite(abs_name))
{
await file.CopyDataToAsync(fs);
file.CopyDataTo(fs);
}
Assert.AreEqual(file.Size, new FileInfo(abs_name).Length);
});
Console.WriteLine($"Building {bsa}");
string TempFile = Path.Combine("tmp.bsa");
using (var w = ViaJson(a.State).MakeBuilder())
{
await a.Files.UnorderedParallelDo(async file =>
a.Files.PMap(file =>
{
var abs_path = Path.Combine(TempDir, file.Path);
using (var str = File.OpenRead(abs_path))
{
await w.AddFile(ViaJson(file.State), str);
w.AddFile(ViaJson(file.State), str);
}
});
await w.Build(TempFile);
w.Build(TempFile);
}
Console.WriteLine($"Verifying {bsa}");
using (var b = await BSADispatch.OpenRead(TempFile))
using (var b = BSADispatch.OpenRead(TempFile))
{
Console.WriteLine($"Performing A/B tests on {bsa}");
@ -143,8 +141,8 @@ namespace Compression.BSA.Test
Assert.AreEqual(a.Files.Count(), b.Files.Count());
var idx = 0;
await a.Files.Zip(b.Files, (ai, bi) => (ai, bi))
.UnorderedParallelDo(async pair =>
a.Files.Zip(b.Files, (ai, bi) => (ai, bi))
.PMap(pair =>
{
idx++;
Assert.AreEqual(JsonConvert.SerializeObject(pair.ai.State),
@ -153,17 +151,17 @@ namespace Compression.BSA.Test
Assert.AreEqual(pair.ai.Path, pair.bi.Path);
//Equal(pair.ai.Compressed, pair.bi.Compressed);
Assert.AreEqual(pair.ai.Size, pair.bi.Size);
CollectionAssert.AreEqual(await GetData(pair.ai), await GetData(pair.bi));
CollectionAssert.AreEqual(GetData(pair.ai), GetData(pair.bi));
});
}
}
}
private static async Task<byte[]> GetData(IFile pairAi)
private static byte[] GetData(IFile pairAi)
{
using (var ms = new MemoryStream())
{
await pairAi.CopyDataToAsync(ms);
pairAi.CopyDataTo(ms);
return ms.ToArray();
}
}
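PMap, which replaces the channel pipelines throughout this commit, is not itself part of the diff. Judging from its call sites it is a blocking parallel map, with an Action overload for statement lambdas. A minimal PLINQ-based sketch consistent with that usage; the project's real implementation may differ:

using System;
using System.Collections.Generic;
using System.Linq;

public static class ParallelExtensions
{
    // Blocking parallel map: run f over every element, keep input order,
    // and return only when all elements are done.
    public static List<TR> PMap<TI, TR>(this IEnumerable<TI> coll, Func<TI, TR> f)
    {
        return coll.AsParallel().AsOrdered().Select(f).ToList();
    }

    // Overload for call sites like a.Files.PMap(file => { ... }) that
    // pass a statement lambda and ignore the result.
    public static void PMap<TI>(this IEnumerable<TI> coll, Action<TI> f)
    {
        coll.AsParallel().ForAll(f);
    }
}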

View File

@ -1,13 +1,9 @@
using System;
using System.Collections.Generic;
using System.Dynamic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using Wabbajack.Common.CSP;
namespace Compression.BSA
{
@ -19,7 +15,7 @@ namespace Compression.BSA
int Index { get; }
Task WriteData(BinaryWriter wtr);
void WriteData(BinaryWriter wtr);
void WriteHeader(BinaryWriter wtr);
}
@ -37,22 +33,22 @@ namespace Compression.BSA
{
}
public async Task AddFile(FileStateObject state, Stream src)
public void AddFile(FileStateObject state, Stream src)
{
switch (_state.Type)
{
case EntryType.GNRL:
var result = await BA2FileEntryBuilder.Create((BA2FileEntryState)state, src);
var result = BA2FileEntryBuilder.Create((BA2FileEntryState)state, src);
lock(_entries) _entries.Add(result);
break;
case EntryType.DX10:
var resultdx10 = await BA2DX10FileEntryBuilder.Create((BA2DX10EntryState)state, src);
var resultdx10 = BA2DX10FileEntryBuilder.Create((BA2DX10EntryState)state, src);
lock(_entries) _entries.Add(resultdx10);
break;
}
}
public async Task Build(string filename)
public void Build(string filename)
{
SortEntries();
using (var fs = File.OpenWrite(filename))
@ -72,7 +68,7 @@ namespace Compression.BSA
foreach (var entry in _entries)
{
await entry.WriteData(bw);
entry.WriteData(bw);
}
if (_state.HasNameTable)
@ -86,7 +82,7 @@ namespace Compression.BSA
{
var bytes = Encoding.UTF7.GetBytes(entry.FullName);
bw.Write((ushort)bytes.Length);
await bw.BaseStream.WriteAsync(bytes, 0, bytes.Length);
bw.BaseStream.Write(bytes, 0, bytes.Length);
}
}
}
@ -103,10 +99,9 @@ namespace Compression.BSA
private BA2DX10EntryState _state;
private List<ChunkBuilder> _chunks;
public static async Task<BA2DX10FileEntryBuilder> Create(BA2DX10EntryState state, Stream src)
public static BA2DX10FileEntryBuilder Create(BA2DX10EntryState state, Stream src)
{
var builder = new BA2DX10FileEntryBuilder();
builder._state = state;
var builder = new BA2DX10FileEntryBuilder {_state = state};
var header_size = DDS.HeaderSizeForFormat((DXGI_FORMAT) state.PixelFormat) + 4;
new BinaryReader(src).ReadBytes((int)header_size);
@ -115,7 +110,7 @@ namespace Compression.BSA
builder._chunks = new List<ChunkBuilder>();
foreach (var chunk in state.Chunks)
builder._chunks.Add(await ChunkBuilder.Create(state, chunk, src));
builder._chunks.Add(ChunkBuilder.Create(state, chunk, src));
return builder;
}
@ -143,10 +138,10 @@ namespace Compression.BSA
chunk.WriteHeader(bw);
}
public async Task WriteData(BinaryWriter wtr)
public void WriteData(BinaryWriter wtr)
{
foreach (var chunk in _chunks)
await chunk.WriteData(wtr);
chunk.WriteData(wtr);
}
}
@ -158,13 +153,13 @@ namespace Compression.BSA
private uint _packSize;
private long _offsetOffset;
public static async Task<ChunkBuilder> Create(BA2DX10EntryState state, ChunkState chunk, Stream src)
public static ChunkBuilder Create(BA2DX10EntryState state, ChunkState chunk, Stream src)
{
var builder = new ChunkBuilder {_chunk = chunk};
using (var ms = new MemoryStream())
{
await src.CopyToLimitAsync(ms, (int)chunk.FullSz);
src.CopyToLimit(ms, (int)chunk.FullSz);
builder._data = ms.ToArray();
}
@ -197,13 +192,13 @@ namespace Compression.BSA
}
public async Task WriteData(BinaryWriter bw)
public void WriteData(BinaryWriter bw)
{
var pos = bw.BaseStream.Position;
bw.BaseStream.Position = _offsetOffset;
bw.Write((ulong)pos);
bw.BaseStream.Position = pos;
await bw.BaseStream.WriteAsync(_data, 0, _data.Length);
bw.BaseStream.Write(_data, 0, _data.Length);
}
}
@ -215,14 +210,13 @@ namespace Compression.BSA
private BA2FileEntryState _state;
private long _offsetOffset;
public static async Task<BA2FileEntryBuilder> Create(BA2FileEntryState state, Stream src)
public static BA2FileEntryBuilder Create(BA2FileEntryState state, Stream src)
{
var builder = new BA2FileEntryBuilder();
builder._state = state;
var builder = new BA2FileEntryBuilder {_state = state};
using (var ms = new MemoryStream())
{
await src.CopyToAsync(ms);
src.CopyTo(ms);
builder._data = ms.ToArray();
}
builder._rawSize = builder._data.Length;
@ -233,7 +227,7 @@ namespace Compression.BSA
{
using (var ds = new DeflaterOutputStream(ms))
{
await ds.WriteAsync(builder._data, 0, builder._data.Length);
ds.Write(builder._data, 0, builder._data.Length);
}
builder._data = ms.ToArray();
}
@ -260,13 +254,13 @@ namespace Compression.BSA
wtr.Write(_state.Align);
}
public async Task WriteData(BinaryWriter wtr)
public void WriteData(BinaryWriter wtr)
{
var pos = wtr.BaseStream.Position;
wtr.BaseStream.Seek(_offsetOffset, SeekOrigin.Begin);
wtr.Write((ulong)pos);
wtr.BaseStream.Position = pos;
await wtr.BaseStream.WriteAsync(_data, 0, _data.Length);
wtr.BaseStream.Write(_data, 0, _data.Length);
}
}
}

View File

@ -1,17 +1,9 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using ICSharpCode.SharpZipLib.Zip;
using ICSharpCode.SharpZipLib.Zip.Compression;
using Microsoft.SqlServer.Server;
using File = Alphaleonis.Win32.Filesystem.File;
namespace Compression.BSA
@ -188,7 +180,7 @@ namespace Compression.BSA
public uint HeaderSize => DDS.HeaderSizeForFormat((DXGI_FORMAT)_format);
public async Task CopyDataToAsync(Stream output)
public void CopyDataTo(Stream output)
{
var bw = new BinaryWriter(output);
@ -206,18 +198,18 @@ namespace Compression.BSA
if (!isCompressed)
{
await br.BaseStream.ReadAsync(full, 0, full.Length);
br.BaseStream.Read(full, 0, full.Length);
}
else
{
byte[] compressed = new byte[chunk._packSz];
await br.BaseStream.ReadAsync(compressed, 0, compressed.Length);
br.BaseStream.Read(compressed, 0, compressed.Length);
var inflater = new Inflater();
inflater.SetInput(compressed);
inflater.Inflate(full);
}
await bw.BaseStream.WriteAsync(full, 0, full.Length);
bw.BaseStream.Write(full, 0, full.Length);
}
}
@ -450,7 +442,7 @@ namespace Compression.BSA
public uint Size => _realSize;
public FileStateObject State => new BA2FileEntryState(this);
public async Task CopyDataToAsync(Stream output)
public void CopyDataTo(Stream output)
{
using (var fs = File.OpenRead(_bsa._filename))
{
@ -458,11 +450,11 @@ namespace Compression.BSA
uint len = Compressed ? _size : _realSize;
var bytes = new byte[len];
await fs.ReadAsync(bytes, 0, (int) len);
fs.Read(bytes, 0, (int) len);
if (!Compressed)
{
await output.WriteAsync(bytes, 0, bytes.Length);
output.Write(bytes, 0, bytes.Length);
}
else
{
@ -470,7 +462,7 @@ namespace Compression.BSA
var inflater = new Inflater();
inflater.SetInput(bytes);
inflater.Inflate(uncompressed);
await output.WriteAsync(uncompressed, 0, uncompressed.Length);
output.Write(uncompressed, 0, uncompressed.Length);
}
}
}

View File

@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using K4os.Compression.LZ4;
using K4os.Compression.LZ4.Streams;
@ -77,11 +76,11 @@ namespace Compression.BSA
public void Dispose()
{
}
public async Task AddFile(FileStateObject state, Stream src)
public void AddFile(FileStateObject state, Stream src)
{
var ostate = (BSAFileStateObject) state;
var r = await FileEntry.Create(this, ostate.Path, src, ostate.FlipCompression);
var r = FileEntry.Create(this, ostate.Path, src, ostate.FlipCompression);
lock (this)
{
@ -89,7 +88,7 @@ namespace Compression.BSA
}
}
public async Task Build(string outputName)
public void Build(string outputName)
{
RegenFolderRecords();
if (File.Exists(outputName)) File.Delete(outputName);
@ -122,7 +121,7 @@ namespace Compression.BSA
foreach (var file in _files) wtr.Write(file._nameBytes);
foreach (var file in _files)
await file.WriteData(wtr);
file.WriteData(wtr);
}
}
@ -237,7 +236,7 @@ namespace Compression.BSA
internal byte[] _pathBytes;
internal byte[] _rawData;
public static async Task<FileEntry> Create(BSABuilder bsa, string path, Stream src, bool flipCompression)
public static FileEntry Create(BSABuilder bsa, string path, Stream src, bool flipCompression)
{
var entry = new FileEntry();
entry._bsa = bsa;
@ -250,7 +249,7 @@ namespace Compression.BSA
entry._flipCompression = flipCompression;
var ms = new MemoryStream();
await src.CopyToAsync(ms);
src.CopyTo(ms);
entry._rawData = ms.ToArray();
entry._originalSize = entry._rawData.Length;
@ -321,7 +320,7 @@ namespace Compression.BSA
wtr.Write(0xDEADBEEF);
}
internal async Task WriteData(BinaryWriter wtr)
internal void WriteData(BinaryWriter wtr)
{
var offset = (uint) wtr.BaseStream.Position;
wtr.BaseStream.Position = _offsetOffset;
@ -333,11 +332,11 @@ namespace Compression.BSA
if (Compressed)
{
wtr.Write((uint) _originalSize);
await wtr.BaseStream.WriteAsync(_rawData, 0, _rawData.Length);
wtr.BaseStream.Write(_rawData, 0, _rawData.Length);
}
else
{
await wtr.BaseStream.WriteAsync(_rawData, 0, _rawData.Length);
wtr.BaseStream.Write(_rawData, 0, _rawData.Length);
}
}
}

View File

@ -1,32 +1,23 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Wabbajack.Common.CSP;
namespace Compression.BSA
{
public static class BSADispatch
{
public static Task<IBSAReader> OpenRead(string filename)
public static IBSAReader OpenRead(string filename)
{
return CSPExtensions.ThreadedTask<IBSAReader>(() =>
var fourcc = "";
using (var file = File.OpenRead(filename))
{
string fourcc = "";
using (var file = File.OpenRead(filename))
{
fourcc = Encoding.ASCII.GetString(new BinaryReader(file).ReadBytes(4));
}
fourcc = Encoding.ASCII.GetString(new BinaryReader(file).ReadBytes(4));
}
if (fourcc == "BSA\0")
return new BSAReader(filename);
if (fourcc == "BTDX")
return new BA2Reader(filename);
throw new InvalidDataException("Filename is not a .bsa or .ba2, magic " + fourcc);
});
if (fourcc == "BSA\0")
return new BSAReader(filename);
if (fourcc == "BTDX")
return new BA2Reader(filename);
throw new InvalidDataException("Filename is not a .bsa or .ba2, magic " + fourcc);
}
}
}
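OpenRead now sniffs the four-byte magic synchronously and returns the matching reader directly, so call sites use it in a plain using block; for example (path hypothetical):

using (var reader = BSADispatch.OpenRead(@"mods\textures.bsa"))
{
    foreach (var file in reader.Files)
        Console.WriteLine($"{file.Path} ({file.Size} bytes)");
}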

View File

@ -2,8 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using K4os.Compression.LZ4.Streams;
using File = Alphaleonis.Win32.Filesystem.File;
@ -303,7 +301,7 @@ namespace Compression.BSA
_name = rdr.ReadStringTerm(_bsa.HeaderType);
}
public async Task CopyDataToAsync(Stream output)
public void CopyDataTo(Stream output)
{
using (var in_file = File.OpenRead(_bsa._fileName))
using (var rdr = new BinaryReader(in_file))
@ -315,11 +313,11 @@ namespace Compression.BSA
if (Compressed)
{
var r = LZ4Stream.Decode(rdr.BaseStream);
await r.CopyToLimitAsync(output, (int) _originalSize);
r.CopyToLimit(output, (int) _originalSize);
}
else
{
await rdr.BaseStream.CopyToLimitAsync(output, (int) _onDiskSize);
rdr.BaseStream.CopyToLimit(output, (int) _onDiskSize);
}
}
else
@ -327,18 +325,18 @@ namespace Compression.BSA
if (Compressed)
using (var z = new InflaterInputStream(rdr.BaseStream))
{
await z.CopyToLimitAsync(output, (int) _originalSize);
z.CopyToLimit(output, (int) _originalSize);
}
else
await rdr.BaseStream.CopyToLimitAsync(output, (int) _onDiskSize);
rdr.BaseStream.CopyToLimit(output, (int) _onDiskSize);
}
}
}
public async Task<byte[]> GetData()
public byte[] GetData()
{
var ms = new MemoryStream();
await CopyDataToAsync(ms);
CopyDataTo(ms);
return ms.ToArray();
}
}

View File

@ -110,11 +110,5 @@
<Version>1.2.0</Version>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Wabbajack.Common.CSP\Wabbajack.Common.CSP.csproj">
<Project>{9e69bc98-1512-4977-b683-6e7e5292c0b8}</Project>
<Name>Wabbajack.Common.CSP</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
</Project>

View File

@ -1,10 +1,4 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
namespace Compression.BSA
{

View File

@ -1,9 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Compression.BSA
{
@ -19,8 +16,8 @@ namespace Compression.BSA
public interface IBSABuilder : IDisposable
{
Task AddFile(FileStateObject state, Stream src);
Task Build(string filename);
void AddFile(FileStateObject state, Stream src);
void Build(string filename);
}
public class ArchiveStateObject
@ -59,6 +56,6 @@ namespace Compression.BSA
/// in order to maintain thread-safe access.
/// </summary>
/// <param name="output"></param>
Task CopyDataToAsync(Stream output);
void CopyDataTo(Stream output);
}
}
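With AddFile, Build, and CopyDataTo all synchronous, the read-modify-write round trip exercised by the test earlier in this commit becomes straight-line code; a condensed sketch (file names hypothetical):

using (var reader = BSADispatch.OpenRead("in.bsa"))
using (var builder = reader.State.MakeBuilder())
{
    foreach (var file in reader.Files)
    {
        using (var ms = new MemoryStream())
        {
            file.CopyDataTo(ms);             // pull one entry out of the source
            ms.Position = 0;
            builder.AddFile(file.State, ms); // re-add it with the same state
        }
    }
    builder.Build("out.bsa");                // write the rebuilt archive
}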

View File

@ -2,7 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Path = Alphaleonis.Win32.Filesystem.Path;
namespace Compression.BSA
@ -142,20 +141,6 @@ namespace Compression.BSA
return ((ulong) (hash2 + hash3) << 32) + hash1;
}
public static async Task CopyToLimitAsync(this Stream frm, Stream tw, int limit)
{
var buff = new byte[1024];
while (limit > 0)
{
var to_read = Math.Min(buff.Length, limit);
var read = await frm.ReadAsync(buff, 0, to_read);
await tw.WriteAsync(buff, 0, read);
limit -= read;
}
tw.Flush();
}
public static void CopyToLimit(this Stream frm, Stream tw, int limit)
{
var buff = new byte[1024];
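The hunk ends mid-body, but the synchronous CopyToLimit presumably mirrors the removed async version line for line; the remainder, reconstructed under that assumption:

public static void CopyToLimit(this Stream frm, Stream tw, int limit)
{
    var buff = new byte[1024];
    while (limit > 0)
    {
        var to_read = Math.Min(buff.Length, limit);
        var read = frm.Read(buff, 0, to_read);
        tw.Write(buff, 0, read);
        limit -= read;
    }
    tw.Flush();
}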

View File

@ -2,12 +2,10 @@
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using Compression.BSA;
using ICSharpCode.SharpZipLib.GZip;
using OMODFramework;
using Wabbajack.Common.CSP;
namespace Wabbajack.Common
{
@ -33,16 +31,16 @@ namespace Wabbajack.Common
}
public static async Task ExtractAll(string source, string dest)
public static void ExtractAll(string source, string dest)
{
try
{
if (Consts.SupportedBSAs.Any(b => source.ToLower().EndsWith(b)))
await ExtractAllWithBSA(source, dest);
ExtractAllWithBSA(source, dest);
else if (source.EndsWith(".omod"))
await ExtractAllWithOMOD(source, dest);
ExtractAllWithOMOD(source, dest);
else
await ExtractAllWith7Zip(source, dest);
ExtractAllWith7Zip(source, dest);
}
catch (Exception ex)
{
@ -51,48 +49,41 @@ namespace Wabbajack.Common
}
}
private static Task ExtractAllWithOMOD(string source, string dest)
private static string ExtractAllWithOMOD(string source, string dest)
{
return CSPExtensions.ThreadedTask(() =>
{
Utils.Log($"Extracting {Path.GetFileName(source)}");
var f = new Framework();
f.SetTempDirectory(dest);
var omod = new OMOD(source, ref f);
omod.ExtractDataFiles();
omod.ExtractPlugins();
return dest;
});
Utils.Log($"Extracting {Path.GetFileName(source)}");
var f = new Framework();
f.SetTempDirectory(dest);
var omod = new OMOD(source, ref f);
omod.ExtractDataFiles();
omod.ExtractPlugins();
return dest;
}
private static async Task ExtractAllWithBSA(string source, string dest)
private static void ExtractAllWithBSA(string source, string dest)
{
try
{
using (var arch = await BSADispatch.OpenRead(source))
using (var arch = BSADispatch.OpenRead(source))
{
await arch.Files.ToChannel()
.UnorderedPipeline(
Channel.CreateSink<IFile>(),
async f =>
{
var path = f.Path;
if (f.Path.StartsWith("\\"))
path = f.Path.Substring(1);
Utils.Status($"Extracting {path}");
var out_path = Path.Combine(dest, path);
var parent = Path.GetDirectoryName(out_path);
arch.Files
.PMap(f =>
{
var path = f.Path;
if (f.Path.StartsWith("\\"))
path = f.Path.Substring(1);
Utils.Status($"Extracting {path}");
var out_path = Path.Combine(dest, path);
var parent = Path.GetDirectoryName(out_path);
if (!Directory.Exists(parent))
Directory.CreateDirectory(parent);
if (!Directory.Exists(parent))
Directory.CreateDirectory(parent);
using (var fs = File.OpenWrite(out_path))
{
await f.CopyDataToAsync(fs);
}
return f;
});
using (var fs = File.OpenWrite(out_path))
{
f.CopyDataTo(fs);
}
});
}
}
catch (Exception ex)
@ -102,7 +93,7 @@ namespace Wabbajack.Common
}
}
private static async Task ExtractAllWith7Zip(string source, string dest)
private static void ExtractAllWith7Zip(string source, string dest)
{
Utils.Log($"Extracting {Path.GetFileName(source)}");
@ -137,7 +128,7 @@ namespace Wabbajack.Common
{
while (!p.HasExited)
{
var line = await p.StandardOutput.ReadLineAsync();
var line = p.StandardOutput.ReadLine();
if (line == null)
break;

View File

@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Subjects;
using System.Text;
using System.Threading.Tasks;
using Wabbajack.Common;
@ -11,7 +12,16 @@ namespace Wabbajack.Lib
{
public abstract class ACompiler
{
protected static string _vfsCacheName = "vfs_compile_cache.bin";
/// <summary>
/// A stream of tuples of ("Update Title", 0.25) which represent the name of the current task
/// and the current progress.
/// </summary>
public IObservable<(string, float)> ProgressUpdates => _progressUpdates;
protected readonly Subject<(string, float)> _progressUpdates = new Subject<(string, float)>();
public Context VFS { get; internal set; } = new Context();
public ModManager ModManager;
public string GamePath;
@ -39,5 +49,11 @@ namespace Wabbajack.Lib
public abstract Directive RunStack(IEnumerable<ICompilationStep> stack, RawSourceFile source);
public abstract IEnumerable<ICompilationStep> GetStack();
public abstract IEnumerable<ICompilationStep> MakeStack();
protected ACompiler()
{
ProgressUpdates.Subscribe(itm => Utils.Log($"{itm.Item2} - {itm.Item1}"));
VFS.LogSpam.Subscribe(itm => Utils.Status(itm));
}
}
}
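The constructor wires ProgressUpdates to the log by default; a UI can subscribe to the same stream independently. A hypothetical consumer (assumes System.Reactive.Linq for Sample; progressBar is hypothetical):

compiler.ProgressUpdates
        .Sample(TimeSpan.FromMilliseconds(250))             // don't flood the UI thread
        .Subscribe(p => progressBar.Value = p.Item2 * 100);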

View File

@ -77,7 +77,7 @@ namespace Wabbajack.Lib.CompilationSteps
}
CreateBSA directive;
using (var bsa = BSADispatch.OpenRead(source.AbsolutePath).Result)
using (var bsa = BSADispatch.OpenRead(source.AbsolutePath))
{
directive = new CreateBSA
{

View File

@ -28,6 +28,7 @@ namespace Wabbajack.Lib
{
public class Compiler : ACompiler
{
private string _mo2DownloadsFolder;
public Dictionary<string, IEnumerable<IndexedFileMatch>> DirectMatchIndex;
@ -49,6 +50,8 @@ namespace Wabbajack.Lib
ModListOutputFolder = "output_folder";
ModListOutputFile = MO2Profile + ExtensionManager.Extension;
VFS.ProgressUpdates.Debounce(new TimeSpan(0, 0, 0, 0, 100))
.Subscribe(itm => _progressUpdates.OnNext(itm));
}
public dynamic MO2Ini { get; }
@ -119,13 +122,24 @@ namespace Wabbajack.Lib
Info("Using Profiles: " + string.Join(", ", SelectedProfiles.OrderBy(p => p)));
VFS.IntegrateFromFile(_vfsCacheName);
Info($"Indexing {MO2Folder}");
VFS.AddRoot(MO2Folder).Wait();
VFS.AddRoot(MO2Folder);
VFS.WriteToFile(_vfsCacheName);
Info($"Indexing {GamePath}");
VFS.AddRoot(GamePath).Wait();
VFS.AddRoot(GamePath);
VFS.WriteToFile(_vfsCacheName);
Info($"Indexing {MO2DownloadsFolder}");
VFS.AddRoot(MO2DownloadsFolder).Wait();
VFS.AddRoot(MO2DownloadsFolder);
VFS.WriteToFile(_vfsCacheName);
Info("Cleaning output folder");
if (Directory.Exists(ModListOutputFolder))
@ -150,7 +164,9 @@ namespace Wabbajack.Lib
if (Directory.Exists(loot_path))
{
Info($"Indexing {loot_path}");
VFS.AddRoot(loot_path).Wait();
VFS.AddRoot(loot_path);
VFS.WriteToFile(_vfsCacheName);
loot_files = Directory.EnumerateFiles(loot_path, "userlist.yaml", SearchOption.AllDirectories)
.Where(p => p.FileExists())
@ -452,13 +468,13 @@ namespace Wabbajack.Lib
var bsa_id = to.Split('\\')[1];
var bsa = InstallDirectives.OfType<CreateBSA>().First(b => b.TempID == bsa_id);
using (var a = await BSADispatch.OpenRead(Path.Combine(MO2Folder, bsa.To)))
using (var a = BSADispatch.OpenRead(Path.Combine(MO2Folder, bsa.To)))
{
var find = Path.Combine(to.Split('\\').Skip(2).ToArray());
var file = a.Files.First(e => e.Path.Replace('/', '\\') == find);
using (var ms = new MemoryStream())
{
await file.CopyDataToAsync(ms);
file.CopyDataTo(ms);
return ms.ToArray();
}
}

View File

@ -150,7 +150,7 @@ namespace Wabbajack.Lib
Error("Cannot continue, was unable to download one or more archives");
}
PrimeVFS().Wait();
PrimeVFS();
BuildFolderStructure();
InstallArchives();
@ -233,7 +233,7 @@ namespace Wabbajack.Lib
/// We don't want to make the installer index all the archives; that's just a waste of time. Instead,
/// we'll pass just enough information to VFS to let it know about the files we have.
/// </summary>
private async Task PrimeVFS()
private void PrimeVFS()
{
VFS.AddKnown(HashedArchives.Select(a => new KnownFile
{
@ -247,7 +247,7 @@ namespace Wabbajack.Lib
.OfType<FromArchive>()
.Select(f => new KnownFile { Paths = f.ArchiveHashPath}));
await VFS.BackfillMissing();
VFS.BackfillMissing();
}
private void BuildBSAs()
@ -386,7 +386,7 @@ namespace Wabbajack.Lib
return g;
}).ToList();
var on_finish = VFS.Stage(vfiles.Select(f => f.FromFile).Distinct()).Result;
var on_finish = VFS.Stage(vfiles.Select(f => f.FromFile).Distinct());
Status($"Copying files for {archive.Name}");

View File

@ -95,13 +95,13 @@ namespace Wabbajack.Lib
CreateMetaFiles();
Info($"Indexing {StagingFolder}");
VFS.AddRoot(StagingFolder).Wait();
VFS.AddRoot(StagingFolder);
Info($"Indexing {GamePath}");
VFS.AddRoot(GamePath).Wait();
VFS.AddRoot(GamePath);
Info($"Indexing {DownloadsFolder}");
VFS.AddRoot(DownloadsFolder).Wait();
VFS.AddRoot(DownloadsFolder);
AddExternalFolder();

View File

@ -156,7 +156,7 @@ namespace Wabbajack.Lib
return g;
}).ToList();
var onFinish = VFS.Stage(vFiles.Select(f => f.FromFile).Distinct()).Result;
var onFinish = VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());
Status($"Copying files for {archive.Name}");

View File

@ -90,9 +90,9 @@ namespace Wabbajack.Test
File.Copy(src, Path.Combine(utils.DownloadsFolder, filename));
if (mod_name == null)
FileExtractor.ExtractAll(src, utils.MO2Folder).Wait();
FileExtractor.ExtractAll(src, utils.MO2Folder);
else
FileExtractor.ExtractAll(src, Path.Combine(utils.ModsFolder, mod_name)).Wait();
FileExtractor.ExtractAll(src, Path.Combine(utils.ModsFolder, mod_name));
}
@ -127,7 +127,7 @@ namespace Wabbajack.Test
var dest = Path.Combine(utils.DownloadsFolder, file.file_name);
File.Copy(src, dest);
FileExtractor.ExtractAll(src, Path.Combine(utils.ModsFolder, mod_name)).Wait();
FileExtractor.ExtractAll(src, Path.Combine(utils.ModsFolder, mod_name));
File.WriteAllText(dest + ".meta", ini);
}

View File

@ -29,10 +29,10 @@ namespace Wabbajack.VirtualFileSystem.Test
}
[TestMethod]
public async Task FilesAreIndexed()
public void FilesAreIndexed()
{
AddFile("test.txt", "This is a test");
await AddTestRoot();
AddTestRoot();
var file = context.Index.ByFullPath[Path.Combine(VFS_TEST_DIR_FULL, "test.txt")];
Assert.IsNotNull(file);
@ -41,11 +41,11 @@ namespace Wabbajack.VirtualFileSystem.Test
Assert.AreEqual(file.Hash, "qX0GZvIaTKM=");
}
private async Task AddTestRoot()
private void AddTestRoot()
{
await context.AddRoot(VFS_TEST_DIR_FULL);
await context.WriteToFile(Path.Combine(VFS_TEST_DIR_FULL, "vfs_cache.bin"));
await context.IntegrateFromFile(Path.Combine(VFS_TEST_DIR_FULL, "vfs_cache.bin"));
context.AddRoot(VFS_TEST_DIR_FULL);
context.WriteToFile(Path.Combine(VFS_TEST_DIR_FULL, "vfs_cache.bin"));
context.IntegrateFromFile(Path.Combine(VFS_TEST_DIR_FULL, "vfs_cache.bin"));
}
@ -54,7 +54,7 @@ namespace Wabbajack.VirtualFileSystem.Test
{
AddFile("archive/test.txt", "This is a test");
ZipUpFolder("archive", "test.zip");
await AddTestRoot();
AddTestRoot();
var abs_path = Path.Combine(VFS_TEST_DIR_FULL, "test.zip");
var file = context.Index.ByFullPath[abs_path];
@ -77,7 +77,7 @@ namespace Wabbajack.VirtualFileSystem.Test
ZipUpFolder("archive", "test.zip");
AddFile("test.txt", "This is a test");
await AddTestRoot();
AddTestRoot();
var files = context.Index.ByHash["qX0GZvIaTKM="];
@ -88,7 +88,7 @@ namespace Wabbajack.VirtualFileSystem.Test
public async Task DeletedFilesAreRemoved()
{
AddFile("test.txt", "This is a test");
await AddTestRoot();
AddTestRoot();
var file = context.Index.ByFullPath[Path.Combine(VFS_TEST_DIR_FULL, "test.txt")];
Assert.IsNotNull(file);
@ -98,21 +98,21 @@ namespace Wabbajack.VirtualFileSystem.Test
File.Delete(Path.Combine(VFS_TEST_DIR_FULL, "test.txt"));
await AddTestRoot();
AddTestRoot();
CollectionAssert.DoesNotContain(context.Index.ByFullPath, Path.Combine(VFS_TEST_DIR_FULL, "test.txt"));
}
[TestMethod]
public async Task UnmodifiedFilesAreNotReIndexed()
public void UnmodifiedFilesAreNotReIndexed()
{
AddFile("test.txt", "This is a test");
await AddTestRoot();
AddTestRoot();
var old_file = context.Index.ByFullPath[Path.Combine(VFS_TEST_DIR_FULL, "test.txt")];
var old_time = old_file.LastAnalyzed;
await AddTestRoot();
AddTestRoot();
var new_file = context.Index.ByFullPath[Path.Combine(VFS_TEST_DIR_FULL, "test.txt")];
@ -120,23 +120,23 @@ namespace Wabbajack.VirtualFileSystem.Test
}
[TestMethod]
public async Task CanStageSimpleArchives()
public void CanStageSimpleArchives()
{
AddFile("archive/test.txt", "This is a test");
ZipUpFolder("archive", "test.zip");
await AddTestRoot();
AddTestRoot();
var abs_path = Path.Combine(VFS_TEST_DIR_FULL, "test.zip");
var file = context.Index.ByFullPath[abs_path + "|test.txt"];
var cleanup = await context.Stage(new List<VirtualFile> {file});
var cleanup = context.Stage(new List<VirtualFile> {file});
Assert.AreEqual("This is a test", File.ReadAllText(file.StagedPath));
cleanup();
}
[TestMethod]
public async Task CanStageNestedArchives()
public void CanStageNestedArchives()
{
AddFile("archive/test.txt", "This is a test");
ZipUpFolder("archive", "test.zip");
@ -146,11 +146,11 @@ namespace Wabbajack.VirtualFileSystem.Test
Path.Combine(VFS_TEST_DIR_FULL, @"archive\other\dir\nested.zip"));
ZipUpFolder("archive", "test.zip");
await AddTestRoot();
AddTestRoot();
var files = context.Index.ByHash["qX0GZvIaTKM="];
var cleanup = await context.Stage(files);
var cleanup = context.Stage(files);
foreach (var file in files)
Assert.AreEqual("This is a test", File.ReadAllText(file.StagedPath));
@ -159,7 +159,7 @@ namespace Wabbajack.VirtualFileSystem.Test
}
[TestMethod]
public async Task CanRequestPortableFileTrees()
public void CanRequestPortableFileTrees()
{
AddFile("archive/test.txt", "This is a test");
ZipUpFolder("archive", "test.zip");
@ -169,7 +169,7 @@ namespace Wabbajack.VirtualFileSystem.Test
Path.Combine(VFS_TEST_DIR_FULL, @"archive\other\dir\nested.zip"));
ZipUpFolder("archive", "test.zip");
await AddTestRoot();
AddTestRoot();
var files = context.Index.ByHash["qX0GZvIaTKM="];
var archive = context.Index.ByRootPath[Path.Combine(VFS_TEST_DIR_FULL, "test.zip")];
@ -178,12 +178,12 @@ namespace Wabbajack.VirtualFileSystem.Test
var new_context = new Context();
await new_context.IntegrateFromPortable(state,
new_context.IntegrateFromPortable(state,
new Dictionary<string, string> {{archive.Hash, archive.FullPath}});
var new_files = new_context.Index.ByHash["qX0GZvIaTKM="];
var close = await new_context.Stage(new_files);
var close = new_context.Stage(new_files);
foreach (var file in new_files)
Assert.AreEqual("This is a test", File.ReadAllText(file.StagedPath));

View File

@ -4,6 +4,7 @@ using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Text;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
@ -24,44 +25,54 @@ namespace Wabbajack.VirtualFileSystem
private readonly string _stagingFolder = "vfs_staging";
public IndexRoot Index { get; private set; } = IndexRoot.Empty;
/// <summary>
/// A stream of tuples of ("Update Title", 0.25) which represent the name of the current task
/// and the current progress.
/// </summary>
public IObservable<(string, float)> ProgressUpdates => _progressUpdates;
private readonly Subject<(string, float)> _progressUpdates = new Subject<(string, float)>();
/// <summary>
/// A high throughput firehose of updates from the VFS. These go into more detail on the status
/// of what's happening in the context, but is probably too high-bandwidth to tie directly to the
/// UI.
/// </summary>
public IObservable<string> LogSpam => _logSpam;
internal readonly Subject<string> _logSpam = new Subject<string>();
public TemporaryDirectory GetTemporaryFolder()
{
return new TemporaryDirectory(Path.Combine(_stagingFolder, Guid.NewGuid().ToString()));
}
public async Task<IndexRoot> AddRoot(string root)
public IndexRoot AddRoot(string root)
{
if (!Path.IsPathRooted(root))
throw new InvalidDataException($"Path is not absolute: {root}");
var filtered = await Index.AllFiles
.ToChannel()
.UnorderedPipelineRx(o => o.Where(file => File.Exists(file.Name)))
.TakeAll();
var filtered = Index.AllFiles.Where(file => File.Exists(file.Name)).ToList();
var byPath = filtered.ToImmutableDictionary(f => f.Name);
var results = Channel.Create<VirtualFile>(1024);
var pipeline = Directory.EnumerateFiles(root, "*", DirectoryEnumerationOptions.Recursive)
.ToChannel()
.UnorderedPipeline(results, async f =>
{
if (byPath.TryGetValue(f, out var found))
{
var fi = new FileInfo(f);
if (found.LastModified == fi.LastWriteTimeUtc.Ticks && found.Size == fi.Length)
return found;
}
var filesToIndex = Directory.EnumerateFiles(root, "*", DirectoryEnumerationOptions.Recursive).ToList();
var results = Channel.Create(1024, ProgressUpdater<VirtualFile>($"Indexing {root}", filesToIndex.Count));
return await VirtualFile.Analyze(this, null, f, f);
});
var allFiles = filesToIndex
.PMap(f =>
{
if (byPath.TryGetValue(f, out var found))
{
var fi = new FileInfo(f);
if (found.LastModified == fi.LastWriteTimeUtc.Ticks && found.Size == fi.Length)
return found;
}
var allFiles = await results.TakeAll();
return VirtualFile.Analyze(this, null, f, f);
});
// Should already be done but let's make the async tracker happy
await pipeline;
var newIndex = await IndexRoot.Empty.Integrate(filtered.Concat(allFiles).ToList());
var newIndex = IndexRoot.Empty.Integrate(filtered.Concat(allFiles).ToList());
lock (this)
{
@ -71,7 +82,26 @@ namespace Wabbajack.VirtualFileSystem
return newIndex;
}
public async Task WriteToFile(string filename)
class Box<T>
{
public T Value { get; set; }
}
private Func<IObservable<T>, IObservable<T>> ProgressUpdater<T>(string s, float totalCount)
{
if (totalCount == 0)
totalCount = 1;
var box = new Box<float>();
return sub => sub.Select(itm =>
{
box.Value += 1;
_progressUpdates.OnNext((s, box.Value / totalCount));
return itm;
});
}
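Channel.Create above threads ProgressUpdater into the indexing pipeline; in isolation the returned transform just counts elements as they pass and publishes the running fraction. A standalone illustration (wiring hypothetical; assumes System.Reactive.Linq for ToObservable):

var items = new[] { "a.esp", "b.esp", "c.esp", "d.esp" };
var track = ProgressUpdater<string>("Indexing example", items.Length);
// Emits ("Indexing example", 0.25) up through ("Indexing example", 1.0) as items flow by.
track(items.ToObservable()).Subscribe();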
public void WriteToFile(string filename)
{
using (var fs = File.OpenWrite(filename))
using (var bw = new BinaryWriter(fs, Encoding.UTF8, true))
@ -82,66 +112,62 @@ namespace Wabbajack.VirtualFileSystem
bw.Write(FileVersion);
bw.Write((ulong) Index.AllFiles.Count);
var sizes = await Index.AllFiles
.ToChannel()
.UnorderedPipelineSync(f =>
var sizes = Index.AllFiles
.PMap(f =>
{
var ms = new MemoryStream();
f.Write(ms);
return ms;
})
.Select(async ms =>
.Select(ms =>
{
var size = ms.Position;
ms.Position = 0;
bw.Write((ulong) size);
await ms.CopyToAsync(fs);
ms.CopyTo(fs);
return ms.Position;
})
.TakeAll();
});
Utils.Log($"Wrote {fs.Position.ToFileSizeString()} file as vfs cache file {filename}");
}
}
public async Task IntegrateFromFile(string filename)
public void IntegrateFromFile(string filename)
{
using (var fs = File.OpenRead(filename))
using (var br = new BinaryReader(fs, Encoding.UTF8, true))
try
{
var magic = Encoding.ASCII.GetString(br.ReadBytes(Encoding.ASCII.GetBytes(Magic).Length));
var fileVersion = br.ReadUInt64();
if (fileVersion != FileVersion || magic != Magic)
throw new InvalidDataException("Bad Data Format");
var numFiles = br.ReadUInt64();
var input = Channel.Create<byte[]>(1024);
var pipeline = input.UnorderedPipelineSync(
data => VirtualFile.Read(this, data))
.TakeAll();
Utils.Log($"Loading {numFiles} files from {filename}");
for (ulong idx = 0; idx < numFiles; idx++)
using (var fs = File.OpenRead(filename))
using (var br = new BinaryReader(fs, Encoding.UTF8, true))
{
var size = br.ReadUInt64();
var bytes = new byte[size];
await br.BaseStream.ReadAsync(bytes, 0, (int) size);
await input.Put(bytes);
}
var magic = Encoding.ASCII.GetString(br.ReadBytes(Encoding.ASCII.GetBytes(Magic).Length));
var fileVersion = br.ReadUInt64();
if (fileVersion != FileVersion || magic != Magic)
throw new InvalidDataException("Bad Data Format");
input.Close();
var numFiles = br.ReadUInt64();
var files = await pipeline;
var newIndex = await Index.Integrate(files);
lock (this)
{
Index = newIndex;
var files = Enumerable.Range(0, (int) numFiles)
.Select(idx =>
{
var size = br.ReadUInt64();
var bytes = new byte[size];
br.BaseStream.Read(bytes, 0, (int) size);
return VirtualFile.Read(this, bytes);
}).ToList();
var newIndex = Index.Integrate(files);
lock (this)
{
Index = newIndex;
}
}
}
catch (IOException)
{
if (File.Exists(filename))
File.Delete(filename);
}
}
public async Task<Action> Stage(IEnumerable<VirtualFile> files)
public Action Stage(IEnumerable<VirtualFile> files)
{
var grouped = files.SelectMany(f => f.FilesInFullPath)
.Distinct()
@ -155,7 +181,7 @@ namespace Wabbajack.VirtualFileSystem
foreach (var group in grouped)
{
var tmpPath = Path.Combine(_stagingFolder, Guid.NewGuid().ToString());
await FileExtractor.ExtractAll(group.Key.StagedPath, tmpPath);
FileExtractor.ExtractAll(group.Key.StagedPath, tmpPath);
paths.Add(tmpPath);
foreach (var file in group)
file.StagedPath = Path.Combine(tmpPath, file.Name);
@ -184,16 +210,14 @@ namespace Wabbajack.VirtualFileSystem
}).ToList();
}
public async Task IntegrateFromPortable(List<PortableFile> state, Dictionary<string, string> links)
public void IntegrateFromPortable(List<PortableFile> state, Dictionary<string, string> links)
{
var indexedState = state.GroupBy(f => f.ParentHash)
.ToDictionary(f => f.Key ?? "", f => (IEnumerable<PortableFile>) f);
var parents = await indexedState[""]
.ToChannel()
.UnorderedPipelineSync(f => VirtualFile.CreateFromPortable(this, indexedState, links, f))
.TakeAll();
var parents = indexedState[""]
.PMap(f => VirtualFile.CreateFromPortable(this, indexedState, links, f));
var newIndex = await Index.Integrate(parents);
var newIndex = Index.Integrate(parents);
lock (this)
{
Index = newIndex;
@ -202,7 +226,7 @@ namespace Wabbajack.VirtualFileSystem
public async Task<DisposableList<VirtualFile>> StageWith(IEnumerable<VirtualFile> files)
{
return new DisposableList<VirtualFile>(await Stage(files), files);
return new DisposableList<VirtualFile>(Stage(files), files);
}
@ -214,7 +238,7 @@ namespace Wabbajack.VirtualFileSystem
_knownFiles.AddRange(known);
}
public async Task BackfillMissing()
public void BackfillMissing()
{
var newFiles = _knownFiles.Where(f => f.Paths.Length == 1)
.GroupBy(f => f.Hash)
@ -248,7 +272,7 @@ namespace Wabbajack.VirtualFileSystem
}
_knownFiles.Where(f => f.Paths.Length > 1).Do(BackFillOne);
var newIndex = await Index.Integrate(newFiles.Values.ToList());
var newIndex = Index.Integrate(newFiles.Values.ToList());
lock (this)
Index = newIndex;
@ -258,7 +282,6 @@ namespace Wabbajack.VirtualFileSystem
}
#endregion
}
public class KnownFile
@ -315,30 +338,30 @@ namespace Wabbajack.VirtualFileSystem
public ImmutableDictionary<string, ImmutableStack<VirtualFile>> ByName { get; set; }
public ImmutableDictionary<string, VirtualFile> ByRootPath { get; }
public async Task<IndexRoot> Integrate(List<VirtualFile> files)
public IndexRoot Integrate(List<VirtualFile> files)
{
Utils.Log($"Integrating");
var allFiles = AllFiles.Concat(files).GroupBy(f => f.Name).Select(g => g.Last()).ToImmutableList();
var byFullPath = Task.Run(() =>
allFiles.SelectMany(f => f.ThisAndAllChildren)
.ToImmutableDictionary(f => f.FullPath));
var byFullPath = allFiles.SelectMany(f => f.ThisAndAllChildren)
.ToImmutableDictionary(f => f.FullPath);
var byHash = Task.Run(() =>
allFiles.SelectMany(f => f.ThisAndAllChildren)
.Where(f => f.Hash != null)
.ToGroupedImmutableDictionary(f => f.Hash));
var byHash = allFiles.SelectMany(f => f.ThisAndAllChildren)
.Where(f => f.Hash != null)
.ToGroupedImmutableDictionary(f => f.Hash);
var byName = Task.Run(() =>
allFiles.SelectMany(f => f.ThisAndAllChildren)
.ToGroupedImmutableDictionary(f => f.Name));
var byName = allFiles.SelectMany(f => f.ThisAndAllChildren)
.ToGroupedImmutableDictionary(f => f.Name);
var byRootPath = Task.Run(() => allFiles.ToImmutableDictionary(f => f.Name));
var byRootPath = allFiles.ToImmutableDictionary(f => f.Name);
return new IndexRoot(allFiles,
await byFullPath,
await byHash,
await byRootPath,
await byName);
var result = new IndexRoot(allFiles,
byFullPath,
byHash,
byRootPath,
byName);
Utils.Log($"Done integrating");
return result;
}
public VirtualFile FileForArchiveHashPath(string[] argArchiveHashPath)

View File

@ -117,11 +117,11 @@ namespace Wabbajack.VirtualFileSystem
}
}
public static async Task<VirtualFile> Analyze(Context context, VirtualFile parent, string abs_path,
public static VirtualFile Analyze(Context context, VirtualFile parent, string abs_path,
string rel_path)
{
var hasher = abs_path.FileHashAsync();
var fi = new FileInfo(abs_path);
context._logSpam.OnNext($"Analyzing {rel_path}");
var self = new VirtualFile
{
Context = context,
@ -129,23 +129,26 @@ namespace Wabbajack.VirtualFileSystem
Parent = parent,
Size = fi.Length,
LastModified = fi.LastWriteTimeUtc.Ticks,
LastAnalyzed = DateTime.Now.Ticks
LastAnalyzed = DateTime.Now.Ticks,
Hash = abs_path.FileHash()
};
if (FileExtractor.CanExtract(Path.GetExtension(abs_path)))
{
context._logSpam.OnNext($"Extracting {rel_path}");
using (var tempFolder = context.GetTemporaryFolder())
{
await FileExtractor.ExtractAll(abs_path, tempFolder.FullName);
FileExtractor.ExtractAll(abs_path, tempFolder.FullName);
var results = Channel.Create<VirtualFile>(1024);
var files = Directory.EnumerateFiles(tempFolder.FullName, "*", SearchOption.AllDirectories)
.ToChannel()
.UnorderedPipeline(results,
async abs_src => await Analyze(context, self, abs_src, abs_src.RelativeTo(tempFolder.FullName)));
self.Children = (await results.TakeAll()).ToImmutableList();
context._logSpam.OnNext($"Analyzing Contents {rel_path}");
self.Children = Directory.EnumerateFiles(tempFolder.FullName, "*", SearchOption.AllDirectories)
.PMap(abs_src => Analyze(context, self, abs_src, abs_src.RelativeTo(tempFolder.FullName)))
.ToImmutableList();
}
self.Hash = await hasher;
}
return self;
}