BSA building is now async; status updates are reported during compression

This commit is contained in:
Timothy Baldridge 2020-04-20 15:36:33 -06:00
parent a82b2d5094
commit 724f0074ce
10 changed files with 180 additions and 114 deletions

View File

@ -114,14 +114,14 @@ namespace Compression.BSA.Test
await using (var w = ViaJson(a.State).MakeBuilder(size))
{
var streams = await a.Files.PMap(Queue, file =>
var streams = await a.Files.PMap(Queue, async file =>
{
var absPath = _tempDir.Combine(file.Path);
var str = absPath.OpenRead();
w.AddFile(ViaJson(file.State), str);
await w.AddFile(ViaJson(file.State), str);
return str;
});
w.Build(tempFile);
await w.Build(tempFile);
streams.Do(s => s.Dispose());
}

View File

@ -19,7 +19,7 @@ namespace Compression.BSA
int Index { get; }
void WriteData(BinaryWriter wtr);
Task WriteData(BinaryWriter wtr);
void WriteHeader(BinaryWriter wtr);
}
@ -40,26 +40,26 @@ namespace Compression.BSA
_slab.Dispose();
}
public void AddFile(FileStateObject state, Stream src)
public async Task AddFile(FileStateObject state, Stream src)
{
switch (_state.Type)
{
case EntryType.GNRL:
var result = BA2FileEntryBuilder.Create((BA2FileEntryState)state, src, _slab);
var result = await BA2FileEntryBuilder.Create((BA2FileEntryState)state, src, _slab);
lock(_entries) _entries.Add(result);
break;
case EntryType.DX10:
var resultdx10 = BA2DX10FileEntryBuilder.Create((BA2DX10EntryState)state, src, _slab);
var resultdx10 = await BA2DX10FileEntryBuilder.Create((BA2DX10EntryState)state, src, _slab);
lock(_entries) _entries.Add(resultdx10);
break;
}
}
public void Build(AbsolutePath filename)
public async Task Build(AbsolutePath filename)
{
SortEntries();
using var fs = filename.Create();
using var bw = new BinaryWriter(fs);
await using var fs = filename.Create();
await using var bw = new BinaryWriter(fs);
bw.Write(Encoding.ASCII.GetBytes(_state.HeaderMagic));
bw.Write(_state.Version);
@ -73,10 +73,10 @@ namespace Compression.BSA
entry.WriteHeader(bw);
}
foreach (var entry in _entries)
await _entries.DoProgress("Writing BSA Files", async entry =>
{
entry.WriteData(bw);
}
await entry.WriteData(bw);
});
if (!_state.HasNameTable) return;
@ -89,7 +89,7 @@ namespace Compression.BSA
{
var bytes = Encoding.UTF7.GetBytes(entry.FullName);
bw.Write((ushort)bytes.Length);
bw.BaseStream.Write(bytes, 0, bytes.Length);
await bw.BaseStream.WriteAsync(bytes, 0, bytes.Length);
}
}
@ -104,7 +104,7 @@ namespace Compression.BSA
private BA2DX10EntryState _state;
private List<ChunkBuilder> _chunks;
public static BA2DX10FileEntryBuilder Create(BA2DX10EntryState state, Stream src, DiskSlabAllocator slab)
public static async Task<BA2DX10FileEntryBuilder> Create(BA2DX10EntryState state, Stream src, DiskSlabAllocator slab)
{
var builder = new BA2DX10FileEntryBuilder {_state = state};
@ -115,7 +115,7 @@ namespace Compression.BSA
builder._chunks = new List<ChunkBuilder>();
foreach (var chunk in state.Chunks)
builder._chunks.Add(ChunkBuilder.Create(state, chunk, src, slab));
builder._chunks.Add(await ChunkBuilder.Create(state, chunk, src, slab));
return builder;
}
@ -143,10 +143,10 @@ namespace Compression.BSA
chunk.WriteHeader(bw);
}
public void WriteData(BinaryWriter wtr)
public async Task WriteData(BinaryWriter wtr)
{
foreach (var chunk in _chunks)
chunk.WriteData(wtr);
await chunk.WriteData(wtr);
}
}
@ -158,28 +158,28 @@ namespace Compression.BSA
private long _offsetOffset;
private Stream _dataSlab;
public static ChunkBuilder Create(BA2DX10EntryState state, ChunkState chunk, Stream src, DiskSlabAllocator slab)
public static async Task<ChunkBuilder> Create(BA2DX10EntryState state, ChunkState chunk, Stream src, DiskSlabAllocator slab)
{
var builder = new ChunkBuilder {_chunk = chunk};
if (!chunk.Compressed)
{
builder._dataSlab = slab.Allocate(chunk.FullSz);
src.CopyToLimit(builder._dataSlab, (int)chunk.FullSz);
await src.CopyToWithStatusAsync((int)chunk.FullSz, builder._dataSlab, $"Writing {state.Path} {chunk.StartMip}:{chunk.EndMip}");
}
else
{
var deflater = new Deflater(Deflater.BEST_COMPRESSION);
using var ms = new MemoryStream();
using (var ds = new DeflaterOutputStream(ms, deflater))
await using var ms = new MemoryStream();
await using (var ds = new DeflaterOutputStream(ms, deflater))
{
ds.IsStreamOwner = false;
src.CopyToLimit(ds, (int)chunk.FullSz);
await src.CopyToWithStatusAsync((int)chunk.FullSz, ds, $"Compressing {state.Path} {chunk.StartMip}:{chunk.EndMip}");
}
builder._dataSlab = slab.Allocate(ms.Length);
ms.Position = 0;
ms.CopyTo(builder._dataSlab);
await ms.CopyToWithStatusAsync(ms.Length, builder._dataSlab, $"Writing {state.Path} {chunk.StartMip}:{chunk.EndMip}");
builder._packSize = (uint)ms.Length;
}
builder._dataSlab.Position = 0;
@ -199,14 +199,14 @@ namespace Compression.BSA
}
public void WriteData(BinaryWriter bw)
public async Task WriteData(BinaryWriter bw)
{
var pos = bw.BaseStream.Position;
bw.BaseStream.Position = _offsetOffset;
bw.Write((ulong)pos);
bw.BaseStream.Position = pos;
_dataSlab.CopyToLimit(bw.BaseStream, (int)_dataSlab.Length);
_dataSlab.Dispose();
await _dataSlab.CopyToLimitAsync(bw.BaseStream, (int)_dataSlab.Length);
await _dataSlab.DisposeAsync();
}
}
@ -218,7 +218,7 @@ namespace Compression.BSA
private long _offsetOffset;
private Stream _dataSrc;
public static BA2FileEntryBuilder Create(BA2FileEntryState state, Stream src, DiskSlabAllocator slab)
public static async Task<BA2FileEntryBuilder> Create(BA2FileEntryState state, Stream src, DiskSlabAllocator slab)
{
var builder = new BA2FileEntryBuilder
{
@ -229,17 +229,17 @@ namespace Compression.BSA
if (!state.Compressed)
return builder;
using (var ms = new MemoryStream())
await using (var ms = new MemoryStream())
{
using (var ds = new DeflaterOutputStream(ms))
await using (var ds = new DeflaterOutputStream(ms))
{
ds.IsStreamOwner = false;
builder._dataSrc.CopyTo(ds);
await builder._dataSrc.CopyToAsync(ds);
}
builder._dataSrc = slab.Allocate(ms.Length);
ms.Position = 0;
ms.CopyTo(builder._dataSrc);
await ms.CopyToAsync(builder._dataSrc);
builder._dataSrc.Position = 0;
builder._size = (int)ms.Length;
}
@ -264,15 +264,15 @@ namespace Compression.BSA
wtr.Write(_state.Align);
}
public void WriteData(BinaryWriter wtr)
public async Task WriteData(BinaryWriter wtr)
{
var pos = wtr.BaseStream.Position;
wtr.BaseStream.Position = _offsetOffset;
wtr.Write((ulong)pos);
wtr.BaseStream.Position = pos;
_dataSrc.Position = 0;
_dataSrc.CopyToLimit(wtr.BaseStream, (int)_dataSrc.Length);
_dataSrc.Dispose();
await _dataSrc.CopyToLimitAsync(wtr.BaseStream, (int)_dataSrc.Length);
await _dataSrc.DisposeAsync();
}
}
}

View File

@ -80,11 +80,11 @@ namespace Compression.BSA
{
_slab.Dispose();
}
public void AddFile(FileStateObject state, Stream src)
public async Task AddFile(FileStateObject state, Stream src)
{
var ostate = (BSAFileStateObject) state;
var r = FileEntry.Create(this, ostate.Path, src, ostate.FlipCompression);
var r = await FileEntry.Create(this, ostate.Path, src, ostate.FlipCompression);
lock (this)
{
@ -92,39 +92,38 @@ namespace Compression.BSA
}
}
public void Build(AbsolutePath outputName)
public async Task Build(AbsolutePath outputName)
{
RegenFolderRecords();
using (var fs = outputName.Create())
using (var wtr = new BinaryWriter(fs))
await using var fs = outputName.Create();
await using var wtr = new BinaryWriter(fs);
wtr.Write(_fileId);
wtr.Write(_version);
wtr.Write(_offset);
wtr.Write(_archiveFlags);
var folders = FolderNames.ToList();
wtr.Write((uint) folders.Count);
wtr.Write((uint) _files.Count);
wtr.Write((uint) _folders.Select(f => f._nameBytes.Count() - 1).Sum()); // totalFolderNameLength
var s = _files.Select(f => f._pathBytes.Count()).Sum();
_totalFileNameLength = (uint) _files.Select(f => f._nameBytes.Count()).Sum();
wtr.Write(_totalFileNameLength); // totalFileNameLength
wtr.Write(_fileFlags);
foreach (var folder in _folders) folder.WriteFolderRecord(wtr);
foreach (var folder in _folders)
{
wtr.Write(_fileId);
wtr.Write(_version);
wtr.Write(_offset);
wtr.Write(_archiveFlags);
var folders = FolderNames.ToList();
wtr.Write((uint) folders.Count);
wtr.Write((uint) _files.Count);
wtr.Write((uint) _folders.Select(f => f._nameBytes.Count() - 1).Sum()); // totalFolderNameLength
var s = _files.Select(f => f._pathBytes.Count()).Sum();
_totalFileNameLength = (uint) _files.Select(f => f._nameBytes.Count()).Sum();
wtr.Write(_totalFileNameLength); // totalFileNameLength
wtr.Write(_fileFlags);
foreach (var folder in _folders) folder.WriteFolderRecord(wtr);
foreach (var folder in _folders)
{
if (HasFolderNames)
wtr.Write(folder._nameBytes);
foreach (var file in folder._files) file.WriteFileRecord(wtr);
}
foreach (var file in _files) wtr.Write(file._nameBytes);
foreach (var file in _files)
file.WriteData(wtr);
if (HasFolderNames)
wtr.Write(folder._nameBytes);
foreach (var file in folder._files) file.WriteFileRecord(wtr);
}
foreach (var file in _files) wtr.Write(file._nameBytes);
await _files.DoProgress("Writing BSA Body", async file =>
await file.WriteData(wtr));
}
public void RegenFolderRecords()
@ -236,7 +235,7 @@ namespace Compression.BSA
internal byte[] _pathBytes;
private Stream _srcData;
public static FileEntry Create(BSABuilder bsa, RelativePath path, Stream src, bool flipCompression)
public static async Task<FileEntry> Create(BSABuilder bsa, RelativePath path, Stream src, bool flipCompression)
{
var entry = new FileEntry();
entry._bsa = bsa;
@ -252,7 +251,7 @@ namespace Compression.BSA
entry._originalSize = (int)entry._srcData.Length;
if (entry.Compressed)
entry.CompressData();
await entry.CompressData();
return entry;
}
@ -274,20 +273,20 @@ namespace Compression.BSA
public FolderRecordBuilder Folder => _folder;
private void CompressData()
private async Task CompressData()
{
switch (_bsa.HeaderType)
{
case VersionType.SSE:
{
var r = new MemoryStream();
using (var w = LZ4Stream.Encode(r, new LZ4EncoderSettings {CompressionLevel = LZ4Level.L12_MAX}, true))
await using (var w = LZ4Stream.Encode(r, new LZ4EncoderSettings {CompressionLevel = LZ4Level.L12_MAX}, true))
{
_srcData.CopyTo(w);
await _srcData.CopyToWithStatusAsync(_srcData.Length, w, $"Compressing {_path}");
}
_srcData = _bsa._slab.Allocate(r.Length);
r.Position = 0;
r.CopyTo(_srcData);
await r.CopyToWithStatusAsync(r.Length, _srcData, $"Writing {_path}");
_srcData.Position = 0;
break;
}
@ -298,11 +297,11 @@ namespace Compression.BSA
using (var w = new DeflaterOutputStream(r))
{
w.IsStreamOwner = false;
_srcData.CopyTo(w);
await _srcData.CopyToWithStatusAsync(_srcData.Length, w, $"Compressing {_path}");
}
_srcData = _bsa._slab.Allocate(r.Length);
r.Position = 0;
r.CopyTo(_srcData);
await r.CopyToWithStatusAsync(r.Length, _srcData, $"Writing {_path}");
_srcData.Position = 0;
break;
}
@ -327,7 +326,7 @@ namespace Compression.BSA
wtr.Write(0xDEADBEEF);
}
internal void WriteData(BinaryWriter wtr)
internal async Task WriteData(BinaryWriter wtr)
{
var offset = (uint) wtr.BaseStream.Position;
wtr.BaseStream.Position = _offsetOffset;
@ -340,14 +339,14 @@ namespace Compression.BSA
{
wtr.Write((uint) _originalSize);
_srcData.Position = 0;
_srcData.CopyToLimit(wtr.BaseStream, (int)_srcData.Length);
_srcData.Dispose();
await _srcData.CopyToLimitAsync(wtr.BaseStream, (int)_srcData.Length);
await _srcData.DisposeAsync();
}
else
{
_srcData.Position = 0;
_srcData.CopyToLimit(wtr.BaseStream, (int)_srcData.Length);
_srcData.Dispose();
await _srcData.CopyToLimitAsync(wtr.BaseStream, (int)_srcData.Length);
await _srcData.DisposeAsync();
}
}
}

View File

@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace Compression.BSA
@ -17,8 +18,8 @@ namespace Compression.BSA
public interface IBSABuilder : IAsyncDisposable
{
void AddFile(FileStateObject state, Stream src);
void Build(AbsolutePath filename);
Task AddFile(FileStateObject state, Stream src);
Task Build(AbsolutePath filename);
}
public class ArchiveStateObject

View File

@ -18,16 +18,16 @@ namespace Compression.BSA
_files = new (TES3FileState state, Stream data)[_state.FileCount];
}
public void AddFile(FileStateObject state, Stream src)
public async Task AddFile(FileStateObject state, Stream src)
{
var cstate = (TES3FileState)state;
_files[state.Index] = (cstate, src);
}
public void Build(AbsolutePath filename)
public async Task Build(AbsolutePath filename)
{
using var fs = filename.Create();
using var bw = new BinaryWriter(fs);
await using var fs = filename.Create();
await using var bw = new BinaryWriter(fs);
bw.Write(_state.VersionNumber);
bw.Write(_state.HashOffset);
@ -67,8 +67,8 @@ namespace Compression.BSA
foreach (var (state, data) in _files)
{
bw.BaseStream.Position = _state.DataOffset + state.Offset;
data.CopyTo(bw.BaseStream);
data.Dispose();
await data.CopyToAsync(bw.BaseStream);
await data.DisposeAsync();
}
}

View File

@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Wabbajack.Common;
using Path = Alphaleonis.Win32.Filesystem.Path;
@ -175,5 +176,21 @@ namespace Compression.BSA
tw.Flush();
}
/// <summary>
/// Asynchronously copies exactly <paramref name="limit"/> bytes from <paramref name="frm"/>
/// to <paramref name="tw"/>, then flushes the destination.
/// </summary>
/// <param name="frm">Source stream; read from its current position.</param>
/// <param name="tw">Destination stream.</param>
/// <param name="limit">Exact number of bytes to copy.</param>
/// <exception cref="EndOfStreamException">
/// Thrown if the source stream ends before <paramref name="limit"/> bytes have been read.
/// </exception>
public static async Task CopyToLimitAsync(this Stream frm, Stream tw, int limit)
{
    var buff = new byte[1024];
    while (limit > 0)
    {
        // Never read past the remaining byte budget.
        var toRead = Math.Min(buff.Length, limit);
        var read = await frm.ReadAsync(buff, 0, toRead);
        if (read == 0)
            throw new EndOfStreamException("End of stream before end of limit");
        await tw.WriteAsync(buff, 0, read);
        limit -= read;
    }

    await tw.FlushAsync();
}
}
}

View File

@ -1,4 +1,5 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@ -237,8 +238,8 @@ namespace Wabbajack.Common
public class JsonNameSerializationBinder : DefaultSerializationBinder
{
private static Dictionary<string, Type> _nameToType = new Dictionary<string, Type>();
private static Dictionary<Type, string> _typeToName = new Dictionary<Type, string>();
private static ConcurrentDictionary<string, Type> _nameToType = new ConcurrentDictionary<string, Type>();
private static ConcurrentDictionary<Type, string> _typeToName = new ConcurrentDictionary<Type, string>();
private static bool _init;
public JsonNameSerializationBinder()
@ -266,15 +267,13 @@ namespace Wabbajack.Common
.GetCustomAttributes(false)
.Any(y => y is JsonNameAttribute));
_nameToType = customDisplayNameTypes.ToDictionary(
t => t.GetCustomAttributes(false).OfType<JsonNameAttribute>().First().Name,
t => t);
_typeToName = _nameToType.ToDictionary(
t => t.Value,
t => t.Key);
foreach (var type in customDisplayNameTypes)
{
var strName = type.GetCustomAttributes(false).OfType<JsonNameAttribute>().First().Name;
_typeToName.TryAdd(type, strName);
_nameToType.TryAdd(strName, type);
}
_init = true;
}
public override Type BindToType(string? assemblyName, string typeName)
@ -288,6 +287,22 @@ namespace Wabbajack.Common
if (_nameToType.ContainsKey(typeName))
return _nameToType[typeName];
if (assemblyName != null)
{
var assembly = Assembly.Load(assemblyName);
var tp = assembly
.GetTypes()
.FirstOrDefault(tp => Enumerable.OfType<JsonNameAttribute>(tp.GetCustomAttributes(false))
.Any(a => a.Name == typeName));
if (tp != null)
{
_typeToName.TryAdd(tp, typeName);
_nameToType.TryAdd(typeName, tp);
return tp;
}
}
var val = Type.GetType(typeName);
if (val != null)
return val;
@ -312,16 +327,16 @@ namespace Wabbajack.Common
throw new InvalidDataException($"No Binding name for {serializedType}");
}
_nameToType[custom.Name] = serializedType;
_typeToName[serializedType] = custom.Name;
assemblyName = null;
_nameToType.TryAdd(custom.Name, serializedType);
_typeToName.TryAdd(serializedType, custom.Name);
assemblyName = serializedType.Assembly.FullName;
typeName = custom.Name;
return;
}
var name = _typeToName[serializedType];
assemblyName = null;
assemblyName = serializedType.Assembly.FullName;
typeName = name;
}
}

View File

@ -223,14 +223,19 @@ namespace Wabbajack.Common
var buffer = new byte[1024 * 64];
if (maxSize == 0) maxSize = 1;
long totalRead = 0;
long remain = maxSize;
while (true)
{
var read = await istream.ReadAsync(buffer, 0, buffer.Length);
var toRead = Math.Min(buffer.Length, remain);
var read = await istream.ReadAsync(buffer, 0, (int)toRead);
remain -= read;
if (read == 0) break;
totalRead += read;
await ostream.WriteAsync(buffer, 0, read);
Status(status, Percent.FactoryPutInRange(totalRead, maxSize));
}
await ostream.FlushAsync();
}
/// <summary>
@ -301,6 +306,16 @@ namespace Wabbajack.Common
idx += 1;
}
}
/// <summary>
/// Sequentially awaits <paramref name="f"/> for every element of <paramref name="coll"/>,
/// passing the element's zero-based position along with the element itself.
/// </summary>
public static async Task DoIndexed<T>(this IEnumerable<T> coll, Func<int, T, Task> f)
{
    var index = 0;
    foreach (var item in coll)
        await f(index++, item);
}
public static Task PDoIndexed<T>(this IEnumerable<T> coll, WorkQueue queue, Action<int, T> f)
{
@ -625,6 +640,16 @@ namespace Wabbajack.Common
f(i);
});
}
/// <summary>
/// Sequentially awaits <paramref name="f"/> for every element of <paramref name="coll"/>,
/// reporting progress via <c>Status</c> (message <paramref name="msg"/>) before each element.
/// The collection is materialized once so its count is known for the percentage.
/// </summary>
public static async Task DoProgress<T>(this IEnumerable<T> coll, string msg, Func<T, Task> f)
{
    var items = coll.ToList();
    var position = 0;
    foreach (var item in items)
    {
        Status(msg, Percent.FactoryPutInRange(position, items.Count));
        await f(item);
        position += 1;
    }
}
public static void OnQueue(Action f)
{

View File

@ -60,6 +60,17 @@ namespace Wabbajack.Lib.CompilationSteps
if (_includeDirectly.Any(path => source.Path.StartsWith(path)))
defaultInclude = true;
if (source.AbsolutePath.Size > 2_000_000_000)
{
await using var bsa = BSADispatch.OpenRead(source.AbsolutePath);
if (bsa.State is BSAStateObject)
{
Utils.Error(
$"BSA {source.AbsolutePath.FileName} is over 2GB in size, very few programs (Including Wabbajack) can create BSA files this large without causing CTD issues." +
$"Please re-compress this BSA into a more manageable size.");
}
}
var sourceFiles = source.File.Children;
var stack = defaultInclude ? _microstackWithInclude(source.File) : _microstack(source.File);

View File

@ -259,20 +259,18 @@ namespace Wabbajack.Lib
var bsaSize = bsa.FileStates.Select(state => sourceDir.Combine(state.Path).Size).Sum();
await using (var a = bsa.State.MakeBuilder(bsaSize))
await using var a = bsa.State.MakeBuilder(bsaSize);
var streams = await bsa.FileStates.PMap(Queue, async state =>
{
var streams = await bsa.FileStates.PMap(Queue, state =>
{
Status($"Adding {state.Path} to BSA");
var fs = sourceDir.Combine(state.Path).OpenRead();
a.AddFile(state, fs);
return fs;
});
Status($"Adding {state.Path} to BSA");
var fs = sourceDir.Combine(state.Path).OpenRead();
await a.AddFile(state, fs);
return fs;
});
Info($"Writing {bsa.To}");
a.Build(OutputFolder.Combine(bsa.To));
streams.Do(s => s.Dispose());
}
Info($"Writing {bsa.To}");
await a.Build(OutputFolder.Combine(bsa.To));
streams.Do(s => s.Dispose());
}
var bsaDir = OutputFolder.Combine(Consts.BSACreationDir);