Many compiler fixes

This commit is contained in:
Timothy Baldridge 2022-09-25 16:36:12 -06:00
parent a3ccc58614
commit cc329bd04c
7 changed files with 88 additions and 95 deletions

View File

@ -5,6 +5,9 @@
* Improve performance of the compiler stack
* Save the location of the browser window and open the next window in the same location
* Fix a leak of msedgewebview2.exe instances when doing manual downloads
* Massively improve patch load times
* Massively improve patch build times
* Reduce situations where the UI appears to be hung due to the above two issues
#### Version - 3.0.1.4 - 9/21/2022

View File

@ -16,6 +16,7 @@ using Wabbajack.DTOs;
using Wabbajack.DTOs.Directives;
using Wabbajack.DTOs.DownloadStates;
using Wabbajack.DTOs.JsonConverters;
using Wabbajack.FileExtractor.ExtractedFiles;
using Wabbajack.Hashing.xxHash64;
using Wabbajack.Installer;
using Wabbajack.Networking.WabbajackClientApi;
@ -447,7 +448,13 @@ public abstract class ACompiler
/// </summary>
protected async Task BuildPatches(CancellationToken token)
{
await using var tempPath = _manager.CreateFolder();
AbsolutePath TempPath(Hash file)
{
return tempPath.Path.Combine(file.ToHex());
}
NextStep("Compiling","Looking for patches");
var toBuild = InstallDirectives.OfType<PatchedFromArchive>()
.Where(p => _patchOptions.GetValueOrDefault(p, Array.Empty<VirtualFile>()).Length > 0)
@ -455,42 +462,50 @@ public abstract class ACompiler
{
To = p.To,
Hash = p.Hash,
FromHash = c.Hash,
ArchiveHashPath = c.MakeRelativePaths(),
Size = p.Size
}))
.ToArray();
NextStep("Compiling","Generating Patches", toBuild.Length);
if (toBuild.Length == 0) return;
if (toBuild.Any())
{
// Extract all the source files
var indexed = toBuild.GroupBy(f => _vfs.Index.FileForArchiveHashPath(f.ArchiveHashPath))
.ToDictionary(f => f.Key);
await _vfs.Extract(indexed.Keys.ToHashSet(),
async (vf, sf) =>
NextStep("Compiling", "Generating Patches", toBuild.Length);
var allFiles = toBuild.SelectMany(f => new[]
{
_vfs.Index.FileForArchiveHashPath(f.ArchiveHashPath),
FindDestFile(f.To)
})
.DistinctBy(f => f.Hash)
.ToHashSet();
_logger.LogInformation("Extracting {Count} ({Size}) files for building patches", allFiles.Count,
allFiles.Sum(f => f.Size).ToFileSizeString());
NextStep("Compiling", "Extracting Patch Files", allFiles.Count);
await _vfs.Extract(allFiles, async (vf, file) =>
{
UpdateProgress(1);
// For each, extract the destination
var matches = indexed[vf];
foreach (var match in matches)
{
var destFile = FindDestFile(match.To);
_logger.LogInformation("Patching {from} {to}", destFile, match.To);
// Build the patch
await _vfs.Extract(new[] {destFile}.ToHashSet(),
async (destvf, destsfn) =>
{
await using var ostream = TempPath(vf.Hash).Open(FileMode.Create, FileAccess.Write, FileShare.Read);
await using var istream = await file.GetStream();
await istream.CopyToAsync(ostream, token);
}, token);
await using var srcStream = await sf.GetStream();
await using var destStream = await destsfn.GetStream();
using var _ = await CompilerLimiter.Begin($"Patching {match.To}", 100, token);
var patchSize =
await _patchCache.CreatePatch(srcStream, vf.Hash, destStream, destvf.Hash);
_logger.LogInformation("Patch size {patchSize} for {to}", patchSize, match.To);
}, token);
}
}, token, runInParallel: false);
if (toBuild.Length == 0) return;
NextStep("Compiling", "Generating Patch Files", toBuild.Length);
await toBuild.PMapAllBatched(CompilerLimiter, async patch =>
{
UpdateProgress(1);
await using var src = TempPath(patch.FromHash).Open(FileMode.Open, FileAccess.Read, FileShare.Read);
await using var dst = TempPath(patch.Hash).Open(FileMode.Open, FileAccess.Read, FileShare.Read);
await _patchCache.CreatePatch(src, patch.FromHash, dst, patch.Hash);
return patch;
}).ToList();
}
NextStep("Compiling", "Loading Patch Files");
// Load in the patches
await InstallDirectives.OfType<PatchedFromArchive>()
.Where(p => p.PatchID == default)

View File

@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Wabbajack.Common;
using Wabbajack.Compression.BSA;
using Wabbajack.DTOs;
@ -72,6 +73,8 @@ public class DeconstructBSAs : ACompilationStep
$"BSA {source.AbsolutePath.FileName} is over 2GB in size, very few programs (Including Wabbajack) can create BSA files this large without causing CTD issues." +
"Please re-compress this BSA into a more manageable size.");
}
_compiler._logger.LogInformation("Deconstructing BSA: {Name}", source.File.FullPath.FileName);
var sourceFiles = source.File.Children;
@ -86,8 +89,8 @@ public class DeconstructBSAs : ACompilationStep
//_cleanup = await source.File.Context.Stage(source.File.Children);
}
var matches = await sourceFiles.PMapAll(_compiler.CompilerLimiter,
e => _mo2Compiler.RunStack(stack,
var matches = await sourceFiles.SelectAsync(
async e => await _mo2Compiler.RunStack(stack,
new RawSourceFile(e, Consts.BSACreationDir.Combine(id, (RelativePath) e.Name))))
.ToList();

View File

@ -1,7 +1,10 @@
using System;
using System.Data.SQLite;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Wabbajack.Common;
using Wabbajack.Compiler.PatchCache;
using Wabbajack.Hashing.xxHash64;
using Wabbajack.Paths;
@ -15,94 +18,63 @@ public class BinaryPatchCache : IBinaryPatchCache
private readonly SQLiteConnection _conn;
private readonly string _connectionString;
private readonly AbsolutePath _location;
private readonly ILogger<BinaryPatchCache> _logger;
public BinaryPatchCache(AbsolutePath location)
public BinaryPatchCache(ILogger<BinaryPatchCache> logger, AbsolutePath location)
{
_logger = logger;
_location = location;
if (!_location.Parent.DirectoryExists())
_location.Parent.CreateDirectory();
if (!_location.DirectoryExists())
_location.CreateDirectory();
}
_connectionString =
string.Intern($"URI=file:{location.ToString()};Pooling=True;Max Pool Size=100; Journal Mode=Memory;");
_conn = new SQLiteConnection(_connectionString);
_conn.Open();
using var cmd = new SQLiteCommand(_conn);
cmd.CommandText = @"CREATE TABLE IF NOT EXISTS PatchCache (
FromHash BIGINT,
ToHash BIGINT,
PatchSize BLOB,
Patch BLOB,
PRIMARY KEY (FromHash, ToHash))
WITHOUT ROWID;";
cmd.ExecuteNonQuery();
/// <summary>
/// Computes the on-disk location of the cached patch that transforms a file
/// with <paramref name="srcHash"/> into a file with <paramref name="destHash"/>.
/// The file name is "{srcHex}_{destHex}.octodiff" inside the cache folder
/// (<c>_location</c>); existence of this file is what the cache checks for a hit.
/// </summary>
/// <param name="srcHash">Hash of the source (from) file.</param>
/// <param name="destHash">Hash of the destination (to) file.</param>
/// <returns>Absolute path of the octodiff patch file for this hash pair.</returns>
public AbsolutePath PatchLocation(Hash srcHash, Hash destHash)
{
return _location.Combine($"{srcHash.ToHex()}_{destHash.ToHex()}.octodiff");
}
public async Task<CacheEntry> CreatePatch(Stream srcStream, Hash srcHash, Stream destStream, Hash destHash, IJob? job)
{
await using var rcmd = new SQLiteCommand(_conn);
rcmd.CommandText = "SELECT PatchSize FROM PatchCache WHERE FromHash = @fromHash AND ToHash = @toHash";
rcmd.Parameters.AddWithValue("@fromHash", (long) srcHash);
rcmd.Parameters.AddWithValue("@toHash", (long) destHash);
await using var rdr = await rcmd.ExecuteReaderAsync();
while (await rdr.ReadAsync()) return new CacheEntry(srcHash, destHash, rdr.GetInt64(0), this);
await using var cmd = new SQLiteCommand(_conn);
cmd.CommandText = @"INSERT INTO PatchCache (FromHash, ToHash, PatchSize, Patch)
VALUES (@fromHash, @toHash, @patchSize, @patch)";
cmd.Parameters.AddWithValue("@fromHash", (long) srcHash);
cmd.Parameters.AddWithValue("@toHash", (long) destHash);
var location = PatchLocation(srcHash, destHash);
if (location.FileExists())
return new CacheEntry(srcHash, destHash, location.Size(), this);
await using var sigStream = new MemoryStream();
await using var patchStream = new MemoryStream();
OctoDiff.Create(srcStream, destStream, sigStream, patchStream, job);
cmd.Parameters.AddWithValue("@patchSize", patchStream.Length);
cmd.Parameters.AddWithValue("@patch", patchStream.ToArray());
var tempName = _location.Combine(Guid.NewGuid().ToString()).WithExtension(Ext.Temp);
await using var patchStream = tempName.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.None);
try
{
await cmd.ExecuteNonQueryAsync();
OctoDiff.Create(srcStream, destStream, sigStream, patchStream, job);
patchStream.Close();
await tempName.MoveToAsync(location, true, CancellationToken.None);
}
catch (SQLiteException ex)
finally
{
if (!ex.Message.StartsWith("constraint failed"))
throw;
await patchStream.DisposeAsync();
if (tempName.FileExists())
tempName.Delete();
}
return new CacheEntry(srcHash, destHash, patchStream.Length, this);
return new CacheEntry(srcHash, destHash, location.Size(), this);
}
public async Task<CacheEntry?> GetPatch(Hash fromHash, Hash toHash)
{
await using var cmd = new SQLiteCommand(_conn);
cmd.CommandText = @"SELECT PatchSize FROM PatchCache WHERE FromHash = @fromHash AND ToHash = @toHash";
cmd.Parameters.AddWithValue("@fromHash", (long) fromHash);
cmd.Parameters.AddWithValue("@toHash", (long) toHash);
await using var rdr = await cmd.ExecuteReaderAsync();
while (await rdr.ReadAsync()) return new CacheEntry(fromHash, toHash, rdr.GetInt64(0), this);
var location = PatchLocation(fromHash, toHash);
if (location.FileExists())
return new CacheEntry(fromHash, toHash, location.Size(), this);
return null;
}
public async Task<byte[]> GetData(CacheEntry entry)
{
await using var cmd = new SQLiteCommand(_conn);
cmd.CommandText = @"SELECT PatchSize, Patch FROM PatchCache WHERE FromHash = @fromHash AND ToHash = @toHash";
cmd.Parameters.AddWithValue("@fromHash", (long) entry.From);
cmd.Parameters.AddWithValue("@toHash", (long) entry.To);
await using var rdr = await cmd.ExecuteReaderAsync();
while (await rdr.ReadAsync())
{
var array = new byte[rdr.GetInt64(0)];
rdr.GetBytes(1, 0, array, 0, array.Length);
return array;
}
var location = PatchLocation(entry.From, entry.To);
if (location.FileExists())
return await location.ReadAllBytesAsync();
return Array.Empty<byte>();
}

View File

@ -459,7 +459,7 @@ public static class GameRegistry
public static GameMetaData? GetByMO2ArchiveName(string gameName)
{
return Games.Values.FirstOrDefault(g => (g.MO2ArchiveName ?? g.NexusName)!.Equals(gameName, StringComparison.InvariantCultureIgnoreCase));
return Games.Values.FirstOrDefault(g => (g.MO2ArchiveName ?? g.NexusName ?? "")!.Equals(gameName, StringComparison.InvariantCultureIgnoreCase));
}
public static GameMetaData? GetByNexusName(string gameName)

View File

@ -155,7 +155,7 @@ public struct AbsolutePath : IPath, IComparable<AbsolutePath>, IEquatable<Absolu
return ArrayExtensions.AreEqualIgnoreCase(parent.Parts, 0, Parts, 0, parent.Parts.Length);
}
public AbsolutePath Combine(params object[] paths)
public readonly AbsolutePath Combine(params object[] paths)
{
var converted = paths.Select(p =>
{

View File

@ -69,8 +69,8 @@ public static class ServiceExtensions
});
service.AddSingleton<IBinaryPatchCache>(s => options.UseLocalCache
? new BinaryPatchCache(s.GetService<TemporaryFileManager>()!.CreateFile().Path)
: new BinaryPatchCache(KnownFolders.WabbajackAppLocal.Combine("patchCache.sqlite")));
? new BinaryPatchCache(s.GetRequiredService<ILogger<BinaryPatchCache>>(), s.GetService<TemporaryFileManager>()!.CreateFolder().Path)
: new BinaryPatchCache(s.GetRequiredService<ILogger<BinaryPatchCache>>(),KnownFolders.WabbajackAppLocal.Combine("PatchCache")));
service.AddSingleton(new ParallelOptions {MaxDegreeOfParallelism = Environment.ProcessorCount});