Mirror of https://github.com/wabbajack-tools/wabbajack.git (synced 2024-08-30 18:42:17 +00:00)

Many compiler fixes

This commit is contained in:
parent a3ccc58614
commit cc329bd04c
@@ -5,6 +5,9 @@
 * Improve performance of the compiler stack
 * Save the location of the browser window and open the next window in the same location
 * Fix a leak of msedgwebview2.exe instances when doing manual downloads
+* Massively improve patch load times
+* Massively improve patch build times
+* Reduce situations where the UI appears to be hung due to the above two issues
 
 
 #### Version - 3.0.1.4 - 9/21/2022
@@ -16,6 +16,7 @@ using Wabbajack.DTOs;
 using Wabbajack.DTOs.Directives;
 using Wabbajack.DTOs.DownloadStates;
 using Wabbajack.DTOs.JsonConverters;
+using Wabbajack.FileExtractor.ExtractedFiles;
 using Wabbajack.Hashing.xxHash64;
 using Wabbajack.Installer;
 using Wabbajack.Networking.WabbajackClientApi;
@@ -447,6 +448,12 @@ public abstract class ACompiler
     /// </summary>
     protected async Task BuildPatches(CancellationToken token)
     {
+        await using var tempPath = _manager.CreateFolder();
+
+        AbsolutePath TempPath(Hash file)
+        {
+            return tempPath.Path.Combine(file.ToHex());
+        }
 
         NextStep("Compiling","Looking for patches");
         var toBuild = InstallDirectives.OfType<PatchedFromArchive>()
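Note: the local TempPath helper added above gives each extracted file a stable, content-addressed name inside a scratch folder. A minimal standalone sketch of that layout, using plain BCL types in place of Wabbajack's TemporaryFileManager, AbsolutePath and Hash (the names below are illustrative only):

using System;
using System.IO;

static class PatchScratch
{
    // Create a scratch directory the caller can delete once patch building is finished.
    public static string CreatePatchScratchDir()
    {
        var dir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(dir);
        return dir;
    }

    // Each extracted file is stored once, keyed by its content hash, so a file that feeds
    // several patches only has to be written to disk a single time.
    public static string TempPath(string scratchDir, ulong xxHash64)
    {
        return Path.Combine(scratchDir, xxHash64.ToString("x16"));
    }
}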
@@ -455,42 +462,50 @@ public abstract class ACompiler
             {
                 To = p.To,
                 Hash = p.Hash,
+                FromHash = c.Hash,
                 ArchiveHashPath = c.MakeRelativePaths(),
                 Size = p.Size
             }))
             .ToArray();
 
-        NextStep("Compiling","Generating Patches", toBuild.Length);
-        if (toBuild.Length == 0) return;
-
-        // Extract all the source files
-        var indexed = toBuild.GroupBy(f => _vfs.Index.FileForArchiveHashPath(f.ArchiveHashPath))
-            .ToDictionary(f => f.Key);
-        await _vfs.Extract(indexed.Keys.ToHashSet(),
-            async (vf, sf) =>
-            {
-                UpdateProgress(1);
-                // For each, extract the destination
-                var matches = indexed[vf];
-                foreach (var match in matches)
-                {
-                    var destFile = FindDestFile(match.To);
-                    _logger.LogInformation("Patching {from} {to}", destFile, match.To);
-                    // Build the patch
-                    await _vfs.Extract(new[] {destFile}.ToHashSet(),
-                        async (destvf, destsfn) =>
-                        {
-                            await using var srcStream = await sf.GetStream();
-                            await using var destStream = await destsfn.GetStream();
-                            using var _ = await CompilerLimiter.Begin($"Patching {match.To}", 100, token);
-                            var patchSize =
-                                await _patchCache.CreatePatch(srcStream, vf.Hash, destStream, destvf.Hash);
-                            _logger.LogInformation("Patch size {patchSize} for {to}", patchSize, match.To);
-                        }, token);
-                }
-            }, token, runInParallel: false);
+        if (toBuild.Any())
+        {
+            NextStep("Compiling", "Generating Patches", toBuild.Length);
+            var allFiles = toBuild.SelectMany(f => new[]
+                {
+                    _vfs.Index.FileForArchiveHashPath(f.ArchiveHashPath),
+                    FindDestFile(f.To)
+                })
+                .DistinctBy(f => f.Hash)
+                .ToHashSet();
+            _logger.LogInformation("Extracting {Count} ({Size}) files for building patches", allFiles.Count,
+                allFiles.Sum(f => f.Size).ToFileSizeString());
+
+            NextStep("Compiling", "Extracting Patch Files", allFiles.Count);
+            await _vfs.Extract(allFiles, async (vf, file) =>
+            {
+                UpdateProgress(1);
+                await using var ostream = TempPath(vf.Hash).Open(FileMode.Create, FileAccess.Write, FileShare.Read);
+                await using var istream = await file.GetStream();
+                await istream.CopyToAsync(ostream, token);
+            }, token);
+
+            if (toBuild.Length == 0) return;
+
+            NextStep("Compiling", "Generating Patch Files", toBuild.Length);
+            await toBuild.PMapAllBatched(CompilerLimiter, async patch =>
+            {
+                UpdateProgress(1);
+                await using var src = TempPath(patch.FromHash).Open(FileMode.Open, FileAccess.Read, FileShare.Read);
+                await using var dst = TempPath(patch.Hash).Open(FileMode.Open, FileAccess.Read, FileShare.Read);
+                await _patchCache.CreatePatch(src, patch.FromHash, dst, patch.Hash);
+                return patch;
+            }).ToList();
+        }
+
+        NextStep("Compiling", "Loading Patch Files");
         // Load in the patches
         await InstallDirectives.OfType<PatchedFromArchive>()
             .Where(p => p.PatchID == default)
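The rewrite above splits patch generation into two phases: first every distinct source and destination file is extracted exactly once into the scratch folder, then the patches are diffed from those on-disk copies in parallel batches. A minimal sketch of that strategy with plain .NET types; PatchPlan, extractAsync, createPatchAsync and TwoPhasePatching below are illustrative stand-ins, not Wabbajack's API:

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

record PatchPlan(ulong FromHash, ulong ToHash);

static class TwoPhasePatching
{
    public static async Task BuildAsync(
        IReadOnlyList<PatchPlan> plans,
        Func<ulong, string, CancellationToken, Task> extractAsync,       // hash -> write the file to a path
        Func<Stream, Stream, CancellationToken, Task> createPatchAsync,  // diff source against destination
        string scratchDir,
        CancellationToken token)
    {
        // Phase 1: extract each distinct hash exactly once, even if it feeds many patches.
        var distinctHashes = plans.SelectMany(p => new[] { p.FromHash, p.ToHash }).Distinct();
        foreach (var hash in distinctHashes)
            await extractAsync(hash, Path.Combine(scratchDir, hash.ToString("x16")), token);

        // Phase 2: diff the extracted copies in parallel; no archive needs to be reopened here.
        await Parallel.ForEachAsync(plans, token, async (plan, ct) =>
        {
            await using var src = File.OpenRead(Path.Combine(scratchDir, plan.FromHash.ToString("x16")));
            await using var dst = File.OpenRead(Path.Combine(scratchDir, plan.ToHash.ToString("x16")));
            await createPatchAsync(src, dst, ct);
        });
    }
}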
@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
 using Wabbajack.Common;
 using Wabbajack.Compression.BSA;
 using Wabbajack.DTOs;
@@ -73,6 +74,8 @@ public class DeconstructBSAs : ACompilationStep
                 "Please re-compress this BSA into a more manageable size.");
         }
 
+        _compiler._logger.LogInformation("Deconstructing BSA: {Name}", source.File.FullPath.FileName);
+
         var sourceFiles = source.File.Children;
 
         var stack = defaultInclude ? _microstackWithInclude(source.File) : _microstack(source.File);
@@ -86,8 +89,8 @@ public class DeconstructBSAs : ACompilationStep
             //_cleanup = await source.File.Context.Stage(source.File.Children);
         }
 
-        var matches = await sourceFiles.PMapAll(_compiler.CompilerLimiter,
-                e => _mo2Compiler.RunStack(stack,
+        var matches = await sourceFiles.SelectAsync(
+                async e => await _mo2Compiler.RunStack(stack,
                     new RawSourceFile(e, Consts.BSACreationDir.Combine(id, (RelativePath) e.Name))))
             .ToList();
 
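The change above swaps a parallel PMapAll over the compiler limiter for a SelectAsync call. Wabbajack ships its own SelectAsync/ToList helpers; a plausible shape for such helpers, here a simple sequential one, is sketched purely for illustration:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

static class AsyncSelectExtensions
{
    // Project each item through an async function, awaiting one item at a time in source order.
    public static async IAsyncEnumerable<TOut> SelectAsync<TIn, TOut>(
        this IEnumerable<TIn> items, Func<TIn, Task<TOut>> fn)
    {
        foreach (var item in items)
            yield return await fn(item);
    }

    // Materialize the async sequence, mirroring the .ToList() call in the diff.
    public static async Task<List<T>> ToList<T>(this IAsyncEnumerable<T> items)
    {
        var result = new List<T>();
        await foreach (var item in items)
            result.Add(item);
        return result;
    }
}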
@@ -1,7 +1,10 @@
 using System;
 using System.Data.SQLite;
 using System.IO;
+using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Wabbajack.Common;
 using Wabbajack.Compiler.PatchCache;
 using Wabbajack.Hashing.xxHash64;
 using Wabbajack.Paths;
@@ -15,94 +18,63 @@ public class BinaryPatchCache : IBinaryPatchCache
     private readonly SQLiteConnection _conn;
     private readonly string _connectionString;
     private readonly AbsolutePath _location;
+    private readonly ILogger<BinaryPatchCache> _logger;
 
-    public BinaryPatchCache(AbsolutePath location)
+    public BinaryPatchCache(ILogger<BinaryPatchCache> logger, AbsolutePath location)
     {
+        _logger = logger;
         _location = location;
-        if (!_location.Parent.DirectoryExists())
-            _location.Parent.CreateDirectory();
+        if (!_location.DirectoryExists())
+            _location.CreateDirectory();
+    }
 
-        _connectionString =
-            string.Intern($"URI=file:{location.ToString()};Pooling=True;Max Pool Size=100; Journal Mode=Memory;");
-        _conn = new SQLiteConnection(_connectionString);
-        _conn.Open();
-
-        using var cmd = new SQLiteCommand(_conn);
-        cmd.CommandText = @"CREATE TABLE IF NOT EXISTS PatchCache (
-            FromHash BIGINT,
-            ToHash BIGINT,
-            PatchSize BLOB,
-            Patch BLOB,
-            PRIMARY KEY (FromHash, ToHash))
-            WITHOUT ROWID;";
-
-        cmd.ExecuteNonQuery();
+    public AbsolutePath PatchLocation(Hash srcHash, Hash destHash)
+    {
+        return _location.Combine($"{srcHash.ToHex()}_{destHash.ToHex()}.octodiff");
     }
 
     public async Task<CacheEntry> CreatePatch(Stream srcStream, Hash srcHash, Stream destStream, Hash destHash, IJob? job)
     {
-        await using var rcmd = new SQLiteCommand(_conn);
-        rcmd.CommandText = "SELECT PatchSize FROM PatchCache WHERE FromHash = @fromHash AND ToHash = @toHash";
-        rcmd.Parameters.AddWithValue("@fromHash", (long) srcHash);
-        rcmd.Parameters.AddWithValue("@toHash", (long) destHash);
-
-        await using var rdr = await rcmd.ExecuteReaderAsync();
-        while (await rdr.ReadAsync()) return new CacheEntry(srcHash, destHash, rdr.GetInt64(0), this);
-
-        await using var cmd = new SQLiteCommand(_conn);
-        cmd.CommandText = @"INSERT INTO PatchCache (FromHash, ToHash, PatchSize, Patch)
-            VALUES (@fromHash, @toHash, @patchSize, @patch)";
-
-        cmd.Parameters.AddWithValue("@fromHash", (long) srcHash);
-        cmd.Parameters.AddWithValue("@toHash", (long) destHash);
+        var location = PatchLocation(srcHash, destHash);
+        if (location.FileExists())
+            return new CacheEntry(srcHash, destHash, location.Size(), this);
 
         await using var sigStream = new MemoryStream();
-        await using var patchStream = new MemoryStream();
-        OctoDiff.Create(srcStream, destStream, sigStream, patchStream, job);
-
-        cmd.Parameters.AddWithValue("@patchSize", patchStream.Length);
-        cmd.Parameters.AddWithValue("@patch", patchStream.ToArray());
+        var tempName = _location.Combine(Guid.NewGuid().ToString()).WithExtension(Ext.Temp);
+        await using var patchStream = tempName.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.None);
 
         try
         {
-            await cmd.ExecuteNonQueryAsync();
+            OctoDiff.Create(srcStream, destStream, sigStream, patchStream, job);
+
+            patchStream.Close();
+            await tempName.MoveToAsync(location, true, CancellationToken.None);
         }
-        catch (SQLiteException ex)
+        finally
         {
-            if (!ex.Message.StartsWith("constraint failed"))
-                throw;
+            await patchStream.DisposeAsync();
+            if (tempName.FileExists())
+                tempName.Delete();
         }
 
-        return new CacheEntry(srcHash, destHash, patchStream.Length, this);
+        return new CacheEntry(srcHash, destHash, location.Size(), this);
     }
 
 
     public async Task<CacheEntry?> GetPatch(Hash fromHash, Hash toHash)
     {
-        await using var cmd = new SQLiteCommand(_conn);
-        cmd.CommandText = @"SELECT PatchSize FROM PatchCache WHERE FromHash = @fromHash AND ToHash = @toHash";
-        cmd.Parameters.AddWithValue("@fromHash", (long) fromHash);
-        cmd.Parameters.AddWithValue("@toHash", (long) toHash);
-
-        await using var rdr = await cmd.ExecuteReaderAsync();
-        while (await rdr.ReadAsync()) return new CacheEntry(fromHash, toHash, rdr.GetInt64(0), this);
+        var location = PatchLocation(fromHash, toHash);
+        if (location.FileExists())
+            return new CacheEntry(fromHash, toHash, location.Size(), this);
 
         return null;
     }
 
     public async Task<byte[]> GetData(CacheEntry entry)
     {
-        await using var cmd = new SQLiteCommand(_conn);
-        cmd.CommandText = @"SELECT PatchSize, Patch FROM PatchCache WHERE FromHash = @fromHash AND ToHash = @toHash";
-        cmd.Parameters.AddWithValue("@fromHash", (long) entry.From);
-        cmd.Parameters.AddWithValue("@toHash", (long) entry.To);
-
-        await using var rdr = await cmd.ExecuteReaderAsync();
-        while (await rdr.ReadAsync())
-        {
-            var array = new byte[rdr.GetInt64(0)];
-            rdr.GetBytes(1, 0, array, 0, array.Length);
-            return array;
-        }
+        var location = PatchLocation(entry.From, entry.To);
+        if (location.FileExists())
+            return await location.ReadAllBytesAsync();
 
         return Array.Empty<byte>();
     }
 
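The cache above moves from SQLite blobs to one file per patch, named <fromHash>_<toHash>.octodiff, written under a temporary name and then moved into place so a reader never observes a half-written patch. A condensed sketch of that publish pattern, with plain System.IO calls standing in for Wabbajack's AbsolutePath helpers (all names below are illustrative):

using System;
using System.IO;

static class FilePatchCacheSketch
{
    public static string PatchLocation(string cacheDir, ulong fromHash, ulong toHash)
        => Path.Combine(cacheDir, $"{fromHash:x16}_{toHash:x16}.octodiff");

    public static void PublishPatch(string cacheDir, ulong fromHash, ulong toHash, byte[] patchBytes)
    {
        var final = PatchLocation(cacheDir, fromHash, toHash);
        if (File.Exists(final)) return;                       // already cached, nothing to do

        var temp = Path.Combine(cacheDir, Guid.NewGuid().ToString("N") + ".temp");
        try
        {
            File.WriteAllBytes(temp, patchBytes);             // write off to the side first
            File.Move(temp, final, overwrite: true);          // then publish with a rename
        }
        finally
        {
            if (File.Exists(temp)) File.Delete(temp);         // clean up if the move never happened
        }
    }
}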
@@ -459,7 +459,7 @@ public static class GameRegistry
 
     public static GameMetaData? GetByMO2ArchiveName(string gameName)
     {
-        return Games.Values.FirstOrDefault(g => (g.MO2ArchiveName ?? g.NexusName)!.Equals(gameName, StringComparison.InvariantCultureIgnoreCase));
+        return Games.Values.FirstOrDefault(g => (g.MO2ArchiveName ?? g.NexusName ?? "")!.Equals(gameName, StringComparison.InvariantCultureIgnoreCase));
     }
 
     public static GameMetaData? GetByNexusName(string gameName)
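Why the extra "?? \"\"" matters: for a game entry with neither an MO2ArchiveName nor a NexusName, the old expression dereferenced null at runtime (the "!" only suppresses the compiler warning). A minimal illustration with hypothetical values:

using System;

static class NullCoalesceExample
{
    public static bool MatchesOld(string? mo2Name, string? nexusName, string gameName)
        => (mo2Name ?? nexusName)!.Equals(gameName, StringComparison.InvariantCultureIgnoreCase);
        // throws NullReferenceException when both names are null

    public static bool MatchesNew(string? mo2Name, string? nexusName, string gameName)
        => (mo2Name ?? nexusName ?? "").Equals(gameName, StringComparison.InvariantCultureIgnoreCase);
        // falls back to "" and simply returns false for such entries
}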
@@ -155,7 +155,7 @@ public struct AbsolutePath : IPath, IComparable<AbsolutePath>, IEquatable<Absolu
         return ArrayExtensions.AreEqualIgnoreCase(parent.Parts, 0, Parts, 0, parent.Parts.Length);
     }
 
-    public AbsolutePath Combine(params object[] paths)
+    public readonly AbsolutePath Combine(params object[] paths)
     {
         var converted = paths.Select(p =>
         {
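The readonly modifier on a struct member promises the method does not mutate "this", which lets the compiler skip the hidden defensive copy it otherwise makes when the member is called through an "in" parameter or a readonly field. A minimal illustration (not Wabbajack's actual type):

using System;

public struct PathLike
{
    private readonly string _value;
    public PathLike(string value) => _value = value;

    // readonly member: callable on a readonly/in instance without copying the struct first.
    public readonly PathLike Combine(string part) => new PathLike(_value + "/" + part);
}

static class Demo
{
    private static readonly PathLike Root = new PathLike("C:");

    public static PathLike Settings() => Root.Combine("settings.json"); // no defensive copy of Root
}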
@@ -69,8 +69,8 @@ public static class ServiceExtensions
         });
 
         service.AddSingleton<IBinaryPatchCache>(s => options.UseLocalCache
-            ? new BinaryPatchCache(s.GetService<TemporaryFileManager>()!.CreateFile().Path)
-            : new BinaryPatchCache(KnownFolders.WabbajackAppLocal.Combine("patchCache.sqlite")));
+            ? new BinaryPatchCache(s.GetRequiredService<ILogger<BinaryPatchCache>>(), s.GetService<TemporaryFileManager>()!.CreateFolder().Path)
+            : new BinaryPatchCache(s.GetRequiredService<ILogger<BinaryPatchCache>>(), KnownFolders.WabbajackAppLocal.Combine("PatchCache")));
 
         service.AddSingleton(new ParallelOptions {MaxDegreeOfParallelism = Environment.ProcessorCount});
 
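The registration now resolves an ILogger<BinaryPatchCache> from the container and points the cache at a folder rather than a single .sqlite file. A self-contained sketch of the same factory-lambda pattern, using stand-in cache types so it compiles on its own:

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;

public interface IPatchCacheStandIn { }

public class PatchCacheStandIn : IPatchCacheStandIn
{
    // Stand-in for BinaryPatchCache: takes a typed logger plus a cache folder path.
    public PatchCacheStandIn(ILogger<PatchCacheStandIn> logger, string cacheFolder) { }
}

public static class CacheRegistrationSketch
{
    public static IServiceCollection AddPatchCache(this IServiceCollection services, string cacheFolder)
    {
        services.AddLogging();
        services.AddSingleton<IPatchCacheStandIn>(s =>
            new PatchCacheStandIn(s.GetRequiredService<ILogger<PatchCacheStandIn>>(), cacheFolder));
        return services;
    }
}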
|
Loading…
Reference in New Issue
Block a user