3.0.1.8 fixes

Halgari 2022-09-30 23:21:58 -06:00
parent b7e848b7a2
commit a4e5d41603
8 changed files with 92 additions and 63 deletions


@@ -1,5 +1,13 @@
 ### Changelog

+#### Version - 3.0.1.8 - 10/??/2022
+* Fix broken zEdit merge code (the "this stream is not readable" error)
+* Update out-of-date dependencies
+* Update CLI to perform lazy initialization of command components (faster startup)
+* Fix some status messages during installation
+* Optimize the modlist optimizer so it runs a bit faster
+* Rework the file hash cache so it doesn't block the UI thread
+
 #### Version - 3.0.1.7 - 9/27/2022
 * HOTFIX: fix "Could not find part of path" bug related to the profiles folder


@@ -111,7 +111,7 @@ public class DownloadAll : IVerb
             return;
         }
-        _cache.FileHashWriteCache(output, result.Item2);
+        await _cache.FileHashWriteCache(output, result.Item2);
         var metaFile = outputFile.WithExtension(Ext.Meta);
         await metaFile.WriteAllTextAsync(_dispatcher.MetaIniSection(file), token: token);


@@ -125,6 +125,21 @@ public static class AsyncParallelExtensions
         await foreach (var itm in coll) lst.Add(itm);
         return lst;
     }
+
+    /// <summary>
+    /// Consumes a IAsyncEnumerable without doing anything with it
+    /// </summary>
+    /// <param name="coll"></param>
+    /// <typeparam name="T"></typeparam>
+    public static async Task Sink<T>(this IAsyncEnumerable<T> coll)
+    {
+        long count = 0;
+        await foreach (var itm in coll)
+        {
+            count++;
+        }
+    }
+
     public static async Task<T[]> ToArray<T>(this IAsyncEnumerable<T> coll)
     {
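The new `Sink` helper exists so a lazy `IAsyncEnumerable<T>` pipeline can be run purely for its side effects. A minimal sketch of how it is meant to be consumed; the enumerable below is illustrative only, and the `Sink` extension from this commit is assumed to be in scope:

```csharp
using System.Collections.Generic;
using System.Threading.Tasks;

// Illustrative async sequence; stands in for real work such as hashing or deleting files.
static async IAsyncEnumerable<int> ProcessAsync()
{
    for (var i = 0; i < 3; i++)
    {
        await Task.Delay(10);   // simulate async work
        yield return i;         // a result is produced, but no caller needs it
    }
}

// The sequence is lazy: nothing runs until it is enumerated.
// Sink() walks it to completion purely for the side effects and discards every element.
await ProcessAsync().Sink();
```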


@@ -232,7 +232,7 @@ public abstract class AInstaller<T>
     {
         var file = directive.Directive;
         UpdateProgress(file.Size);
+        var destPath = file.To.RelativeTo(_configuration.Install);
         switch (file)
         {
             case PatchedFromArchive pfa:

@@ -240,9 +240,8 @@ public abstract class AInstaller<T>
                 await using var s = await sf.GetStream();
                 s.Position = 0;
                 await using var patchDataStream = await InlinedFileStream(pfa.PatchID);
-                var toFile = file.To.RelativeTo(_configuration.Install);
                 {
-                    await using var os = toFile.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.None);
+                    await using var os = destPath.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.None);
                     await BinaryPatching.ApplyPatch(s, patchDataStream, os);
                 }
             }

@@ -252,8 +251,7 @@ public abstract class AInstaller<T>
             case TransformedTexture tt:
             {
                 await using var s = await sf.GetStream();
-                await using var of = directive.Directive.To.RelativeTo(_configuration.Install)
-                    .Open(FileMode.Create, FileAccess.Write);
+                await using var of = destPath.Open(FileMode.Create, FileAccess.Write);
                 _logger.LogInformation("Recompressing {Filename}", tt.To.FileName);
                 await ImageLoader.Recompress(s, tt.ImageState.Width, tt.ImageState.Height, tt.ImageState.Format,
                     of, token);

@@ -264,19 +262,19 @@ public abstract class AInstaller<T>
             case FromArchive _:
                 if (grouped[vf].Count() == 1)
                 {
-                    await sf.Move(directive.Directive.To.RelativeTo(_configuration.Install), token);
+                    await sf.Move(destPath, token);
                 }
                 else
                 {
                     await using var s = await sf.GetStream();
-                    await directive.Directive.To.RelativeTo(_configuration.Install)
-                        .WriteAllAsync(s, token, false);
+                    await destPath.WriteAllAsync(s, token, false);
                 }
                 break;
             default:
                 throw new Exception($"No handler for {directive}");
         }
+        await FileHashCache.FileHashWriteCache(destPath, file.Hash);
         await job.Report((int) directive.VF.Size, token);
     }

@@ -383,7 +381,7 @@ public abstract class AInstaller<T>
         }
         if (hash != default)
-            FileHashCache.FileHashWriteCache(destination.Value, hash);
+            await FileHashCache.FileHashWriteCache(destination.Value, hash);
         if (result == DownloadResult.Update)
             await destination.Value.MoveToAsync(destination.Value.Parent.Combine(archive.Hash.ToHex()), true,

@@ -487,25 +485,26 @@ public abstract class AInstaller<T>
         NextStep(Consts.StepPreparing, "Looking for files to delete", 0);
         await _configuration.Install.EnumerateFiles()
-            .PDoAll(_limiter, async f =>
+            .PMapAllBatched(_limiter, async f =>
             {
                 var relativeTo = f.RelativeTo(_configuration.Install);
                 if (indexed.ContainsKey(relativeTo) || f.InFolder(_configuration.Downloads))
-                    return;
+                    return f;
-                if (f.InFolder(profileFolder) && f.Parent.FileName == savePath) return;
+                if (f.InFolder(profileFolder) && f.Parent.FileName == savePath) return f;
                 if (NoDeleteRegex.IsMatch(f.ToString()))
-                    return;
+                    return f;
                 if (bsaPathsToNotBuild.Contains(f))
-                    return;
+                    return f;
                 _logger.LogInformation("Deleting {RelativePath} it's not part of this ModList", relativeTo);
                 f.Delete();
-            });
+                return f;
+            }).Sink();

-        _logger.LogInformation("Cleaning empty folders");
+        NextStep(Consts.StepPreparing, "Cleaning empty folders", 0);
         var expectedFolders = indexed.Keys
             .Select(f => f.RelativeTo(_configuration.Install))
             // We ignore the last part of the path, so we need a dummy file name

@@ -542,12 +541,11 @@ public abstract class AInstaller<T>
         var existingfiles = _configuration.Install.EnumerateFiles().ToHashSet();
         NextStep(Consts.StepPreparing, "Looking for unmodified files", 0);
-        await indexed.Values.PMapAll<Directive, Directive?>(async d =>
+        await indexed.Values.PMapAllBatched(_limiter, async d =>
             {
                 // Bit backwards, but we want to return null for
                 // all files we *want* installed. We return the files
                 // to remove from the install list.
-                using var job = await _limiter.Begin($"Hashing File {d.To}", 0, token);
                 var path = _configuration.Install.Combine(d.To);
                 if (!existingfiles.Contains(path)) return null;
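The deletion pass above moves from `PDoAll` to `PMapAllBatched(...).Sink()`: every branch of the lambda now returns the item so the work can flow through a mapping pipeline, and `Sink` drains the results since nothing needs them. A rough sketch of that shape, using a hypothetical `MapInParallel` stand-in because the real `PMapAllBatched` signature is not shown in this diff (the `Sink` extension above is assumed to be in scope):

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;

// Hypothetical stand-in for PMapAllBatched: maps each item through an async body
// and yields the results lazily. The real helper also batches the work and runs
// it under the installer's resource limiter.
static async IAsyncEnumerable<string> MapInParallel(IEnumerable<string> files, Func<string, Task<string>> body)
{
    foreach (var f in files)
        yield return await body(f);
}

var allFiles = Directory.EnumerateFiles(Path.GetTempPath());

await MapInParallel(allFiles, async f =>
{
    // A mapping pipeline needs a value from every branch, which is why the bare
    // "return;" statements in the diff became "return f;".
    if (f.EndsWith(".keep")) return f;
    await Task.Yield();   // stand-in for the delete + logging side effects
    return f;
}).Sink();                // drain the sequence; only the side effects matter
```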


@@ -271,9 +271,11 @@ public class StandardInstaller : AInstaller<StandardInstaller>
     {
         var bsas = ModList.Directives.OfType<CreateBSA>().ToList();
         _logger.LogInformation("Building {bsasCount} bsa files", bsas.Count);
+        NextStep("Installing", "Building BSAs", bsas.Count);
         foreach (var bsa in bsas)
         {
+            UpdateProgress(1);
             _logger.LogInformation("Building {bsaTo}", bsa.To.FileName);
             var sourceDir = _configuration.Install.Combine(BSACreationDir, bsa.TempID);

@@ -295,9 +297,7 @@ public class StandardInstaller : AInstaller<StandardInstaller>
             await a.Build(outStream, token);
             streams.Do(s => s.Dispose());
-            FileHashCache.FileHashWriteCache(outPath, bsa.Hash);
+            await FileHashCache.FileHashWriteCache(outPath, bsa.Hash);
             sourceDir.DeleteDirectory();
         }

@@ -325,9 +325,11 @@ public class StandardInstaller : AInstaller<StandardInstaller>
             {
                 case RemappedInlineFile file:
                     await WriteRemappedFile(file);
+                    await FileHashCache.FileHashCachedAsync(outPath, token);
                     break;
                 default:
                     await outPath.WriteAllBytesAsync(await LoadBytesFromPath(directive.SourceDataID), token);
+                    await FileHashCache.FileHashWriteCache(outPath, directive.Hash);
                     break;
             }
         });

@@ -453,24 +455,29 @@ public class StandardInstaller : AInstaller<StandardInstaller>
     public async Task GenerateZEditMerges(CancellationToken token)
     {
-        await _configuration.ModList
+        var patches = _configuration.ModList
             .Directives
             .OfType<MergedPatch>()
-            .PDoAll(async m =>
-            {
-                _logger.LogInformation("Generating zEdit merge: {to}", m.To);
-                var srcData = (await m.Sources.SelectAsync(async s =>
-                        await _configuration.Install.Combine(s.RelativePath).ReadAllBytesAsync(token))
-                    .ToReadOnlyCollection())
-                    .ConcatArrays();
-                var patchData = await LoadBytesFromPath(m.PatchID);
-                await using var fs = _configuration.Install.Combine(m.To)
-                    .Open(FileMode.Create, FileAccess.Write, FileShare.None);
-                await BinaryPatching.ApplyPatch(new MemoryStream(srcData), new MemoryStream(patchData), fs);
-            });
+            .ToList();
+        NextStep("Installing", "Generating ZEdit Merges", patches.Count);
+
+        await patches.PMapAllBatched(_limiter, async m =>
+        {
+            UpdateProgress(1);
+            _logger.LogInformation("Generating zEdit merge: {to}", m.To);
+            var srcData = (await m.Sources.SelectAsync(async s =>
+                    await _configuration.Install.Combine(s.RelativePath).ReadAllBytesAsync(token))
+                .ToReadOnlyCollection())
+                .ConcatArrays();
+            var patchData = await LoadBytesFromPath(m.PatchID);
+
+            await using var fs = _configuration.Install.Combine(m.To)
+                .Open(FileMode.Create, FileAccess.ReadWrite, FileShare.None);
+            await BinaryPatching.ApplyPatch(new MemoryStream(srcData), new MemoryStream(patchData), fs);
+            return m;
+        }).ToList();
     }

     public static async Task<ModList> Load(DTOSerializer dtos, DownloadDispatcher dispatcher, ModlistMetadata metadata, CancellationToken token)


@@ -48,7 +48,7 @@ public class ModListDownloadMaintainer
         var path = ModListPath(metadata);
         if (!path.FileExists()) return false;
-        if (_hashCache.TryGetHashCache(path, out var hash) && hash == metadata.DownloadMetadata!.Hash) return true;
+        if (await _hashCache.TryGetHashCache(path) == metadata.DownloadMetadata!.Hash) return true;
         if (_downloadingCount > 0) return false;
         return await _hashCache.FileHashCachedAsync(path, token.Value) == metadata.DownloadMetadata!.Hash;

@@ -80,7 +80,7 @@ public class ModListDownloadMaintainer
                     Hash = metadata.DownloadMetadata.Hash
                 }, path, job, token.Value);
-                _hashCache.FileHashWriteCache(path, hash);
+                await _hashCache.FileHashWriteCache(path, hash);
                 await path.WithExtension(Ext.MetaData).WriteAllTextAsync(JsonSerializer.Serialize(metadata));
             }
             finally


@@ -26,13 +26,15 @@ public class HashCacheTest
         Assert.Equal(Hash.FromBase64("eSIyd+KOG3s="),
             await _cache.FileHashCachedAsync(testFile.Path, CancellationToken.None));
-        Assert.True(_cache.TryGetHashCache(testFile.Path, out var hash));
+        Assert.True(await _cache.TryGetHashCache(testFile.Path) != default);

         _cache.Purge(testFile.Path);
-        Assert.False(_cache.TryGetHashCache(testFile.Path, out _));
+        var hash = await testFile.Path.Hash(CancellationToken.None);
+        Assert.NotEqual(hash, default);
+        Assert.NotEqual(hash, await _cache.TryGetHashCache(testFile.Path));

         Assert.Equal(hash, await _cache.FileHashCachedAsync(testFile.Path, CancellationToken.None));
-        Assert.True(_cache.TryGetHashCache(testFile.Path, out _));
+        Assert.Equal(hash, await _cache.TryGetHashCache(testFile.Path));

         _cache.VacuumDatabase();
     }


@@ -39,15 +39,15 @@ public class FileHashCache
         cmd.ExecuteNonQuery();
     }

-    private (AbsolutePath Path, long LastModified, Hash Hash) Get(AbsolutePath path)
+    private async Task<(AbsolutePath Path, long LastModified, Hash Hash)> Get(AbsolutePath path)
     {
         using var cmd = new SQLiteCommand(_conn);
         cmd.CommandText = "SELECT LastModified, Hash FROM HashCache WHERE Path = @path";
         cmd.Parameters.AddWithValue("@path", path.ToString().ToLowerInvariant());
-        cmd.PrepareAsync();
+        await cmd.PrepareAsync();

-        using var reader = cmd.ExecuteReader();
-        while (reader.Read()) return (path, reader.GetInt64(0), Hash.FromLong(reader.GetInt64(1)));
+        await using var reader = await cmd.ExecuteReaderAsync();
+        while (await reader.ReadAsync()) return (path, reader.GetInt64(0), Hash.FromLong(reader.GetInt64(1)));
         return default;
     }

@@ -62,17 +62,17 @@ public class FileHashCache
         cmd.ExecuteNonQuery();
     }

-    private void Upsert(AbsolutePath path, long lastModified, Hash hash)
+    private async Task Upsert(AbsolutePath path, long lastModified, Hash hash)
    {
-        using var cmd = new SQLiteCommand(_conn);
+        await using var cmd = new SQLiteCommand(_conn);
         cmd.CommandText = @"INSERT INTO HashCache (Path, LastModified, Hash) VALUES (@path, @lastModified, @hash)
            ON CONFLICT(Path) DO UPDATE SET LastModified = @lastModified, Hash = @hash";
         cmd.Parameters.AddWithValue("@path", path.ToString().ToLowerInvariant());
         cmd.Parameters.AddWithValue("@lastModified", lastModified);
         cmd.Parameters.AddWithValue("@hash", (long) hash);
-        cmd.PrepareAsync();
-        cmd.ExecuteNonQuery();
+        await cmd.PrepareAsync();
+        await cmd.ExecuteNonQueryAsync();
     }

     public void VacuumDatabase()

@@ -84,46 +84,45 @@ public class FileHashCache
         cmd.ExecuteNonQuery();
     }

-    public bool TryGetHashCache(AbsolutePath file, out Hash hash)
+    public async Task<Hash> TryGetHashCache(AbsolutePath file)
     {
-        hash = default;
-        if (!file.FileExists()) return false;
+        if (!file.FileExists()) return default;

-        var result = Get(file);
+        var result = await Get(file);
         if (result == default || result.Hash == default)
-            return false;
+            return default;

         if (result.LastModified == file.LastModifiedUtc().ToFileTimeUtc())
         {
-            hash = result.Hash;
-            return true;
+            return result.Hash;
         }

         Purge(file);
-        return false;
+        return default;
     }

-    private void WriteHashCache(AbsolutePath file, Hash hash)
+    private async Task WriteHashCache(AbsolutePath file, Hash hash)
     {
         if (!file.FileExists()) return;
-        Upsert(file, file.LastModifiedUtc().ToFileTimeUtc(), hash);
+        await Upsert(file, file.LastModifiedUtc().ToFileTimeUtc(), hash);
     }

-    public void FileHashWriteCache(AbsolutePath file, Hash hash)
+    public async Task FileHashWriteCache(AbsolutePath file, Hash hash)
     {
-        WriteHashCache(file, hash);
+        await WriteHashCache(file, hash);
     }

     public async Task<Hash> FileHashCachedAsync(AbsolutePath file, CancellationToken token)
     {
-        if (TryGetHashCache(file, out var foundHash)) return foundHash;
+        var hash = await TryGetHashCache(file);
+        if (hash != default) return hash;

         using var job = await _limiter.Begin($"Hashing {file.FileName}", file.Size(), token);
         await using var fs = file.Open(FileMode.Open, FileAccess.Read, FileShare.Read);
-        var hash = await fs.HashingCopy(Stream.Null, token, job);
+        hash = await fs.HashingCopy(Stream.Null, token, job);
         if (hash != default)
-            WriteHashCache(file, hash);
+            await WriteHashCache(file, hash);
         return hash;
     }
 }
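With these changes the cache is async end to end. A caller-side sketch of the reworked API, using only the signatures visible in this diff (`TryGetHashCache`, `FileHashCachedAsync`, `FileHashWriteCache`); the `FileHashCache`, `AbsolutePath`, and `Hash` types come from the Wabbajack assemblies and their usings are assumed:

```csharp
using System.Threading;
using System.Threading.Tasks;
// The relevant Wabbajack usings for FileHashCache, AbsolutePath and Hash are assumed.

public static class HashCacheUsageSketch
{
    // Fast path first: a still-valid cache entry (same last-modified time) is returned
    // directly; otherwise the file is hashed and the result is persisted by the cache.
    // Every step is awaited, so none of it blocks the UI thread.
    public static async Task<Hash> GetOrComputeAsync(FileHashCache cache, AbsolutePath file, CancellationToken token)
    {
        var cached = await cache.TryGetHashCache(file);
        if (cached != default) return cached;

        return await cache.FileHashCachedAsync(file, token);
    }

    // When the hash is already known (e.g. right after installing a file),
    // it can be written straight into the cache.
    public static Task RecordKnownHashAsync(FileHashCache cache, AbsolutePath file, Hash hash)
        => cache.FileHashWriteCache(file, hash);
}
```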