Add CESI support, start integration into compiler

Timothy Baldridge 2022-06-20 17:21:04 -06:00
parent 8dc13740dc
commit 6724d4ba6e
20 changed files with 533 additions and 37 deletions

View File

@@ -11,7 +11,7 @@ public class ManualBlobDownloadHandler : BrowserWindowViewModel
protected override async Task Run(CancellationToken token)
{
await WaitForReady();
//await WaitForReady();
var archive = Intervention.Archive;
var md = Intervention.Archive.State as Manual;

View File

@@ -135,6 +135,9 @@ public abstract class BrowserWindowViewModel : ViewModel
{
var source = new TaskCompletionSource();
var referer = _browser.Source;
while (_browser.CoreWebView2 == null)
await Task.Delay(10, token);
_browser.CoreWebView2.DownloadStarting += (sender, args) =>
{
try

View File

@@ -79,6 +79,7 @@ internal class Program
services.AddSingleton<IVerb, Install>();
services.AddSingleton<IVerb, InstallCompileInstallVerify>();
services.AddSingleton<IVerb, HashUrlString>();
services.AddSingleton<IVerb, DownloadAll>();
services.AddSingleton<IUserInterventionHandler, UserInterventionHandler>();
}).Build();

View File

@@ -0,0 +1,128 @@
using System;
using System.CommandLine;
using System.CommandLine.Invocation;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Wabbajack.Common;
using Wabbajack.Downloaders;
using Wabbajack.DTOs;
using Wabbajack.DTOs.DownloadStates;
using Wabbajack.DTOs.JsonConverters;
using Wabbajack.Installer;
using Wabbajack.Networking.WabbajackClientApi;
using Wabbajack.Paths;
using Wabbajack.Paths.IO;
using Wabbajack.RateLimiter;
using Wabbajack.VFS;
namespace Wabbajack.CLI.Verbs;
public class DownloadAll : IVerb
{
private readonly DownloadDispatcher _dispatcher;
private readonly ILogger<DownloadAll> _logger;
private readonly Client _wjClient;
private readonly DTOSerializer _dtos;
private readonly Resource<DownloadAll> _limiter;
private readonly FileHashCache _cache;
public const int MaxDownload = 6000;
public DownloadAll(ILogger<DownloadAll> logger, DownloadDispatcher dispatcher, Client wjClient, DTOSerializer dtos, FileHashCache cache)
{
_logger = logger;
_dispatcher = dispatcher;
_wjClient = wjClient;
_dtos = dtos;
_limiter = new Resource<DownloadAll>("Download All", 16);
_cache = cache;
}
public Command MakeCommand()
{
var command = new Command("download-all");
command.Add(new Option<AbsolutePath>(new[] {"-o", "-output"}, "Output folder"));
command.Description = "Downloads all files for all modlists in the gallery";
command.Handler = CommandHandler.Create(Run);
return command;
}
private async Task<int> Run(AbsolutePath output, CancellationToken token)
{
_logger.LogInformation("Downloading modlists");
var existing = await output.EnumerateFiles()
.Where(f => f.Extension != Ext.Meta)
.PMapAll(_limiter, async f =>
{
_logger.LogInformation("Hashing {File}", f.FileName);
return await _cache.FileHashCachedAsync(f, token);
})
.ToHashSet();
var archives = (await (await _wjClient.LoadLists())
.PMapAll(_limiter, async m =>
{
try
{
return await StandardInstaller.Load(_dtos, _dispatcher, m, token);
}
catch (Exception ex)
{
_logger.LogError(ex, "While downloading list");
return default;
}
})
.Where(d => d != default)
.SelectMany(m => m!.Archives)
.ToList())
.DistinctBy(d => d.Hash)
.Where(d => d.State is Nexus)
.Where(d => !existing.Contains(d.Hash))
.ToList();
_logger.LogInformation("Found {Count} Archives totaling {Size}", archives.Count, archives.Sum(a => a.Size).ToFileSizeString());
await archives
.OrderBy(a => a.Size)
.Take(MaxDownload)
.PDoAll(_limiter, async file => {
var outputFile = output.Combine(file.Name);
if (outputFile.FileExists())
{
outputFile = output.Combine(outputFile.FileName.WithoutExtension() + "_" + file.Hash.ToHex() +
outputFile.WithExtension(outputFile.Extension));
}
_logger.LogInformation("Downloading {File}", file.Name);
try
{
var result = await _dispatcher.DownloadWithPossibleUpgrade(file, outputFile, token);
if (result.Item1 == DownloadResult.Failure)
{
if (outputFile.FileExists())
outputFile.Delete();
return;
}
_cache.FileHashWriteCache(output, result.Item2);
var metaFile = outputFile.WithExtension(Ext.Meta);
await metaFile.WriteAllTextAsync(_dispatcher.MetaIniSection(file), token: token);
}
catch (Exception ex)
{
_logger.LogError(ex, "While downloading {Name}, Ignoring", file.Name);
}
});
return 0;
}
}
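A minimal usage sketch for the new verb, assuming the standard wabbajack-cli executable name (the verb name and the -o/-output option come from MakeCommand above; the output folder is hypothetical):

wabbajack-cli.exe download-all -o C:\WabbajackDownloads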

View File

@@ -30,6 +30,7 @@ public static class AsyncParallelExtensions
foreach (var itm in tasks) yield return await itm;
}
// Like PMapAll, but doesn't keep default values
public static async IAsyncEnumerable<TOut> PKeepAll<TIn, TOut>(this IEnumerable<TIn> coll,
Func<TIn, Task<TOut>> mapFn)
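A hypothetical usage sketch, assuming PKeepAll maps like PMapAll and then drops results equal to default(TOut), per the comment above; items and LoadOrNull are placeholders:

var loaded = await items
    .PKeepAll(async i => await LoadOrNull(i)) // failed loads return null and are dropped
    .ToList();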

View File

@@ -1,5 +1,6 @@
using System.Text.Json.Serialization;
using Microsoft.Extensions.DependencyInjection;
using Wabbajack.Paths;
namespace Wabbajack.DTOs.JsonConverters;
@@ -18,6 +19,7 @@ public static class DIExtensions
services.AddSingleton<JsonConverter, RelativePathConverter>();
services.AddSingleton<JsonConverter, AbsolutePathConverter>();
services.AddSingleton<JsonConverter, VersionConverter>();
services.AddSingleton<JsonConverter, IPathConverter>();
return services;
}

View File

@@ -0,0 +1,45 @@
using System;
using System.Text.Json;
using System.Text.Json.Serialization;
using Wabbajack.Paths;
namespace Wabbajack.DTOs.JsonConverters;
public class IPathConverter : JsonConverter<IPath>
{
public override IPath? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
if (reader.TokenType != JsonTokenType.StartObject)
throw new JsonException("Invalid format, expected StartObject");
reader.Read();
var type = reader.GetString();
reader.Read();
var value = reader.GetString();
reader.Read();
if (type == "Absolute")
return value!.ToAbsolutePath();
else
return value!.ToRelativePath();
}
public override void Write(Utf8JsonWriter writer, IPath value, JsonSerializerOptions options)
{
writer.WriteStartObject();
switch (value)
{
case AbsolutePath a:
writer.WriteString("Absolute", a.ToString());
break;
case RelativePath r:
writer.WriteString("Relative", r.ToString());
break;
default:
throw new NotImplementedException();
}
writer.WriteEndObject();
}
}
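A minimal round-trip sketch for the converter (using System.Text.Json and Wabbajack.Paths), assuming it is added directly to a JsonSerializerOptions instance rather than through the DI registration above; the sample path is hypothetical:

var options = new JsonSerializerOptions();
options.Converters.Add(new IPathConverter());

// Write produces a single-property object keyed by the path kind,
// e.g. {"Absolute":"C:\\Games\\Skyrim"} or {"Relative":"mods\\foo.esp"}
var json = JsonSerializer.Serialize<IPath>("C:\\Games\\Skyrim".ToAbsolutePath(), options);
var path = JsonSerializer.Deserialize<IPath>(json, options);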

View File

@@ -44,6 +44,29 @@ public class DownloadDispatcher
return await Download(a, dest, job, token);
}
public async Task<Archive> MaybeProxy(Archive a, CancellationToken token)
{
if (a.State is not IProxyable p) return a;
var uri = p.UnParse(a.State);
var newUri = await _wjClient.MakeProxyUrl(a, uri);
if (newUri != null)
{
a = new Archive
{
Name = a.Name,
Size = a.Size,
Hash = a.Hash,
State = new DTOs.DownloadStates.Http()
{
Url = newUri
}
};
}
return a;
}
public async Task<Hash> Download(Archive a, AbsolutePath dest, Job<DownloadDispatcher> job, CancellationToken token)
{
if (!dest.Parent.DirectoryExists())
@@ -53,7 +76,9 @@ public class DownloadDispatcher
if (_useProxyCache && downloader is IProxyable p)
{
var uri = p.UnParse(a.State);
var newUri = _wjClient.MakeProxyUrl(a, uri);
var newUri = await _wjClient.MakeProxyUrl(a, uri);
if (newUri != null)
{
a = new Archive
{
Name = a.Name,
@@ -67,6 +92,7 @@ public class DownloadDispatcher
downloader = Downloader(a);
_logger.LogInformation("Downloading Proxy ({Hash}) {Uri}", (await uri.ToString().Hash()).ToHex(), uri);
}
}
var hash = await downloader.Download(a, dest, job, token);
return hash;

View File

@@ -12,7 +12,7 @@ using Wabbajack.RateLimiter;
namespace Wabbajack.Downloaders.Manual;
public class ManualDownloader : ADownloader<DTOs.DownloadStates.Manual>
public class ManualDownloader : ADownloader<DTOs.DownloadStates.Manual>, IProxyable
{
private readonly ILogger<ManualDownloader> _logger;
private readonly IUserInterventionHandler _interventionHandler;
@@ -98,4 +98,19 @@ public class ManualDownloader : ADownloader<DTOs.DownloadStates.Manual>
return new[] {$"manualURL={state.Url}", $"prompt={state.Prompt}"};
}
public IDownloadState? Parse(Uri uri)
{
return new DTOs.DownloadStates.Manual() {Url = uri};
}
public Uri UnParse(IDownloadState state)
{
return (state as DTOs.DownloadStates.Manual)!.Url;
}
public Task<T> DownloadStream<T>(Archive archive, Func<Stream, Task<T>> fn, CancellationToken token)
{
throw new NotImplementedException();
}
}

View File

@@ -106,6 +106,7 @@ public abstract class AInstaller<T>
_updateStopWatch.Restart();
MaxStepProgress = maxStepProgress;
_currentStep += 1;
_currentStepProgress = 0;
_statusText = statusText;
_statusCategory = statusCategory;
_statusFormatter = formatter ?? (x => x.ToString());
@@ -313,6 +314,10 @@ public abstract class AInstaller<T>
_logger.LogInformation("Downloading {Count} archives", missing.Count.ToString());
NextStep(Consts.StepDownloading, "Downloading files", missing.Count);
missing = await missing
.SelectAsync(async m => await _downloadDispatcher.MaybeProxy(m, token))
.ToList();
if (download)
{
var result = SendDownloadMetrics(missing);
@@ -479,8 +484,7 @@ public abstract class AInstaller<T>
var savePath = (RelativePath) "saves";
NextStep(Consts.StepPreparing, "Looking for files to delete", 0);
await _configuration.Install.EnumerateFiles()
.PDoAll(async f =>
foreach (var f in _configuration.Install.EnumerateFiles())
{
var relativeTo = f.RelativeTo(_configuration.Install);
if (indexed.ContainsKey(relativeTo) || f.InFolder(_configuration.Downloads))
@@ -496,7 +500,7 @@ public abstract class AInstaller<T>
_logger.LogInformation("Deleting {RelativePath} it's not part of this ModList", relativeTo);
f.Delete();
});
}
_logger.LogInformation("Cleaning empty folders");
var expectedFolders = indexed.Keys
@@ -540,6 +544,7 @@ public abstract class AInstaller<T>
// Bit backwards, but we want to return null for
// all files we *want* installed. We return the files
// to remove from the install list.
using var job = await _limiter.Begin($"Hashing File {d.To}", 0, token);
var path = _configuration.Install.Combine(d.To);
if (!existingfiles.Contains(path)) return null;

View File

@@ -1,5 +1,9 @@
using System;
using System.Buffers;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -32,8 +36,105 @@ public class SingleThreadedDownloader : IHttpDownloader
if (job.Size == 0)
job.Size = response.Content.Headers.ContentLength ?? 0;
/* Need to make this multithreaded to be much use
if ((response.Content.Headers.ContentLength ?? 0) != 0 &&
response.Headers.AcceptRanges.FirstOrDefault() == "bytes")
{
return await ResettingDownloader(response, message, outputPath, job, token);
}
*/
await using var stream = await response.Content.ReadAsStreamAsync(token);
await using var outputStream = outputPath.Open(FileMode.Create, FileAccess.Write);
return await stream.HashingCopy(outputStream, token, job);
}
private const int CHUNK_SIZE = 1024 * 1024 * 8;
private async Task<Hash> ResettingDownloader(HttpResponseMessage response, HttpRequestMessage message, AbsolutePath outputPath, IJob job, CancellationToken token)
{
using var rented = MemoryPool<byte>.Shared.Rent(CHUNK_SIZE);
var buffer = rented.Memory;
var hasher = new xxHashAlgorithm(0);
var running = true;
ulong finalHash = 0;
var inputStream = await response.Content.ReadAsStreamAsync(token);
await using var outputStream = outputPath.Open(FileMode.Create, FileAccess.Write, FileShare.None);
long writePosition = 0;
while (running && !token.IsCancellationRequested)
{
var totalRead = 0;
while (totalRead != buffer.Length)
{
var read = await inputStream.ReadAsync(buffer.Slice(totalRead, buffer.Length - totalRead),
token);
if (read == 0)
{
running = false;
break;
}
if (job != null)
await job.Report(read, token);
totalRead += read;
}
var pendingWrite = outputStream.WriteAsync(buffer[..totalRead], token);
if (running)
{
hasher.TransformByteGroupsInternal(buffer.Span);
await pendingWrite;
}
else
{
var preSize = (totalRead >> 5) << 5;
if (preSize > 0)
{
hasher.TransformByteGroupsInternal(buffer[..preSize].Span);
finalHash = hasher.FinalizeHashValueInternal(buffer[preSize..totalRead].Span);
await pendingWrite;
break;
}
finalHash = hasher.FinalizeHashValueInternal(buffer[..totalRead].Span);
await pendingWrite;
break;
}
{
writePosition += totalRead;
await job.Report(totalRead, token);
message = CloneMessage(message);
message.Headers.Range = new RangeHeaderValue(writePosition, writePosition + CHUNK_SIZE);
await inputStream.DisposeAsync();
response.Dispose();
response = await _client.SendAsync(message, HttpCompletionOption.ResponseHeadersRead, token);
HttpException.ThrowOnFailure(response);
inputStream = await response.Content.ReadAsStreamAsync(token);
}
}
await outputStream.FlushAsync(token);
return new Hash(finalHash);
}
private HttpRequestMessage CloneMessage(HttpRequestMessage message)
{
var newMsg = new HttpRequestMessage(message.Method, message.RequestUri);
foreach (var header in message.Headers)
{
newMsg.Headers.Add(header.Key, header.Value);
}
return newMsg;
}
}
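Two details of the resumable path worth calling out, as read from the code above: the xxHash implementation consumes data in 32-byte groups, so for the final partial read the code rounds totalRead down with (totalRead >> 5) << 5 — e.g. a last read of 100 bytes feeds 96 bytes to TransformByteGroupsInternal and the remaining 4 to FinalizeHashValueInternal. After each full chunk the request is cloned and re-sent with a Range header starting at writePosition, which is what lets the download resume mid-file; the guard that would route responses advertising Accept-Ranges: bytes through this path is still commented out above.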

View File

@@ -8,11 +8,13 @@ using System.Net.Http.Json;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Web;
using Microsoft.Extensions.Logging;
using Wabbajack.Common;
using Wabbajack.DTOs;
using Wabbajack.DTOs.CDN;
using Wabbajack.DTOs.Configs;
using Wabbajack.DTOs.DownloadStates;
using Wabbajack.DTOs.JsonConverters;
using Wabbajack.DTOs.Logins;
using Wabbajack.DTOs.ModListValidation;
@@ -363,8 +365,26 @@ public class Client
return await _client.GetFromJsonAsync<SteamManifest[]>(url, _dtos.Options) ?? Array.Empty<SteamManifest>();
}
public Uri MakeProxyUrl(Archive archive, Uri uri)
public async Task<bool> ProxyHas(Uri uri)
{
return new Uri($"{_configuration.BuildServerUrl}proxy?name={archive.Name}&hash={archive.Hash.ToHex()}&uri={uri}");
var newUri = new Uri($"{_configuration.BuildServerUrl}proxy?uri={HttpUtility.UrlEncode(uri.ToString())}");
var msg = new HttpRequestMessage(HttpMethod.Head, newUri);
try
{
var result = await _client.SendAsync(msg);
return result.IsSuccessStatusCode;
}
catch (Exception ex)
{
return false;
}
}
public async ValueTask<Uri?> MakeProxyUrl(Archive archive, Uri uri)
{
if (archive.State is Manual && !await ProxyHas(uri))
return null;
return new Uri($"{_configuration.BuildServerUrl}proxy?name={archive.Name}&hash={archive.Hash.ToHex()}&uri={HttpUtility.UrlEncode(uri.ToString())}");
}
}
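For illustration, with a hypothetical BuildServerUrl of https://build.wabbajack.org/ and a source URL of https://example.com/mod.7z, MakeProxyUrl yields roughly:

https://build.wabbajack.org/proxy?name=mod.7z&hash=<hex-of-archive-hash>&uri=https%3a%2f%2fexample.com%2fmod.7z

and returns null instead when the archive state is Manual and ProxyHas reports that the proxy does not already hold the file.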

View File

@@ -33,4 +33,15 @@ public class AppSettings
public string MetricsFolder { get; set; } = "";
public string TarLogPath { get; set; }
public string GitHubKey { get; set; } = "";
public CouchDBSetting CesiDB { get; set; }
public CouchDBSetting MetricsDB { get; set; }
}
public class CouchDBSetting
{
public Uri Endpoint { get; set; }
public string Database { get; set; }
public string Username { get; set; }
public string Password { get; set; }
}

View File

@@ -0,0 +1,105 @@
using cesi.DTOs;
using CouchDB.Driver;
using CouchDB.Driver.Views;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using Wabbajack.Common;
using Wabbajack.DTOs.JsonConverters;
using Wabbajack.DTOs.Texture;
using Wabbajack.Hashing.xxHash64;
using Wabbajack.Paths;
using Wabbajack.VFS;
namespace Wabbajack.Server.Controllers;
[Route("/cesi")]
public class Cesi : ControllerBase
{
private readonly ILogger<Cesi> _logger;
private readonly ICouchDatabase<Analyzed> _db;
private readonly DTOSerializer _dtos;
public Cesi(ILogger<Cesi> logger, ICouchDatabase<Analyzed> db, DTOSerializer serializer)
{
_logger = logger;
_db = db;
_dtos = serializer;
}
[HttpGet("entry/{hash}")]
public async Task<IActionResult> Entry(string hash)
{
return Ok(await _db.FindAsync(hash));
}
[HttpGet("vfs/{hash}")]
public async Task<IActionResult> Vfs(string hash)
{
var entry = await _db.FindAsync(ReverseHash(hash));
if (entry == null) return NotFound(new {Message = "Entry not found", Hash = hash, ReverseHash = ReverseHash(hash)});
var indexed = new IndexedVirtualFile
{
Hash = Hash.FromHex(ReverseHash(entry.xxHash64)),
Size = entry.Size,
ImageState = GetImageState(entry),
Children = await GetChildrenState(entry),
};
return Ok(_dtos.Serialize(indexed, true));
}
private async Task<List<IndexedVirtualFile>> GetChildrenState(Analyzed entry)
{
if (entry.Archive == null) return new List<IndexedVirtualFile>();
var children = await _db.GetViewAsync<string, Analyzed>("Indexes", "ArchiveContents", new CouchViewOptions<string>
{
IncludeDocs = true,
Key = entry.xxHash64
});
var indexed = children.ToLookup(d => d.Document.xxHash64, v => v.Document);
return await entry.Archive.Entries.SelectAsync(async e =>
{
var found = indexed[e.Value].First();
return new IndexedVirtualFile
{
Name = e.Key.ToRelativePath(),
Size = found.Size,
Hash = Hash.FromHex(ReverseHash(found.xxHash64)),
ImageState = GetImageState(found),
Children = await GetChildrenState(found),
};
}).ToList();
}
private ImageState? GetImageState(Analyzed entry)
{
if (entry.DDS == null) return null;
return new ImageState
{
Width = entry.DDS.Width,
Height = entry.DDS.Height,
Format = Enum.Parse<DXGI_FORMAT>(entry.DDS.Format),
PerceptualHash = new PHash(entry.DDS.PHash.FromHex())
};
}
private Hash ReverseHash(Hash hash)
{
return Hash.FromHex(hash.ToArray().Reverse().ToArray().ToHex());
}
private string ReverseHash(string hash)
{
return hash.FromHex().Reverse().ToArray().ToHex();
}
}
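As wired up here, the controller answers plain GET requests; for example (the hash value is hypothetical):

GET /cesi/entry/79f71f2ec3a08b25   -> the raw Analyzed document from CouchDB
GET /cesi/vfs/79f71f2ec3a08b25     -> a serialized IndexedVirtualFile tree (note that Vfs reverses the hex bytes before lookup)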

View File

@@ -34,8 +34,10 @@ public class NexusCacheManager
_nexusAPI = nexusApi;
_discord = discord;
/* TODO - uncomment me!
_timer = new Timer(_ => UpdateNexusCacheAPI().FireAndForget(), null, TimeSpan.FromSeconds(2),
TimeSpan.FromHours(4));
*/
}

View File

@@ -2,7 +2,12 @@
using System.IO;
using System.Net.Http;
using System.Runtime.InteropServices;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading.Tasks;
using cesi.DTOs;
using CouchDB.Driver;
using CouchDB.Driver.Options;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http.Features;
@@ -34,6 +39,7 @@ using Wabbajack.Services.OSIntegrated.TokenProviders;
using Wabbajack.Networking.WabbajackClientApi;
using Wabbajack.Paths.IO;
using Wabbajack.VFS;
using YamlDotNet.Serialization.NamingConventions;
using Client = Wabbajack.Networking.GitHub.Client;
namespace Wabbajack.Server;
@@ -136,9 +142,27 @@ public class Startup
options.MimeTypes = new[] {"application/json"};
});
// CouchDB
services.AddSingleton(s =>
{
var settings = s.GetRequiredService<AppSettings>();
var client = new CouchClient(settings.CesiDB.Endpoint, b =>
{
b.UseBasicAuthentication("cesi", "password");
b.SetPropertyCase(PropertyCaseType.None);
b.SetJsonNullValueHandling(NullValueHandling.Ignore);
});
return client.GetDatabase<Analyzed>("cesi");
});
services.AddMvc();
services.AddControllers()
.AddNewtonsoftJson(o => { o.SerializerSettings.ReferenceLoopHandling = ReferenceLoopHandling.Ignore; });
services
.AddControllers()
.AddJsonOptions(j =>
{
j.JsonSerializerOptions.PropertyNamingPolicy = null;
j.JsonSerializerOptions.DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault;
});
NettleEngine.GetCompiler().RegisterWJFunctions();
}

View File

@@ -8,6 +8,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="cesi.DTOs" Version="1.0.0" />
<PackageReference Include="Chronic.Core" Version="0.4.0" />
<PackageReference Include="Dapper" Version="2.0.123" />
<PackageReference Include="Discord.Net.WebSocket" Version="3.6.1" />

View File

@@ -15,7 +15,13 @@
"MirrorFilesFolder": "c:\\tmp\\mirrors",
"NexusCacheFolder": "c:\\tmp\\nexus-cache",
"ProxyFolder": "c:\\tmp\\proxy",
"GitHubKey": ""
"GitHubKey": "",
"CesiDB": {
"Endpoint": "http://localhost:15984",
"Database": "cesi",
"Username": "cesi",
"Password": "password"
}
},
"AllowedHosts": "*"
}