Test downloading and indexing jobs

Timothy Baldridge 2020-04-09 06:46:33 -06:00
parent b1909a0851
commit 45716c76e0
10 changed files with 159 additions and 33 deletions

View File

@@ -27,6 +27,8 @@ namespace Wabbajack.BuildServer.Test
public AbsolutePath ServerPublicFolder => "public".RelativeTo(AbsolutePath.EntryPoint);
public AbsolutePath ServerArchivesFolder => "archives".RelativeTo(AbsolutePath.EntryPoint);
public BuildServerFixture()
{
@@ -62,6 +64,8 @@ namespace Wabbajack.BuildServer.Test
return (T)_host.Services.GetService(typeof(T));
}
public void Dispose()
{
if (_disposed) return;
@@ -111,8 +115,10 @@ namespace Wabbajack.BuildServer.Test
return _singleton;
}
}
}
[Collection("ServerTests")]
public class ABuildServerSystemTest : XunitContextBase, IClassFixture<SingletonAdaptor<BuildServerFixture>>
{
@@ -121,6 +127,7 @@ namespace Wabbajack.BuildServer.Test
private readonly IDisposable _unsubErr;
protected Client _authedClient;
protected WorkQueue _queue;
private Random _random;
public ABuildServerSystemTest(ITestOutputHelper output, SingletonAdaptor<BuildServerFixture> fixture) : base(output)
@@ -136,6 +143,8 @@ namespace Wabbajack.BuildServer.Test
AuthorAPI.ApiKeyOverride = Fixture.APIKey;
_queue = new WorkQueue();
Queue = new WorkQueue();
_random = new Random();
Consts.ModlistSummaryURL = MakeURL("lists/status.json");
Consts.ServerWhitelistURL = MakeURL("ServerWhitelist.yaml");
}
@@ -149,6 +158,14 @@ namespace Wabbajack.BuildServer.Test
return "http://localhost:8080/" + path;
}
protected byte[] RandomData()
{
var arr = new byte[_random.Next(1024)];
_random.NextBytes(arr);
return arr;
}
protected async Task ClearJobQueue()
{
var sql = Fixture.GetService<SqlService>();
@@ -161,6 +178,20 @@ namespace Wabbajack.BuildServer.Test
await sql.FinishJob(job);
}
}
protected async Task RunAllJobs()
{
var sql = Fixture.GetService<SqlService>();
var settings = Fixture.GetService<AppSettings>();
while (true)
{
var job = await sql.GetJob();
if (job == null) break;
job.Result = await job.Payload.Execute(sql, settings);
await sql.FinishJob(job);
}
}
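
ClearJobQueue and RunAllJobs let a test drive the server's SQL-backed job queue inline: jobs are pulled with GetJob() and executed one at a time until GetJob() returns null, so a test does not depend on any background worker to make progress. Below is a minimal, dependency-free sketch of the same drain-until-empty pattern, using plain .NET types rather than the project's SqlService and Job classes.

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class JobPumpSketch
{
    private readonly ConcurrentQueue<Func<Task>> _jobs = new ConcurrentQueue<Func<Task>>();

    public void Enqueue(Func<Task> job) => _jobs.Enqueue(job);

    // Mirrors RunAllJobs(): keep taking jobs until none are left, running each
    // one inline so the caller sees deterministic completion.
    public async Task RunAll()
    {
        while (_jobs.TryDequeue(out var job))
            await job();
    }
}
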
public override void Dispose()
{

View File

@@ -7,6 +7,7 @@ using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Dapper;
using Wabbajack.BuildServer.Controllers;
using Wabbajack.Common;
using Wabbajack.BuildServer.Model.Models;
@@ -46,6 +47,7 @@ namespace Wabbajack.BuildServer.Test
await using var conn = new SqlConnection(CONN_STR);
await conn.OpenAsync();
await KillTestDatabases(conn);
//await new SqlCommand($"CREATE DATABASE {DBName};", conn).ExecuteNonQueryAsync();
await using var schemaStream = Assembly.GetExecutingAssembly().GetManifestResourceStream("Wabbajack.BuildServer.Test.sql.wabbajack_db.sql");
@@ -88,8 +90,22 @@ namespace Wabbajack.BuildServer.Test
await using var conn = new SqlConnection(CONN_STR);
await conn.OpenAsync();
await KillTestDatabases(conn);
}
private async Task KillTestDatabases(SqlConnection conn)
{
await KillAll(conn);
await new SqlCommand($"DROP DATABASE {DBName};", conn).ExecuteNonQueryAsync();
var dbs = await conn.QueryAsync<string>("SELECT name from [master].[sys].[databases]");
foreach (var db in dbs.Where(name => name.StartsWith("test_")))
{
await new SqlCommand(
$"DROP DATABASE {db};",
conn)
.ExecuteNonQueryAsync();
}
}
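
KillTestDatabases now uses Dapper to enumerate master.sys.databases and drops every database whose name starts with test_, instead of dropping only the current run's DBName, so leftovers from earlier or aborted runs are cleaned up as well. A standalone sketch of the sweep follows; the connection string and prefix are placeholders, not values from this repository.

using System.Data.SqlClient;
using System.Linq;
using System.Threading.Tasks;
using Dapper;

static class TestDatabaseSweep
{
    public static async Task DropAll(string connStr, string prefix = "test_")
    {
        await using var conn = new SqlConnection(connStr);
        await conn.OpenAsync();
        var names = await conn.QueryAsync<string>("SELECT name FROM [master].[sys].[databases]");
        foreach (var db in names.Where(n => n.StartsWith(prefix)))
            await conn.ExecuteAsync($"DROP DATABASE [{db}]");   // brackets guard unusual names
    }
}
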
private async Task KillAll(SqlConnection conn)

View File

@@ -0,0 +1,83 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.BuildServer.Models.Jobs;
using Wabbajack.Common;
using Wabbajack.Lib;
using Wabbajack.Lib.Downloaders;
using Wabbajack.Lib.NexusApi;
using Wabbajack.VirtualFileSystem;
using Xunit;
using Xunit.Abstractions;
using Xunit.Priority;
namespace Wabbajack.BuildServer.Test
{
public class ModlistUpdater : ABuildServerSystemTest
{
public ModlistUpdater(ITestOutputHelper output, SingletonAdaptor<BuildServerFixture> fixture) : base(output, fixture)
{
}
[Fact, Priority(0)]
public async Task CanIndexFiles()
{
var sql = Fixture.GetService<SqlService>();
var modId = long.MaxValue >> 1;
var oldFileId = long.MaxValue >> 2;
var newFileId = (long.MaxValue >> 2) + 1;
var oldFileData = RandomData();
var newFileData = RandomData();
var oldDataHash = oldFileData.xxHash();
var newDataHash = newFileData.xxHash();
await "old_file_data.random".RelativeTo(Fixture.ServerPublicFolder).WriteAllBytesAsync(oldFileData);
await "new_file_data.random".RelativeTo(Fixture.ServerPublicFolder).WriteAllBytesAsync(newFileData);
await sql.EnqueueJob(new Job
{
Payload = new IndexJob
{
Archive = new Archive
{
Name = "Oldfile",
State = new HTTPDownloader.State
{
Url = MakeURL("old_file_data.random"),
}
}
}
});
await sql.EnqueueJob(new Job
{
Payload = new IndexJob
{
Archive = new Archive
{
Name = "Newfile",
State = new HTTPDownloader.State
{
Url = MakeURL("new_file_data.random"),
}
}
}
});
await RunAllJobs();
Assert.True(await sql.HaveIndexdFile(oldDataHash));
Assert.True(await sql.HaveIndexdFile(newDataHash));
var settings = Fixture.GetService<AppSettings>();
Assert.Equal($"Oldfile_{oldDataHash.ToHex()}_".RelativeTo(Fixture.ServerArchivesFolder), settings.PathForArchive(oldDataHash));
Assert.Equal($"Newfile_{newDataHash.ToHex()}_".RelativeTo(Fixture.ServerArchivesFolder), settings.PathForArchive(newDataHash));
}
}
}
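
The final assertions pin down the archive naming convention: the stored file name is the archive name, then the xxHash of its contents as hex, then the (here empty) extension, separated by underscores, which is why the expected paths end in a trailing underscore. A hedged sketch of the expected value, with Convert.ToHexString standing in for the project's Hash.ToHex():

using System;
using System.IO;

static class ExpectedArchiveName
{
    // "Oldfile" + hash bytes + no extension  =>  "<folder>/Oldfile_<hex>_"
    public static string For(string archiveFolder, string archiveName, byte[] hashBytes) =>
        Path.Combine(archiveFolder, $"{archiveName}_{Convert.ToHexString(hashBytes)}_");
}
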

View File

@@ -10,8 +10,10 @@ namespace Wabbajack.BuildServer
config.Bind("WabbajackSettings", this);
}
public AbsolutePath DownloadDir { get; set; }
public AbsolutePath ArchiveDir { get; set; }
public string DownloadDir { get; set; }
public AbsolutePath DownloadPath => (AbsolutePath)DownloadDir;
public string ArchiveDir { get; set; }
public AbsolutePath ArchivePath => (AbsolutePath)ArchiveDir;
public string TempFolder { get; set; }
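
DownloadDir and ArchiveDir change from AbsolutePath to plain strings, with the strongly typed values exposed through the computed DownloadPath and ArchivePath properties, presumably because the configuration binder can populate string properties directly while the custom path type needs an explicit cast. A minimal sketch of that bind-then-convert pattern is below; the section name and keys are illustrative, and a plain string stands in for AbsolutePath.

using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

class RawSettings
{
    public string DownloadDir { get; set; }
    // In the real settings class this is: (AbsolutePath)DownloadDir
    public string DownloadPath => DownloadDir;
}

class Program
{
    static void Main()
    {
        var config = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string>
            {
                ["WabbajackSettings:DownloadDir"] = @"c:\tmp\downloads",
            })
            .Build();

        var settings = new RawSettings();
        config.Bind("WabbajackSettings", settings);   // fills the bound string property
        Console.WriteLine(settings.DownloadPath);
    }
}
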

View File

@@ -46,7 +46,7 @@ namespace Wabbajack.BuildServer
var hexHash = hash.ToHex();
var ends = "_" + hexHash + "_";
var file = settings.ArchiveDir.EnumerateFiles()
var file = settings.ArchivePath.EnumerateFiles()
.FirstOrDefault(f => ((string)f.FileNameWithoutExtension).EndsWith(ends));
if (file != default)
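
Because archives are stored under content-addressed names, the lookup scans the archive folder for the file whose name (without extension) ends in the "_<hex hash>_" marker. A standalone sketch of the same lookup with plain System.IO, with the hex conversion standing in for Hash.ToHex():

using System;
using System.IO;
using System.Linq;

static class ArchiveLookup
{
    public static string FindByHash(string archiveFolder, byte[] hashBytes)
    {
        var marker = "_" + Convert.ToHexString(hashBytes) + "_";
        return Directory.EnumerateFiles(archiveFolder)
            .FirstOrDefault(f => Path.GetFileNameWithoutExtension(f).EndsWith(marker));
    }
}
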

View File

@@ -28,36 +28,36 @@ namespace Wabbajack.BuildServer.Models.Jobs
var pk = new List<object>();
pk.Add(AbstractDownloadState.TypeToName[Archive.State.GetType()]);
pk.AddRange(Archive.State.PrimaryKey);
var pk_str = string.Join("|",pk.Select(p => p.ToString()));
var pkStr = string.Join("|",pk.Select(p => p.ToString()));
var found = await sql.DownloadStateByPrimaryKey(pk_str);
if (found == null)
var found = await sql.DownloadStateByPrimaryKey(pkStr);
if (found != null)
return JobResult.Success();
string fileName = Archive.Name;
string folder = Guid.NewGuid().ToString();
Utils.Log($"Indexer is downloading {fileName}");
var downloadDest = settings.DownloadDir.Combine(folder, fileName);
var downloadDest = settings.DownloadPath.Combine(folder, fileName);
await Archive.State.Download(downloadDest);
using (var queue = new WorkQueue())
{
var vfs = new Context(queue, true);
await vfs.AddRoot(settings.DownloadDir.Combine(folder));
await vfs.AddRoot(settings.DownloadPath.Combine(folder));
var archive = vfs.Index.ByRootPath.First().Value;
await sql.MergeVirtualFile(archive);
await sql.AddDownloadState(archive.Hash, Archive.State);
var to_path = settings.ArchiveDir.Combine(
var to_path = settings.ArchivePath.Combine(
$"{Path.GetFileName(fileName)}_{archive.Hash.ToHex()}_{Path.GetExtension(fileName)}");
if (to_path.Exists)
downloadDest.Delete();
else
downloadDest.MoveTo(to_path);
await settings.DownloadDir.Combine(folder).DeleteDirectory();
await settings.DownloadPath.Combine(folder).DeleteDirectory();
}
return JobResult.Success();
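
After indexing, the downloaded file is renamed to "<original name>_<xxHash hex>_<original extension>" and moved into the archive folder; if a file with that content-addressed name already exists, the fresh download is discarded instead, and the per-job GUID download folder is deleted. A standalone sketch of the move-or-discard step:

using System.IO;

static class ArchiveStore
{
    // destinationPath encodes the content hash, so an existing file with the
    // same name is byte-identical and the new download is redundant.
    public static void Store(string downloadedFile, string destinationPath)
    {
        if (File.Exists(destinationPath))
            File.Delete(downloadedFile);
        else
            File.Move(downloadedFile, destinationPath);
    }
}
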

View File

@@ -20,7 +20,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
{
using (var queue = new WorkQueue())
{
var files = settings.ArchiveDir.EnumerateFiles()
var files = settings.ArchivePath.EnumerateFiles()
.Where(f => f.Extension != Consts.HashFileExtension)
.ToList();
var total_count = files.Count;
@@ -40,7 +40,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
}
var sub_folder = Guid.NewGuid().ToString();
var folder = settings.DownloadDir.Combine(sub_folder);
var folder = settings.DownloadPath.Combine(sub_folder);
Utils.Log($"({completed}/{total_count}) Copying {file}");
folder.CreateDirectory();

View File

@@ -197,7 +197,7 @@ namespace Wabbajack.BuildServer.Model.Models
{
await using var conn = await Open();
await conn.ExecuteAsync(
@"UPDATE dbo.Jobs SET Finshed = GETDATE(), Success = @Success, ResultContent = @ResultContent WHERE Id = @Id",
@"UPDATE dbo.Jobs SET Ended = GETDATE(), Success = @Success, ResultContent = @ResultContent WHERE Id = @Id",
new {
job.Id,
Success = job.Result.ResultType == JobResultType.Success,

View File

@@ -134,7 +134,17 @@ namespace Wabbajack.Common
throw;
}
}
public static Hash xxHash(this byte[] data)
{
var hash = new xxHashConfig();
hash.HashSizeInBits = 64;
hash.Seed = 0x42;
using var fs = new MemoryStream(data);
var config = new xxHashConfig {HashSizeInBits = 64};
using var f = new StatusFileStream(fs, $"Hashing memory stream");
var value = xxHashFactory.Instance.Create(config).ComputeHash(f);
return Hash.FromULong(BitConverter.ToUInt64(value.Hash));
}
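
This byte[] overload of xxHash now produces the project's 64-bit Hash value rather than a Base64 string, which is why CreatePatch in the last hunk below can call ToHex() on the result directly instead of going through FromBase64(). A standalone sketch of the same hashing path, using only the System.Data.HashFunction.xxHash package and no project types:

using System;
using System.Data.HashFunction.xxHash;
using System.IO;

static class XxHash64Sketch
{
    public static ulong Hash(byte[] data)
    {
        var config = new xxHashConfig { HashSizeInBits = 64 };
        using var ms = new MemoryStream(data);
        var value = xxHashFactory.Instance.Create(config).ComputeHash(ms);
        return BitConverter.ToUInt64(value.Hash, 0);
    }
}
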
public static Hash FileHashCached(this AbsolutePath file, bool nullOnIoError = false)
{
if (TryGetHashCache(file, out var foundHash)) return foundHash;

View File

@@ -232,22 +232,6 @@ namespace Wabbajack.Common
Status(status, Percent.FactoryPutInRange(totalRead, maxSize));
}
}
public static string xxHash(this byte[] data)
{
var hash = new xxHashConfig();
hash.HashSizeInBits = 64;
hash.Seed = 0x42;
using (var fs = new MemoryStream(data))
{
var config = new xxHashConfig();
config.HashSizeInBits = 64;
using (var f = new StatusFileStream(fs, $"Hashing memory stream"))
{
var value = xxHashFactory.Instance.Create(config).ComputeHash(f);
return value.AsBase64String();
}
}
}
/// <summary>
/// Returns a Base64 encoding of these bytes
@@ -691,8 +675,8 @@ namespace Wabbajack.Common
public static async Task CreatePatch(byte[] a, byte[] b, Stream output)
{
var dataA = a.xxHash().FromBase64().ToHex();
var dataB = b.xxHash().FromBase64().ToHex();
var dataA = a.xxHash().ToHex();
var dataB = b.xxHash().ToHex();
var cacheFile = Consts.PatchCacheFolder.Combine($"{dataA}_{dataB}.patch");
Consts.PatchCacheFolder.CreateDirectory();