Merge pull request #429 from wabbajack-tools/bug-fixes-and-purging

MSSQL backend for IndexedFiles
This commit is contained in:
Timothy Baldridge 2020-01-27 14:28:10 -07:00 committed by GitHub
commit 88674b0fe4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
29 changed files with 414 additions and 117 deletions

View File

@ -0,0 +1,14 @@
using System;
using System.Linq;
using System.Linq.Expressions;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Internal;
namespace Wabbajack.BuildServer.Model
{
public static class Extensions
{
public static void AddWabbajackDB<T>(this )
}
}

View File

@ -0,0 +1,18 @@
using System.Data;
using System.Data.Common;
using Microsoft.Data.SqlClient;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
namespace Wabbajack.BuildServer.Model.Models
{
public class DbFactory
{
public static IDbConnection Connect()
{
return new SqlConnection(Configuration);
}
}
}

View File

@ -12,12 +12,15 @@ namespace Wabbajack.BuildServer
public string DownloadDir { get; set; }
public string ArchiveDir { get; set; }
public bool MinimalMode { get; set; }
public bool JobScheduler { get; set; }
public bool JobRunner { get; set; }
public bool RunFrontEndJobs { get; set; }
public bool RunBackEndJobs { get; set; }
public string BunnyCDN_User { get; set; }
public string BunnyCDN_Password { get; set; }
public string SqlConnection { get; set; }
}
}

View File

@ -1,22 +1,31 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using DynamicData;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models;
using Wabbajack.Common;
using Wabbajack.Lib.Downloaders;
using Wabbajack.VirtualFileSystem;
using IndexedFile = Wabbajack.BuildServer.Models.IndexedFile;
namespace Wabbajack.BuildServer.Controllers
{
[Route("/indexed_files")]
public class IndexedFiles : AControllerBase<IndexedFiles>
{
public IndexedFiles(ILogger<IndexedFiles> logger, DBContext db) : base(logger, db)
private SqlService _sql;
public IndexedFiles(ILogger<IndexedFiles> logger, DBContext db, SqlService sql) : base(logger, db)
{
_sql = sql;
}
[HttpGet]
@ -36,83 +45,54 @@ namespace Wabbajack.BuildServer.Controllers
return Ok(string.Join("\r\n", state.FirstOrDefault().State.GetMetaIni()));
}
/// <summary>
/// Deletes every indexed file belonging to the given Nexus mod, plus (recursively)
/// everything contained inside those files, from both DownloadStates and
/// IndexedFiles. NOTE(review): the route and parameters are Nexus game/mod based,
/// but the method name says SHA256 — presumably historical; confirm intent.
/// </summary>
/// <param name="Game">Nexus game name as stored in NexusDownloader.State.GameName</param>
/// <param name="ModId">Nexus mod id as stored in NexusDownloader.State.ModID</param>
[Authorize]
[HttpDelete]
[Route("/indexed_files/nexus/{Game}/mod/{ModId}")]
public async Task<IActionResult> PurgeBySHA256(string Game, string ModId)
{
    // All download-state records that point at this Nexus mod.
    var files = await Db.DownloadStates.AsQueryable().Where(d => d.State is NexusDownloader.State &&
                                                                 ((NexusDownloader.State)d.State).GameName == Game &&
                                                                 ((NexusDownloader.State)d.State).ModID == ModId)
        .ToListAsync();

    // Walks *upward*: purge every archive that contains `hash`, and that archive's
    // whole subtree. NOTE(review): defined but never called in this method — dead
    // code or an unfinished feature; confirm before removing.
    async Task DeleteParentsOf(HashSet<string> acc, string hash)
    {
        var parents = await Db.IndexedFiles.AsQueryable().Where(f => f.Children.Any(c => c.Hash == hash))
            .ToListAsync();
        foreach (var parent in parents)
            await DeleteThisAndAllChildren(acc, parent.Hash);
    }

    // Walks *downward*: accumulate `hash` and every hash reachable through
    // Children links into `acc` (acc also acts as the visited set).
    async Task DeleteThisAndAllChildren(HashSet<string> acc, string hash)
    {
        acc.Add(hash);
        var children = await Db.IndexedFiles.AsQueryable().Where(f => f.Hash == hash).FirstOrDefaultAsync();
        if (children == null) return;
        foreach (var child in children.Children)
        {
            await DeleteThisAndAllChildren(acc, child.Hash);
        }
    }

    var acc = new HashSet<string>();
    foreach (var file in files)
        await DeleteThisAndAllChildren(acc, file.Hash);

    var acclst = acc.ToList(); // NOTE(review): unused local — candidate for removal

    // Bulk-delete everything collected, then echo the purged hashes to the caller.
    await Db.DownloadStates.DeleteManyAsync(d => acc.Contains(d.Hash));
    await Db.IndexedFiles.DeleteManyAsync(d => acc.Contains(d.Hash));
    return Ok(acc.ToList());
}
[HttpGet]
[Route("{xxHashAsBase64}")]
public async Task<IActionResult> GetFile(string xxHashAsBase64)
{
var id = xxHashAsBase64.FromHex().ToBase64();
var query = new[]
{
new BsonDocument("$match",
new BsonDocument("_id", id)),
new BsonDocument("$graphLookup",
new BsonDocument
{
{"from", "indexed_files"},
{"startWith", "$Children.Hash"},
{"connectFromField", "Hash"},
{"connectToField", "_id"},
{"as", "ChildFiles"},
{"maxDepth", 8},
{"restrictSearchWithMatch", new BsonDocument()}
}),
new BsonDocument("$project",
new BsonDocument
{
// If we return all fields some BSAs will return more that 16MB which is the
// maximum doc size that can can be returned from MongoDB
{ "_id", 1 },
{ "Size", 1 },
{ "Children.Name", 1 },
{ "Children.Hash", 1 },
{ "ChildFiles._id", 1 },
{ "ChildFiles.Size", 1 },
{ "ChildFiles.Children.Name", 1 },
{ "ChildFiles.Children.Hash", 1 },
{ "ChildFiles.ChildFiles._id", 1 },
{ "ChildFiles.ChildFiles.Size", 1 },
{ "ChildFiles.ChildFiles.Children.Name", 1 },
{ "ChildFiles.ChildFiles.Children.Hash", 1 },
{ "ChildFiles.ChildFiles.ChildFiles._id", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.Size", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.Children.Name", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.Children.Hash", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.ChildFiles._id", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.ChildFiles.Size", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.ChildFiles.Children.Name", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.ChildFiles.Children.Hash", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.ChildFiles.ChildFiles._id", 1 },
{ "ChildFiles.ChildFiles.ChildFiles.ChildFiles.ChildFiles.Size", 1 }
})
};
var result = await Db.IndexedFiles.AggregateAsync<TreeResult>(query);
IndexedVirtualFile Convert(TreeResult t, string Name = null)
{
if (t == null)
return null;
Dictionary<string, TreeResult> indexed_children = new Dictionary<string, TreeResult>();
if (t.ChildFiles != null && t.ChildFiles.Count > 0)
indexed_children = t.ChildFiles.ToDictionary(t => t.Hash);
var file = new IndexedVirtualFile
{
Name = Name,
Size = t.Size,
Hash = t.Hash,
Children = t.ChildFiles != null
? t.Children.Select(child => Convert(indexed_children[child.Hash], child.Name)).ToList()
: new List<IndexedVirtualFile>()
};
return file;
}
var first = result.FirstOrDefault();
if (first == null)
return NotFound();
return Ok(Convert(first));
var result = await _sql.AllArchiveContents(BitConverter.ToInt64(xxHashAsBase64.FromHex()));
return Ok(result);
}
public class TreeResult : IndexedFile

View File

@ -1,10 +1,13 @@
using System;
using System.Data;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Builder;
using Microsoft.Data.SqlClient;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
@ -38,8 +41,7 @@ namespace Wabbajack.BuildServer
}
}
}
public static AuthenticationBuilder AddApiKeySupport(this AuthenticationBuilder authenticationBuilder, Action<ApiKeyAuthenticationOptions> options)
{
return authenticationBuilder.AddScheme<ApiKeyAuthenticationOptions, ApiKeyAuthenticationHandler>(ApiKeyAuthenticationOptions.DefaultScheme, options);

View File

@ -5,6 +5,7 @@ using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
using Nettle;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.BuildServer.Models.Jobs;
@ -17,17 +18,20 @@ namespace Wabbajack.BuildServer
protected readonly ILogger<JobManager> Logger;
protected readonly DBContext Db;
protected readonly AppSettings Settings;
protected SqlService Sql;
public JobManager(ILogger<JobManager> logger, DBContext db, AppSettings settings)
public JobManager(ILogger<JobManager> logger, DBContext db, SqlService sql, AppSettings settings)
{
Db = db;
Logger = logger;
Settings = settings;
Sql = sql;
}
public void StartJobRunners()
{
if (Settings.MinimalMode) return;
if (!Settings.JobRunner) return;
for (var idx = 0; idx < 2; idx++)
{
Task.Run(async () =>
@ -47,7 +51,7 @@ namespace Wabbajack.BuildServer
JobResult result;
try
{
result = await job.Payload.Execute(Db, Settings);
result = await job.Payload.Execute(Db, Sql, Settings);
}
catch (Exception ex)
{
@ -69,8 +73,8 @@ namespace Wabbajack.BuildServer
public async Task JobScheduler()
{
if (Settings.MinimalMode) return;
Utils.LogMessages.Subscribe(msg => Logger.Log(LogLevel.Information, msg.ToString()));
if (!Settings.JobScheduler) return;
while (true)
{
await KillOrphanedJobs();

View File

@ -41,5 +41,6 @@ namespace Wabbajack.BuildServer.Models
public string Host { get; set; }
public string Database { get; set; }
public Dictionary<string, string> Collections { get; set; }
public string SqlConnection { get; set; }
}
}

View File

@ -4,6 +4,7 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
using MongoDB.Bson.Serialization.Attributes;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.Jobs;
namespace Wabbajack.BuildServer.Models.JobQueue
@ -19,7 +20,8 @@ namespace Wabbajack.BuildServer.Models.JobQueue
typeof(EnqueueAllGameFiles),
typeof(EnqueueRecentFiles),
typeof(UploadToCDN),
typeof(IndexDynDOLOD)
typeof(IndexDynDOLOD),
typeof(ReindexArchives)
};
public static Dictionary<Type, string> TypeToName { get; set; }
public static Dictionary<string, Type> NameToType { get; set; }
@ -30,7 +32,7 @@ namespace Wabbajack.BuildServer.Models.JobQueue
public virtual bool UsesNexus { get; } = false;
public abstract Task<JobResult> Execute(DBContext db, AppSettings settings);
public abstract Task<JobResult> Execute(DBContext db, SqlService sql,AppSettings settings);
static AJobPayload()
{

View File

@ -4,6 +4,7 @@ using Alphaleonis.Win32.Filesystem;
using System.Linq;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.Common;
using Wabbajack.Lib;
@ -15,7 +16,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
public class EnqueueAllArchives : AJobPayload, IBackEndJob
{
public override string Description => "Add missing modlist archives to indexer";
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
Utils.Log("Starting ModList indexing");
var modlists = await ModlistMetadata.LoadFromGithub();

View File

@ -7,6 +7,7 @@ using Wabbajack.Lib.Downloaders;
using System.IO;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
using Wabbajack.BuildServer.Model.Models;
using Directory = Alphaleonis.Win32.Filesystem.Directory;
using Path = Alphaleonis.Win32.Filesystem.Path;
@ -15,7 +16,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
public class EnqueueAllGameFiles : AJobPayload, IBackEndJob
{
public override string Description { get => $"Enqueue all game files for indexing"; }
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
using (var queue = new WorkQueue(4))
{

View File

@ -5,6 +5,7 @@ using System.Threading.Tasks;
using MongoDB.Driver;
using MongoDB.Driver.Core.Authentication;
using MongoDB.Driver.Linq;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.Common;
using Wabbajack.Lib;
@ -21,7 +22,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
{
Game.Fallout3, Game.Fallout4, Game.Skyrim, Game.SkyrimSpecialEdition, Game.SkyrimVR, Game.FalloutNewVegas, Game.Oblivion
};
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
using (var queue = new WorkQueue())
{

View File

@ -6,6 +6,7 @@ using Wabbajack.Common;
using Wabbajack.Lib.NexusApi;
using MongoDB.Driver;
using Newtonsoft.Json;
using Wabbajack.BuildServer.Model.Models;
namespace Wabbajack.BuildServer.Models.Jobs
@ -14,7 +15,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
{
public override string Description => "Poll the Nexus for updated mods, and clean any references to those mods";
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
var api = await NexusApiClient.Get();

View File

@ -5,6 +5,7 @@ using System.Threading.Tasks;
using HtmlAgilityPack;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.Common;
using Wabbajack.Lib;
@ -19,7 +20,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
public class IndexDynDOLOD : AJobPayload
{
public override string Description => "Queue MEGA URLs from the DynDOLOD Post";
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
var doc = new HtmlDocument();
var body = await new HttpClient().GetStringAsync(new Uri(

View File

@ -6,6 +6,7 @@ using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.Common;
@ -21,7 +22,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
public Archive Archive { get; set; }
public override string Description => $"Index ${Archive.State.PrimaryKeyString} and save the download/file state";
public override bool UsesNexus { get => Archive.State is NexusDownloader.State; }
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
var pk = new List<object>();
pk.Add(AbstractDownloadState.TypeToName[Archive.State.GetType()]);
@ -42,26 +43,12 @@ namespace Wabbajack.BuildServer.Models.Jobs
{
var vfs = new Context(queue, true);
await vfs.AddRoot(Path.Combine(settings.DownloadDir, folder));
var archive = vfs.Index.ByRootPath.First();
var converted = ConvertArchive(new List<IndexedFile>(), archive.Value);
try
{
await db.IndexedFiles.InsertManyAsync(converted, new InsertManyOptions {IsOrdered = false});
}
catch (MongoBulkWriteException)
{
}
await db.DownloadStates.InsertOneAsync(new DownloadState
{
Key = pk_str,
Hash = archive.Value.Hash,
State = Archive.State,
IsValid = true
});
var archive = vfs.Index.ByRootPath.First().Value;
await sql.MergeVirtualFile(archive);
var to_path = Path.Combine(settings.ArchiveDir,
$"{Path.GetFileName(fileName)}_{archive.Value.Hash.FromBase64().ToHex()}_{Path.GetExtension(fileName)}");
$"{Path.GetFileName(fileName)}_{archive.Hash.FromBase64().ToHex()}_{Path.GetExtension(fileName)}");
if (File.Exists(to_path))
File.Delete(downloadDest);
else

View File

@ -0,0 +1,69 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.Common;
using Wabbajack.VirtualFileSystem;
namespace Wabbajack.BuildServer.Models.Jobs
{
/// <summary>
/// Job that walks every file in the archive directory and (re)ingests its
/// contents into the SQL IndexedFiles tables via SqlService.MergeVirtualFile.
/// Files whose hash is already indexed are skipped.
/// </summary>
public class ReindexArchives : AJobPayload
{
    public override string Description => "Reindex all files in the mod archive folder";

    /// <summary>
    /// For each archive: copy it into a unique scratch folder, analyze it with the
    /// VFS, merge the resulting file tree into SQL, then delete the scratch folder.
    /// Per-file failures are logged and swallowed so one bad archive does not
    /// abort the whole run.
    /// </summary>
    public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
    {
        using (var queue = new WorkQueue())
        {
            var total_count = Directory.EnumerateFiles(settings.ArchiveDir).Count();
            int completed = 0;

            await Directory.EnumerateFiles(settings.ArchiveDir)
                .PMap(queue, async file =>
                {
                    try
                    {
                        // Incremented up front, so skipped files count toward progress too.
                        Interlocked.Increment(ref completed);

                        if (await sql.HaveIndexdFile(await file.FileHashAsync()))
                        {
                            Utils.Log($"({completed}/{total_count}) Skipping {Path.GetFileName(file)}, it's already indexed");
                            return;
                        }

                        // Unique scratch folder so parallel PMap workers don't collide.
                        var sub_folder = Guid.NewGuid().ToString();
                        string folder = Path.Combine(settings.DownloadDir, sub_folder);

                        Utils.Log($"({completed}/{total_count}) Copying {file}");
                        Directory.CreateDirectory(folder);
                        Utils.Log($"({completed}/{total_count}) Copying {file}"); // NOTE(review): duplicate of the log line above — likely meant to report folder creation
                        File.Copy(file, Path.Combine(folder, Path.GetFileName(file)));

                        Utils.Log($"({completed}/{total_count}) Analyzing {file}");
                        // AddRoot indexes the copied archive (and its contents) into the VFS.
                        var vfs = new Context(queue, true);
                        await vfs.AddRoot(folder);

                        var root = vfs.Index.ByRootPath.First().Value;
                        Utils.Log($"({completed}/{total_count}) Ingesting {root.ThisAndAllChildren.Count()} files");
                        await sql.MergeVirtualFile(root);

                        Utils.Log($"({completed}/{total_count}) Cleaning up {file}");
                        Utils.DeleteDirectory(folder);
                    }
                    catch (Exception ex)
                    {
                        // Best-effort: log and continue with the next archive.
                        Utils.Log(ex.ToString());
                    }
                });
        }
        return JobResult.Success();
    }
}
}

View File

@ -2,6 +2,7 @@
using System.Linq;
using System.Threading.Tasks;
using Alphaleonis.Win32.Filesystem;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.Common;
using Wabbajack.Lib;
@ -15,7 +16,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
public class UpdateModLists : AJobPayload, IFrontEndJob
{
public override string Description => "Validate curated modlists";
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
Utils.Log("Starting Modlist Validation");
var modlists = await ModlistMetadata.LoadFromGithub();

View File

@ -7,6 +7,7 @@ using CG.Web.MegaApiClient;
using FluentFTP;
using MongoDB.Driver;
using MongoDB.Driver.Linq;
using Wabbajack.BuildServer.Model.Models;
using Wabbajack.BuildServer.Models.JobQueue;
using Wabbajack.Common;
using Wabbajack.Lib;
@ -21,7 +22,7 @@ namespace Wabbajack.BuildServer.Models.Jobs
public string FileId { get; set; }
public override async Task<JobResult> Execute(DBContext db, AppSettings settings)
public override async Task<JobResult> Execute(DBContext db, SqlService sql, AppSettings settings)
{
var file = await db.UploadedFiles.AsQueryable().Where(f => f.Id == FileId).FirstOrDefaultAsync();
using (var client = new FtpClient("storage.bunnycdn.com"))

View File

@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
namespace Wabbajack.BuildServer.Model.Models
{
/// <summary>
/// Row model for the ArchiveContent SQL table: one parent/child containment
/// edge in the archive graph. Hashes are xxHash64 values stored as signed
/// 64-bit integers (see SqlService.IngestFile's BitConverter.ToInt64 usage).
/// </summary>
public partial class ArchiveContent
{
    // xxHash64 (as long) of the containing archive
    public long Parent { get; set; }
    // xxHash64 (as long) of the file contained inside the parent
    public long Child { get; set; }
    // Relative path of the child within the parent archive
    public string Path { get; set; }
    // Not included in ToDataTable(IEnumerable<ArchiveContent>) — presumably
    // computed server-side from Path; TODO confirm against the SQL schema
    public byte[] PathHash { get; set; }
}
}

View File

@ -0,0 +1,40 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
namespace Wabbajack.BuildServer.Model.Models
{
/// <summary>
/// Conversion helpers that flatten model collections into DataTables shaped
/// like the SQL user-defined table types consumed by the dbo.MergeIndexedFiles
/// stored procedure. Column order must match the SQL type definitions exactly.
/// </summary>
public static class Extensions
{
    /// <summary>
    /// Builds a dbo.IndexedFileType table (Hash, Sha256, Sha1, Md5, Crc32, Size)
    /// from the given files.
    /// </summary>
    public static DataTable ToDataTable(this IEnumerable<IndexedFile> coll)
    {
        var table = BuildTable("dbo.IndexedFileType",
            ("Hash", typeof(long)),
            ("Sha256", typeof(byte[])),
            ("Sha1", typeof(byte[])),
            ("Md5", typeof(byte[])),
            ("Crc32", typeof(int)),
            ("Size", typeof(long)));

        foreach (var file in coll)
            table.Rows.Add(file.Hash, file.Sha256, file.Sha1, file.Md5, file.Crc32, file.Size);
        return table;
    }

    /// <summary>
    /// Builds a dbo.ArchiveContentType table (Parent, Child, Path) from the
    /// given containment edges. PathHash is not sent — presumably derived
    /// server-side; TODO confirm against the SQL type definition.
    /// </summary>
    public static DataTable ToDataTable(this IEnumerable<ArchiveContent> coll)
    {
        var table = BuildTable("dbo.ArchiveContentType",
            ("Parent", typeof(long)),
            ("Child", typeof(long)),
            ("Path", typeof(string)));

        foreach (var edge in coll)
            table.Rows.Add(edge.Parent, edge.Child, edge.Path);
        return table;
    }

    // Creates an empty DataTable with the given name and typed columns, in order.
    private static DataTable BuildTable(string name, params (string Name, Type Type)[] columns)
    {
        var table = new DataTable(name);
        foreach (var (colName, colType) in columns)
            table.Columns.Add(colName, colType);
        return table;
    }
}
}

View File

@ -0,0 +1,15 @@
using System;
using System.Collections.Generic;
namespace Wabbajack.BuildServer.Model.Models
{
/// <summary>
/// Row model for the IndexedFile SQL table: one unique file identified by its
/// xxHash64, plus the alternate hashes and size recorded for it.
/// </summary>
public class IndexedFile
{
    // xxHash64 of the file contents, stored as a signed 64-bit integer
    public long Hash { get; set; }
    // SHA-256 digest (raw bytes)
    public byte[] Sha256 { get; set; }
    // SHA-1 digest (raw bytes)
    public byte[] Sha1 { get; set; }
    // MD5 digest (raw bytes)
    public byte[] Md5 { get; set; }
    // CRC32 checksum as a signed 32-bit integer
    public int Crc32 { get; set; }
    // File size in bytes
    public long Size { get; set; }
}
}

View File

@ -0,0 +1,124 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using System.Threading.Tasks;
using Dapper;
using Microsoft.Extensions.Configuration;
using Wabbajack.BuildServer.Models;
using Wabbajack.Common;
using Wabbajack.VirtualFileSystem;
namespace Wabbajack.BuildServer.Model.Models
{
/// <summary>
/// Dapper-based data-access layer for the MSSQL IndexedFiles backend.
/// Registered as a singleton, so one connection stays open for the service
/// lifetime; the dev connection string enables MultipleActiveResultSets so the
/// shared connection can serve overlapping queries -- TODO confirm concurrent
/// use is intended.
/// </summary>
public class SqlService
{
    private IDbConnection _conn;

    public SqlService(AppSettings configuration)
    {
        _conn = new SqlConnection(configuration.SqlConnection);
        _conn.Open();
    }

    public IDbConnection Connection { get => _conn; }

    /// <summary>
    /// Upserts an archive's entire VirtualFile tree into the IndexedFile /
    /// ArchiveContent tables via the dbo.MergeIndexedFiles stored procedure.
    /// </summary>
    /// <param name="vfile">Root of the analyzed archive.</param>
    public async Task MergeVirtualFile(VirtualFile vfile)
    {
        var files = new List<IndexedFile>();
        var contents = new List<ArchiveContent>();
        IngestFile(vfile, files, contents);

        // The same payload can occur many times inside one archive; send each
        // unique row once so the MERGE doesn't see duplicate keys.
        files = files.DistinctBy(f => f.Hash).ToList();
        contents = contents.DistinctBy(c => (c.Parent, c.Path)).ToList();

        await Connection.ExecuteAsync("dbo.MergeIndexedFiles",
            new {Files = files.ToDataTable(), Contents = contents.ToDataTable()},
            commandType: CommandType.StoredProcedure);
    }

    // Recursively flattens a VirtualFile tree into IndexedFile rows plus
    // Parent/Child ArchiveContent edges. Hashes arrive base64-encoded and are
    // stored as signed 64-bit integers.
    private static void IngestFile(VirtualFile root, ICollection<IndexedFile> files, ICollection<ArchiveContent> contents)
    {
        var hash = BitConverter.ToInt64(root.Hash.FromBase64());
        files.Add(new IndexedFile
        {
            Hash = hash,
            Sha256 = root.ExtendedHashes.SHA256.FromHex(),
            Sha1 = root.ExtendedHashes.SHA1.FromHex(),
            Md5 = root.ExtendedHashes.MD5.FromHex(),
            Crc32 = BitConverter.ToInt32(root.ExtendedHashes.CRC.FromHex()),
            Size = root.Size
        });

        if (root.Children == null) return;
        foreach (var child in root.Children)
        {
            IngestFile(child, files, contents);
            var child_hash = BitConverter.ToInt64(child.Hash.FromBase64());
            contents.Add(new ArchiveContent
            {
                Parent = hash,
                Child = child_hash,
                Path = child.Name
            });
        }
    }

    /// <summary>
    /// True when a file with the given hash has already been indexed.
    /// (Name keeps the original "Indexd" spelling for caller compatibility.)
    /// </summary>
    /// <param name="hash">base64-encoded xxHash64 of the file</param>
    public async Task<bool> HaveIndexdFile(string hash)
    {
        var row = await Connection.QueryAsync(@"SELECT * FROM IndexedFile WHERE Hash = @Hash",
            new {Hash = BitConverter.ToInt64(hash.FromBase64())});
        return row.Any();
    }

    // Shape of one row returned by the AllArchiveContents query.
    class ArchiveContentsResult
    {
        public long Parent { get; set; }
        public long Hash { get; set; }
        public long Size { get; set; }
        public string Path { get; set; }
    }

    /// <summary>
    /// Get the name, path, hash and size of the file with the provided hash, and all files perhaps
    /// contained inside this file. Note: files themselves do not have paths, so the top level result
    /// will have a null path
    /// </summary>
    /// <param name="hash">The xxHash64 of the file to look up</param>
    /// <returns>The reconstructed file tree, or null when the hash is not indexed.</returns>
    public async Task<IndexedVirtualFile> AllArchiveContents(long hash)
    {
        // First SELECT: the archive itself, flagged with the synthetic Parent = 0.
        // Second SELECT: every file transitively contained inside it.
        // BUGFIX: the original first SELECT referenced alias "i" without declaring
        // it ("FROM IndexedFile" instead of "FROM IndexedFile i"), which is
        // invalid T-SQL and failed at execution time.
        var files = await Connection.QueryAsync<ArchiveContentsResult>(@"
              SELECT 0 as Parent, i.Hash, i.Size, null as Path FROM IndexedFile i WHERE i.Hash = @Hash
              UNION ALL
              SELECT a.Parent, i.Hash, i.Size, a.Path FROM AllArchiveContent a
              LEFT JOIN IndexedFile i ON i.Hash = a.Child
              WHERE TopParent = @Hash",
            new {Hash = hash});

        var grouped = files.GroupBy(f => f.Parent)
            .ToDictionary(f => f.Key, f => (IEnumerable<ArchiveContentsResult>)f);

        List<IndexedVirtualFile> Build(long parent)
        {
            // BUGFIX: leaf files have no group entry; the original indexed the
            // dictionary directly ("grouped[parent]") and threw
            // KeyNotFoundException for any file with no children.
            if (!grouped.TryGetValue(parent, out var children))
                return new List<IndexedVirtualFile>();

            return children.Select(f => new IndexedVirtualFile
            {
                Name = f.Path,
                Hash = BitConverter.GetBytes(f.Hash).ToBase64(),
                Size = f.Size,
                Children = Build(f.Hash)
            }).ToList();
        }

        // FirstOrDefault (was First) so an unknown hash yields null instead of
        // an unhandled InvalidOperationException.
        return Build(0).FirstOrDefault();
    }
}
}

View File

@ -29,6 +29,7 @@ using Microsoft.AspNetCore.Mvc.NewtonsoftJson;
using Microsoft.AspNetCore.StaticFiles;
using Wabbajack.BuildServer.Controllers;
using Microsoft.Extensions.FileProviders;
using Wabbajack.BuildServer.Model.Models;
using Directory = System.IO.Directory;
@ -67,6 +68,7 @@ namespace Wabbajack.BuildServer
services.AddSingleton<DBContext>();
services.AddSingleton<JobManager>();
services.AddSingleton<AppSettings>();
services.AddSingleton<SqlService>();
services.AddMvc();
services.AddControllers()
.AddNewtonsoftJson(o =>

View File

@ -11,6 +11,7 @@
<ItemGroup>
<PackageReference Include="BunnyCDN.Net.Storage" Version="1.0.2" />
<PackageReference Include="Dapper" Version="2.0.30" />
<PackageReference Include="FluentFTP" Version="29.0.3" />
<PackageReference Include="graphiql" Version="1.2.0" />
<PackageReference Include="GraphQL" Version="3.0.0-preview-1352" />
@ -24,6 +25,7 @@
<PackageReference Include="MongoDB.Driver.Core" Version="2.10.1" />
<PackageReference Include="Nettle" Version="1.3.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="5.0.0" />
<PackageReference Include="System.Data.SqlClient" Version="4.8.0" />
</ItemGroup>
<ItemGroup>
@ -82,4 +84,10 @@
<Folder Include="public\files" />
</ItemGroup>
<ItemGroup>
<Reference Include="Microsoft.Data.SqlClient, Version=1.0.19269.1, Culture=neutral, PublicKeyToken=23ec7fc2d6eaa4a5">
<HintPath>..\Wabbajack.MassImport\bin\Release\netcoreapp3.1\Microsoft.Data.SqlClient.dll</HintPath>
</Reference>
</ItemGroup>
</Project>

View File

@ -31,12 +31,14 @@
},
"WabbajackSettings": {
"DownloadDir": "c:\\tmp\\downloads",
"ArchiveDir": "c:\\archives",
"MinimalMode": true,
"ArchiveDir": "w:\\archives",
"JobRunner": true,
"JobScheduler": false,
"RunFrontEndJobs": true,
"RunBackEndJobs": true,
"BunnyCDN_User": "wabbajackcdn",
"BunnyCDN_Password": "XXXX"
"BunnyCDN_Password": "XXXX",
"SQLConnection": "Data Source=192.168.3.1,1433;Initial Catalog=wabbajack_dev;User ID=wabbajack;Password=wabbajack;MultipleActiveResultSets=true"
},
"AllowedHosts": "*"
}

Binary file not shown.

View File

@ -207,7 +207,7 @@ namespace Wabbajack.Common
Utils.Status($"Extracting {name} - 100%", 100, alsoLog: true);
return;
}
Utils.Log(new _7zipReturnError(p.ExitCode, source, dest, p.StandardOutput.ReadToEnd()));
Utils.Error(new _7zipReturnError(p.ExitCode, source, dest, p.StandardOutput.ReadToEnd()));
}
/// <summary>

View File

@ -6,7 +6,7 @@ using System.Threading.Tasks;
namespace Wabbajack.Common.StatusFeed.Errors
{
public class _7zipReturnError : AStatusMessage, IError
public class _7zipReturnError : AErrorMessage
{
public string Destination { get; }
public string Filename;

View File

@ -391,7 +391,7 @@ namespace Wabbajack.Lib
Directory.EnumerateDirectories(OutputFolder, DirectoryEnumerationOptions.Recursive)
.Where(p => !expectedFolders.Contains(p))
.OrderByDescending(p => p.Length)
.Do(p => Directory.Delete(p));
.Do(Utils.DeleteDirectory);
}
catch (Exception)
{

View File

@ -158,7 +158,13 @@ namespace Wabbajack.Lib
var mo2Files = Directory.EnumerateFiles(MO2Folder, "*", SearchOption.AllDirectories)
.Where(p => p.FileExists())
.Select(p => new RawSourceFile(VFS.Index.ByRootPath[p], p.RelativeTo(MO2Folder)));
.Select(p =>
{
if (!VFS.Index.ByFullPath.ContainsKey(p))
Utils.Log($"WELL THERE'S YOUR PROBLEM: {p} {VFS.Index.ByRootPath.Count}");
return new RawSourceFile(VFS.Index.ByRootPath[p], p.RelativeTo(MO2Folder));
});
// If Game Folder Files exists, ignore the game folder
IEnumerable<RawSourceFile> gameFiles;