massive refactoring, it compiles, but it's surely broken

Timothy Baldridge 2019-08-19 22:57:08 -06:00
parent fa8f038f70
commit c343a74359
11 changed files with 295 additions and 457 deletions

View File

@ -1,8 +1,4 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Wabbajack.Common;
namespace VirtualFileSystem.Test
@ -15,7 +11,7 @@ namespace VirtualFileSystem.Test
Utils.SetStatusFn((s, i) => Console.WriteLine(s));
WorkQueue.Init((a, b, c) => { return; },
(a, b) => { return; });
VirtualFileSystem.VFS.AddRoot(@"D:\MO2 Instances\Mod Organizer 2");
VFS.VirtualFileSystem.VFS.AddRoot(@"D:\MO2 Instances\Mod Organizer 2");
}
}
}

View File

@ -10,7 +10,7 @@ using System.Linq;
using System.Reflection;
using Wabbajack.Common;
namespace VirtualFileSystem
namespace VFS
{
public class VirtualFileSystem
{
@ -18,7 +18,7 @@ namespace VirtualFileSystem
internal static string _stagedRoot;
public static VirtualFileSystem VFS;
private Dictionary<string, VirtualFile> _files = new Dictionary<string, VirtualFile>();
private bool _disableDiskCache;
public static string RootFolder { get; }
public Dictionary<string, IEnumerable<VirtualFile>> HashIndex { get; private set; }
@ -26,13 +26,12 @@ namespace VirtualFileSystem
static VirtualFileSystem()
{
VFS = new VirtualFileSystem();
RootFolder = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
RootFolder = ".\\";
_stagedRoot = Path.Combine(RootFolder, "vfs_staged_files");
if (Directory.Exists(_stagedRoot))
Directory.Delete(_stagedRoot, true);
Directory.CreateDirectory(_stagedRoot);
}
public VirtualFileSystem ()
@ -42,20 +41,41 @@ namespace VirtualFileSystem
private void LoadFromDisk()
{
Utils.Log("Loading VFS Cache");
if (!File.Exists("vfs_cache.bson")) return;
_files = "vfs_cache.bson".FromBSON<IEnumerable<VirtualFile>>(root_is_array:true).ToDictionary(f => f.FullPath);
CleanDB();
try
{
Utils.Log("Loading VFS Cache");
if (!File.Exists("vfs_cache.bson")) return;
_files = "vfs_cache.bson".FromBSON<IEnumerable<VirtualFile>>(root_is_array: true).ToDictionary(f => f.FullPath);
CleanDB();
}
catch(Exception ex)
{
Utils.Log($"Purging cache due to {ex}");
File.Delete("vfs_cache.bson");
_files.Clear();
}
}
public void SyncToDisk()
{
if (!_disableDiskCache)
lock(this)
{
_files.Values.OfType<VirtualFile>().ToBSON("vfs_cache.bson");
}
}
public IList<VirtualFile> FilesInArchive(VirtualFile f)
{
var path = f.FullPath + "|";
lock (this)
{
return _files.Values
.Where(v => v.FullPath.StartsWith(path))
.ToList();
}
}
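The VFS keeps every file, including archive members, in one flat dictionary keyed by FullPath, with nesting expressed by a "|" separator — so FilesInArchive reduces to a prefix scan. A small sketch of the convention (paths below are made up for illustration):

// Hypothetical examples of the pipe-delimited FullPath convention:
//   archive on disk:          D:\Downloads\textures.7z
//   file inside it:           D:\Downloads\textures.7z|armor\iron.dds
//   file in a nested archive: D:\Downloads\textures.7z|inner.bsa|armor\iron.dds
var archive = VirtualFileSystem.VFS.Lookup(@"D:\Downloads\textures.7z");
foreach (var child in VirtualFileSystem.VFS.FilesInArchive(archive))
    Console.WriteLine(child.FullPath); // each entry starts with "D:\Downloads\textures.7z|"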
public void Purge(VirtualFile f)
{
@ -117,6 +137,36 @@ namespace VirtualFileSystem
}
}
public void BackfillMissing()
{
lock(this)
{
_files.Values
.Select(f => f.ParentPath)
.Where(s => !_files.ContainsKey(s))
.ToHashSet()
.Do(s =>
{
AddKnown(new VirtualFile() { Paths = s.Split('|') });
});
}
}
/// <summary>
/// Add a known file to the index. This is a bit of a hack, as we don't assume that all the fields for the archive are filled in.
/// You will need to manually update the SHA hash when you are done adding files, by calling `RefreshIndexes`.
/// </summary>
/// <param name="virtualFile"></param>
public void AddKnown(VirtualFile virtualFile)
{
lock(this)
{
// We don't know enough about these files to be able to store them in the disk cache
_disableDiskCache = true;
_files[virtualFile.FullPath] = virtualFile;
}
}
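A sketch of the intended call sequence for these stub entries, mirroring what the installer's PrimeVFS does later in this commit (the path and hash are placeholders):

// Register an archive whose hash is already known, without touching the disk:
VirtualFileSystem.VFS.AddKnown(new VirtualFile
{
    Paths = new[] { @"D:\Downloads\textures.7z" },
    Hash = "3fc9b689..." // placeholder; normally taken from the mod list
});
VirtualFileSystem.VFS.RefreshIndexes();
// Register a file inside that archive; Paths holds one segment per nesting level:
VirtualFileSystem.VFS.AddKnown(new VirtualFile
{
    Paths = new[] { @"D:\Downloads\textures.7z", @"armor\iron.dds" }
});
VirtualFileSystem.VFS.BackfillMissing(); // stub out any parents that were never added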
/// <summary>
/// Adds the root path to the filesystem. This may take quite some time as every file in the folder will be hashed,
/// and every archive examined.
@ -128,7 +178,7 @@ namespace VirtualFileSystem
RefreshIndexes();
}
private void RefreshIndexes()
public void RefreshIndexes()
{
Utils.Log("Building Hash Index");
lock(this)
@ -141,8 +191,9 @@ namespace VirtualFileSystem
private void IndexPath(string path)
{
Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories)
.PMap(f => UpdateFile(f));
var file_list = Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories).ToList();
Utils.Log($"Updating the cache for {file_list.Count} files");
file_list.PMap(f => UpdateFile(f));
SyncToDisk();
}
@ -152,7 +203,7 @@ namespace VirtualFileSystem
var lv = Lookup(f);
if (lv == null)
{
Utils.Log($"Analyzing {0}");
Utils.Status($"Analyzing {f}");
lv = new VirtualFile()
{
@ -200,11 +251,9 @@ namespace VirtualFileSystem
f.FinishedIndexing = true;
SyncToDisk();
Utils.Log($"{_files.Count} docs in VFS cache");
}
private void Stage(IEnumerable<VirtualFile> files)
public void Stage(IEnumerable<VirtualFile> files)
{
var grouped = files.GroupBy(f => f.ParentArchive)
.OrderBy(f => f.Key == null ? 0 : f.Key.Paths.Length)
@ -224,6 +273,13 @@ namespace VirtualFileSystem
}
}
public StagingGroup StageWith(IEnumerable<VirtualFile> files)
{
return new StagingGroup(files);
}
public VirtualFile this[string path]
{
get
@ -268,8 +324,36 @@ namespace VirtualFileSystem
}
}
/// <summary>
/// Given a hash path (a path whose first element is an archive hash), return the VirtualFile it references
/// </summary>
/// <param name="archiveHashPath"></param>
/// <returns></returns>
public VirtualFile FileForArchiveHashPath(string[] archiveHashPath)
{
var archive = HashIndex[archiveHashPath[0]].Where(a => a.IsArchive).OrderByDescending(a => a.LastModified).First();
// Prefix with the resolved archive's on-disk path, not the raw HashIndex entry
string fullPath = archive.FullPath + "|" + String.Join("|", archiveHashPath.Skip(1));
return Lookup(fullPath);
}
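A hash path is simply the source archive's hash followed by the path segments inside it, so a lookup might look like this (hash value hypothetical):

// Resolve a hash-rooted path back to a known VirtualFile:
var hashPath = new[] { "3fc9b689...", @"armor\iron.dds" }; // { archive hash, inner path }
VirtualFile file = VirtualFileSystem.VFS.FileForArchiveHashPath(hashPath);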
}
public class StagingGroup : List<VirtualFile>, IDisposable
{
public StagingGroup(IEnumerable<VirtualFile> files) : base(files)
{
}
public void Dispose()
{
this.Do(f => f.Unstage());
}
internal void Stage()
{
VirtualFileSystem.VFS.Stage(this);
}
}
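Because StagingGroup unstages its members on Dispose, the natural shape for callers is a using block, as BuildArchivePatches does further down in this commit. One caveat: Stage() is internal to the VFS assembly here, so presumably StageWith is meant to stage the files before returning. A sketch of the intended pattern:

using (var group = VirtualFileSystem.VFS.StageWith(virtualFiles))
{
    foreach (var file in group)
    {
        using (var stream = file.OpenRead()) // reads from the file's StagedPath
        {
            // ... consume the staged contents ...
        }
    }
} // Dispose() unstages every member of the group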
[JsonObject(MemberSerialization = MemberSerialization.OptIn)]
public class VirtualFile
@ -367,6 +451,12 @@ namespace VirtualFileSystem
if (IsConcrete) return Paths[0];
return _stagedPath;
}
set
{
if (IsStaged && value != null)
throw new InvalidDataException("Can't change the path of an already staged file");
_stagedPath = value;
}
}
public FileStream OpenRead()
@ -405,6 +495,7 @@ namespace VirtualFileSystem
internal string GenerateStagedName()
{
if (_stagedPath != null) return _stagedPath;
_stagedPath = Path.Combine(VirtualFileSystem._stagedRoot, Guid.NewGuid().ToString() + Path.GetExtension(Paths.Last()));
return _stagedPath;
}
@ -447,6 +538,21 @@ namespace VirtualFileSystem
return _parentPath;
}
}
public IEnumerable<VirtualFile> FileInArchive
{
get
{
return VirtualFileSystem.VFS.FilesInArchive(this);
}
}
public string[] MakeRelativePaths()
{
var path_copy = (string[])Paths.Clone();
path_copy[0] = VirtualFileSystem.VFS.Lookup(Paths[0]).Hash;
return path_copy;
}
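MakeRelativePaths swaps the concrete top-level path for that file's archive hash, producing exactly the ArchiveHashPath shape consumed by FileForArchiveHashPath above. Illustratively (hash shortened, values hypothetical):

var relative = file.MakeRelativePaths();
// file.Paths: { @"D:\Downloads\textures.7z", @"armor\iron.dds" }
// relative:   { "3fc9b689...",               @"armor\iron.dds" }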
}

View File

@ -118,6 +118,8 @@ namespace Wabbajack.Common
return Consts.SupportedArchives.Contains(v) || v == ".bsa";
}
// Probably replace this with VFS?
/*
public static void DeepExtract(string file, IEnumerable<FromArchive> files, Func<FromArchive, Entry, Stream> fnc, bool leave_open = false, int depth = 1)
{
// Files we need to extract at this level
@ -168,6 +170,8 @@ namespace Wabbajack.Common
DeepExtract(archive.Value, archives_for_level[archive.Key], fnc, leave_open, depth + 1);
File.Delete(archive.Value);
}
}
*/
}
}

View File

@ -1,127 +0,0 @@
using Compression.BSA;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Wabbajack.Common
{
public class HashCache : IDisposable
{
public class Entry
{
public string name;
public string hash;
public long size;
public DateTime last_modified;
}
public class BSA
{
public string full_path;
public string hash;
public long size;
public DateTime last_modified;
public Dictionary<string, string> entries;
}
private ConcurrentDictionary<string, Entry> _hashes = new ConcurrentDictionary<string, Entry>();
private ConcurrentDictionary<string, BSA> _bsas = new ConcurrentDictionary<string, BSA>();
private bool disposed;
public class DB
{
public List<Entry> entries;
public List<BSA> bsas;
}
public HashCache()
{
if (Consts.HashCacheName.FileExists())
{
var json = Consts.HashCacheName.FromJSON<DB>();
_hashes = new ConcurrentDictionary<string, Entry>(json.entries.Select(e => new KeyValuePair<string, Entry>(e.name, e)));
_bsas = new ConcurrentDictionary<string, BSA>(json.bsas.Select(e => new KeyValuePair<string, BSA>(e.full_path, e)));
}
}
public string HashFile(string filename)
{
TOP:
var result = _hashes.GetOrAdd(filename,
s =>
{
var fi = new FileInfo(filename);
return new Entry
{
name = filename,
hash = Utils.FileSHA256(filename),
size = fi.Length,
last_modified = fi.LastWriteTimeUtc
};
});
var info = new FileInfo(filename);
if (info.LastWriteTimeUtc != result.last_modified || info.Length != result.size)
{
_hashes.TryRemove(filename, out Entry v);
goto TOP;
}
return result.hash;
}
public void Dispose()
{
if (disposed) return;
new DB
{
entries = _hashes.Values.ToList(),
bsas = _bsas.Values.ToList()
}.ToJSON(Consts.HashCacheName);
disposed = true;
_hashes = null;
_bsas = null;
}
public List<(string, string)> HashBSA(string absolutePath, Action<string> status)
{
TOP:
var finfo = new FileInfo(absolutePath);
if (_bsas.TryGetValue(absolutePath, out BSA ar))
{
if (ar.last_modified == finfo.LastWriteTimeUtc && ar.size == finfo.Length)
return ar.entries.Select(kv => (kv.Key, kv.Value)).ToList();
_bsas.TryRemove(absolutePath, out BSA value);
}
var bsa = new BSA()
{
full_path = absolutePath,
size = finfo.Length,
last_modified = finfo.LastAccessTimeUtc,
};
var entries = new ConcurrentBag<(string, string)>();
status($"Hashing BSA: {absolutePath}");
using (var a = new BSAReader(absolutePath))
{
a.Files.PMap(entry =>
{
status($"Hashing BSA: {absolutePath} - {entry.Path}");
var data = entry.GetData();
entries.Add((entry.Path, data.SHA256()));
});
}
bsa.entries = entries.ToDictionary(e => e.Item1, e => e.Item2);
_bsas.TryAdd(absolutePath, bsa);
goto TOP;
}
}
}

View File

@ -78,10 +78,8 @@
<ItemGroup>
<Compile Include="BSDiff.cs" />
<Compile Include="Consts.cs" />
<Compile Include="Data.cs" />
<Compile Include="DynamicIniData.cs" />
<Compile Include="FileExtractor.cs" />
<Compile Include="HashCache.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="SplittingStream.cs" />
<Compile Include="Utils.cs" />

View File

@ -312,7 +312,6 @@ namespace Wabbajack
{
try
{
compiler.LoadArchives();
compiler.Compile();
}
catch (Exception ex)

View File

@ -1,6 +1,5 @@
using Compression.BSA;
using Newtonsoft.Json;
using SharpCompress.Archives;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
@ -10,10 +9,10 @@ using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;
using Wabbajack.Common;
using static Wabbajack.NexusAPI;
using VFS;
namespace Wabbajack
{
@ -59,9 +58,17 @@ namespace Wabbajack
public ConcurrentBag<Directive> ExtraFiles { get; private set; }
public Dictionary<string, dynamic> ModInis { get; private set; }
public List<IndexedArchive> IndexedArchives;
public VirtualFileSystem VFS
{
get
{
return VirtualFileSystem.VFS;
}
}
public List<IndexedArchiveEntry> IndexedFiles { get; private set; }
public List<IndexedArchive> IndexedArchives { get; private set; }
public Dictionary<string, IEnumerable<VirtualFile>> IndexedFiles { get; private set; }
public class IndexedFileMatch
{
@ -103,85 +110,21 @@ namespace Wabbajack
GamePath = ((string)MO2Ini.General.gamePath).Replace("\\\\", "\\");
}
public void LoadArchives()
{
IndexedArchives = Directory.EnumerateFiles(MO2DownloadsFolder)
.Where(file => Consts.SupportedArchives.Contains(Path.GetExtension(file)))
.PMap(file => LoadArchive(file));
IndexedFiles = FlattenFiles(IndexedArchives);
Info($"Found {IndexedFiles.Count} files in archives");
}
private List<IndexedArchiveEntry> FlattenFiles(IEnumerable<IndexedArchive> archives)
{
return archives.PMap(e => FlattenArchiveEntries(e, null, new string[0]))
.SelectMany(e => e)
.ToList();
}
private IEnumerable<IndexedArchiveEntry> FlattenArchiveEntries(IndexedArchiveCache archive, string name, string[] path)
{
var new_path = new string[path.Length + 1];
Array.Copy(path, 0, new_path, 0, path.Length);
new_path[path.Length] = path.Length == 0 ? archive.Hash : name;
foreach (var e in archive.Entries)
{
yield return new IndexedArchiveEntry()
{
Path = e.Path,
Size = e.Size,
Hash = e.Hash,
HashPath = new_path
};
}
if (archive.InnerArchives != null) {
foreach (var inner in archive.InnerArchives)
{
foreach (var entry in FlattenArchiveEntries(inner.Value, inner.Key, new_path))
{
yield return entry;
}
}
}
}
private const int ARCHIVE_CONTENTS_VERSION = 1;
private IndexedArchive LoadArchive(string file)
{
TOP:
string metaname = file + ".archive_contents";
var info = new IndexedArchive();
info.File = VFS.Lookup(file);
if (metaname.FileExists() && new FileInfo(metaname).LastWriteTime >= new FileInfo(file).LastWriteTime)
info.Name = Path.GetFileName(file);
var ini_name = file + ".meta";
if (ini_name.FileExists())
{
Status("Loading Archive Index for {0}", Path.GetFileName(file));
var info = metaname.FromJSON<IndexedArchive>();
if (info.Version != ARCHIVE_CONTENTS_VERSION)
{
File.Delete(metaname);
goto TOP;
}
info.Name = Path.GetFileName(file);
info.AbsolutePath = file;
var ini_name = file + ".meta";
if (ini_name.FileExists())
{
info.IniData = ini_name.LoadIniFile();
info.Meta = File.ReadAllText(ini_name);
}
return info;
info.IniData = ini_name.LoadIniFile();
info.Meta = File.ReadAllText(ini_name);
}
IndexArchive(file).ToJSON(metaname);
goto TOP;
return info;
}
private bool IsArchiveFile(string name)
@ -192,89 +135,39 @@ namespace Wabbajack
return false;
}
private IndexedArchiveCache IndexArchive(string file)
{
Status("Indexing {0}", Path.GetFileName(file));
var streams = new Dictionary<string, (SHA256Managed, long)>();
var inner_archives = new Dictionary<string, string>();
FileExtractor.Extract(file, entry =>
{
Stream inner;
if (IsArchiveFile(entry.Name))
{
var name = Path.GetTempFileName() + Path.GetExtension(entry.Name);
inner_archives.Add(entry.Name, name);
inner = File.OpenWrite(name);
}
else
{
inner = Stream.Null;
}
var sha = new SHA256Managed();
var os = new CryptoStream(inner, sha, CryptoStreamMode.Write);
streams.Add(entry.Name, (sha, (long)entry.Size));
return os;
});
var indexed = new IndexedArchiveCache();
indexed.Version = ARCHIVE_CONTENTS_VERSION;
indexed.Hash = file.FileSHA256();
indexed.Entries = streams.Select(entry =>
{
return new IndexedEntry()
{
Hash = entry.Value.Item1.Hash.ToBase64(),
Size = (long)entry.Value.Item2,
Path = entry.Key
};
}).ToList();
streams.Do(e => e.Value.Item1.Dispose());
if (inner_archives.Count > 0)
{
var result = inner_archives.Select(archive =>
{
return (archive.Key, IndexArchive(archive.Value));
}).ToDictionary(e => e.Key, e => e.Item2);
indexed.InnerArchives = result;
inner_archives.Do(e => File.Delete(e.Value));
}
return indexed;
}
public void Compile()
{
VFS.AddRoot(MO2Folder);
VFS.AddRoot(GamePath);
var mo2_files = Directory.EnumerateFiles(MO2Folder, "*", SearchOption.AllDirectories)
.Where(p => p.FileExists())
.Select(p => new RawSourceFile() { Path = p.RelativeTo(MO2Folder), AbsolutePath = p });
.Select(p => new RawSourceFile(VFS.Lookup(p)));
var game_files = Directory.EnumerateFiles(GamePath, "*", SearchOption.AllDirectories)
.Where(p => p.FileExists())
.Select(p => new RawSourceFile() { Path = Path.Combine(Consts.GameFolderFilesDir, p.RelativeTo(GamePath)), AbsolutePath = p });
.Select(p => new RawSourceFile(VFS.Lookup(p)) { Path = Path.Combine(Consts.GameFolderFilesDir, p.RelativeTo(GamePath))});
IndexedArchives = Directory.EnumerateFiles(MO2DownloadsFolder)
.Where(f => Consts.SupportedArchives.Contains(Path.GetExtension(f)))
.Where(f => File.Exists(f + ".meta"))
.Select(f => new IndexedArchive()
{
File = VFS.Lookup(f),
IniData = (f + ".meta").LoadIniFile(),
Meta = File.ReadAllText(f + ".meta")
})
.ToList();
IndexedFiles = IndexedArchives.SelectMany(f => VFS.FilesInArchive(f.File))
.OrderByDescending(f => f.TopLevelArchive.LastModified)
.GroupBy(f => f.Hash)
.ToDictionary(f => f.Key, f => f.AsEnumerable());
Info("Searching for mod files");
AllFiles = mo2_files.Concat(game_files).ToList();
Info("Hashing files");
HashCache cache;
using (cache = new HashCache())
{
AllFiles.PMap(f => {
Status($"Hashing {f.Path}");
try
{
f.LoadHashFromCache(cache);
}
catch (IOException ex) { }
return f;
});
}
Info("Found {0} files to build into mod list", AllFiles.Count);
ExtraFiles = new ConcurrentBag<Directive>();
@ -291,15 +184,11 @@ namespace Wabbajack
.Where(f => f.Item2 != null)
.ToDictionary(f => f.Item1, f => f.Item2);
cache = new HashCache();
var stack = MakeStack(cache);
var stack = MakeStack();
Info("Running Compilation Stack");
var results = AllFiles.PMap(f => RunStack(stack, f)).ToList();
cache.Dispose();
// Add the extra files that were generated by the stack
Info($"Adding {ExtraFiles.Count} that were generated by the stack");
results = results.Concat(ExtraFiles).ToList();
@ -356,7 +245,6 @@ namespace Wabbajack
private void ResetMembers()
{
AllFiles = null;
IndexedArchives = null;
InstallDirectives = null;
SelectedArchives = null;
ExtraFiles = null;
@ -385,42 +273,25 @@ namespace Wabbajack
private void BuildArchivePatches(string archive_sha, IEnumerable<PatchedFromArchive> group, Dictionary<string, string> absolute_paths)
{
var archive = IndexedArchives.First(a => a.Hash == archive_sha);
var paths = group.Select(g => g.FullPath)
.ToHashSet();
var streams = new Dictionary<string, MemoryStream>();
Status($"Extracting {paths.Count} patch files from {archive.Name}");
// First we fetch the source files from the input archive
FileExtractor.DeepExtract(archive.AbsolutePath, group.DistinctBy(f => f.FullPath), (fe, entry) =>
var archive = VFS.HashIndex[archive_sha];
using (var files = VFS.StageWith(group.Select(g => VFS.FileForArchiveHashPath(g.ArchiveHashPath))))
{
if (!paths.Contains(fe.FullPath)) return null;
if (streams.ContainsKey(fe.FullPath)) return null;
var result = new MemoryStream();
streams.Add(fe.FullPath, result);
return result;
}, false);
var extracted = streams.ToDictionary(k => k.Key, v => v.Value.ToArray());
// Now Create the patches
Status("Building Patches for {0}", archive.Name);
Info("Building Patches for {0}", archive.Name);
group.PMap(entry =>
{
Info("Patching {0}", entry.To);
var ss = extracted[entry.FullPath];
using (var origin = new MemoryStream(ss))
using (var output = new MemoryStream())
var by_path = files.GroupBy(f => string.Join("|", f.Paths.Skip(1))).ToDictionary(f => f.Key, f => f.First());
// Now Create the patches
group.PMap(entry =>
{
var a = origin.ReadAll();
var b = LoadDataForTo(entry.To, absolute_paths);
BSDiff.Create(a, b, output);
entry.Patch = output.ToArray().ToBase64();
Info($"Patch size {entry.Patch.Length} for {entry.To}");
}
});
Info("Patching {0}", entry.To);
using (var origin = by_path[string.Join("|", entry.ArchiveHashPath.Skip(1))].OpenRead())
using (var output = new MemoryStream())
{
var a = origin.ReadAll();
var b = LoadDataForTo(entry.To, absolute_paths);
BSDiff.Create(a, b, output);
entry.Patch = output.ToArray().ToBase64();
Info($"Patch size {entry.Patch.Length} for {entry.To}");
}
});
}
}
@ -448,17 +319,20 @@ namespace Wabbajack
private void GatherArchives()
{
Info($"Building a list of archives based on the files required");
var archives = IndexedArchives.GroupBy(a => a.Hash).ToDictionary(k => k.Key, k => k.First());
var shas = InstallDirectives.OfType<FromArchive>()
.Select(a => a.ArchiveHashPath[0])
.Distinct();
var archives = IndexedArchives.OrderByDescending(f => f.File.LastModified)
.GroupBy(f => f.File.Hash)
.ToDictionary(f => f.Key, f => f.First());
SelectedArchives = shas.PMap(sha => ResolveArchive(sha, archives));
}
private Archive ResolveArchive(string sha, Dictionary<string, IndexedArchive> archives)
private Archive ResolveArchive(string sha, IDictionary<string, IndexedArchive> archives)
{
if (archives.TryGetValue(sha, out var found))
{
@ -566,7 +440,7 @@ namespace Wabbajack
}
result.Name = found.Name;
result.Hash = found.Hash;
result.Hash = found.File.Hash;
result.Meta = found.Meta;
return result;
@ -594,7 +468,7 @@ namespace Wabbajack
/// result included into the pack
/// </summary>
/// <returns></returns>
private IEnumerable<Func<RawSourceFile, Directive>> MakeStack(HashCache cache)
private IEnumerable<Func<RawSourceFile, Directive>> MakeStack()
{
Info("Generating compilation stack");
return new List<Func<RawSourceFile, Directive>>()
@ -617,7 +491,7 @@ namespace Wabbajack
IgnoreRegex(Consts.GameFolderFilesDir + "\\\\.*\\.bsa"),
IncludeModIniData(),
DirectMatch(),
DeconstructBSAs(cache), // Deconstruct BSAs before building patches so we don't generate massive patch files
DeconstructBSAs(), // Deconstruct BSAs before building patches so we don't generate massive patch files
IncludePatches(),
IncludeDummyESPs(),
IncludeTaggedFiles(),
@ -728,7 +602,7 @@ namespace Wabbajack
/// all of the files.
/// </summary>
/// <returns></returns>
private Func<RawSourceFile, Directive> DeconstructBSAs(HashCache cache)
private Func<RawSourceFile, Directive> DeconstructBSAs()
{
var include_directly = ModInis.Where(kv => {
var general = kv.Value.General;
@ -771,17 +645,11 @@ namespace Wabbajack
}
}
var hashed = cache.HashBSA(source.AbsolutePath, s => Status(s));
var source_files = hashed.Select(e => new RawSourceFile() {
Hash = e.Item2,
Path = e.Item1,
AbsolutePath = e.Item1
});
var source_files = source.File.FileInArchive;
var stack = default_include ? microstack_with_include : microstack;
var matches = source_files.PMap(e => RunStack(stack, e));
var matches = source_files.PMap(e => RunStack(stack, new RawSourceFile(e)));
var id = Guid.NewGuid().ToString();
@ -823,28 +691,6 @@ namespace Wabbajack
};
}
/// <summary>
/// Given a BSA on disk, index it and return a dictionary of SHA256 -> filename
/// </summary>
/// <param name="absolutePath"></param>
/// <returns></returns>
private List<(string, string)> HashBSA(HashCache cache, string absolutePath)
{
Status($"Hashing BSA: {absolutePath}");
var results = new List<(string, string)>();
using (var a = new BSAReader(absolutePath))
{
a.Files.PMap(entry =>
{
Status($"Hashing BSA: {absolutePath} - {entry.Path}");
var data = entry.GetData();
results.Add((entry.Path, data.SHA256()));
});
}
return results;
}
private Func<RawSourceFile, Directive> IgnoreDisabledMods()
{
var disabled_mods = File.ReadAllLines(Path.Combine(MO2ProfileDir, "modlist.txt"))
@ -865,23 +711,20 @@ namespace Wabbajack
private Func<RawSourceFile, Directive> IncludePatches()
{
var archive_shas = IndexedArchives.GroupBy(e => e.Hash)
.ToDictionary(e => e.Key);
var indexed = (from entry in IndexedFiles
select new { archive = archive_shas[entry.HashPath[0]].First(),
entry = entry })
.GroupBy(e => Path.GetFileName(e.entry.Path).ToLower())
.ToDictionary(e => e.Key);
var indexed = IndexedFiles.Values
.SelectMany(f => f)
.GroupBy(f => Path.GetFileName(f.Paths.Last()))
.ToDictionary(f => f.Key);
return source =>
{
if (indexed.TryGetValue(Path.GetFileName(source.Path.ToLower()), out var value))
{
// TODO: Improve this
var found = value.First();
var e = source.EvolveTo<PatchedFromArchive>();
e.From = found.entry.Path;
e.ArchiveHashPath = found.entry.HashPath;
e.ArchiveHashPath = found.MakeRelativePaths();
e.To = source.Path;
return e;
}
@ -1006,39 +849,21 @@ namespace Wabbajack
private Func<RawSourceFile, Directive> DirectMatch()
{
var archive_shas = IndexedArchives.GroupBy(e => e.Hash)
.ToDictionary(e => e.Key);
if (DirectMatchIndex == null)
{
DirectMatchIndex = IndexedFiles.PMap(entry => {
var archive = archive_shas[entry.HashPath[0]].First();
return new IndexedFileMatch
{
Archive = archive,
Entry = entry,
LastModified = new FileInfo(archive.AbsolutePath).LastAccessTimeUtc
};
})
.OrderByDescending(e => e.LastModified)
.GroupBy(e => e.Entry.Hash)
.ToDictionary(e => e.Key, e => e.AsEnumerable());
}
return source =>
{
if (DirectMatchIndex.TryGetValue(source.Hash, out var found))
if (IndexedFiles.TryGetValue(source.Hash, out var found))
{
var result = source.EvolveTo<FromArchive>();
var match = found.Where(f => Path.GetFileName(f.Entry.Path) == Path.GetFileName(source.Path))
var match = found.Where(f => Path.GetFileName(f.Paths[0]) == Path.GetFileName(source.Path))
.FirstOrDefault();
if (match == null)
match = found.FirstOrDefault();
result.ArchiveHashPath = match.Entry.HashPath;
result.From = match.Entry.Path;
result.ArchiveHashPath = match.MakeRelativePaths();
return result;
}
return null;

View File

@ -4,27 +4,36 @@ using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using VFS;
namespace Wabbajack.Common
namespace Wabbajack
{
public class RawSourceFile
public class RawSourceFile
{
public string AbsolutePath;
public RawSourceFile(VirtualFile file)
{
File = file;
}
public string AbsolutePath
{
get
{
return File.StagedPath;
}
}
public string Path;
private string _hash;
public VirtualFile File { get; private set; }
public string Hash
{
get
{
if (_hash != null) return _hash;
_hash = AbsolutePath.FileSHA256();
return _hash;
return File.Hash;
}
set
{
_hash = value;
}
}
public T EvolveTo<T>() where T : Directive, new()
@ -33,11 +42,6 @@ namespace Wabbajack.Common
v.To = Path;
return v;
}
public void LoadHashFromCache(HashCache cache)
{
_hash = cache.HashFile(AbsolutePath);
}
}
public class ModList
@ -98,10 +102,9 @@ namespace Wabbajack.Common
/// The hash of the source archive this file comes from, followed by the file's relative path segments within it
/// </summary>
public string[] ArchiveHashPath;
/// <summary>
/// The relative path of the file in the archive
/// </summary>
public string From;
[JsonIgnore]
public VirtualFile FromFile;
private string _fullPath = null;
[JsonIgnore]
@ -110,9 +113,7 @@ namespace Wabbajack.Common
get
{
if (_fullPath == null) {
var path = ArchiveHashPath.ToList();
path.Add(From);
_fullPath = String.Join("|", path);
_fullPath = String.Join("|", ArchiveHashPath);
}
return _fullPath;
}
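With the separate From field gone, FullPath for a FromArchive directive is now just the joined hash path. For example (hypothetical values):

var directive = new FromArchive
{
    ArchiveHashPath = new[] { "3fc9b689...", @"armor\iron.dds" }
};
Console.WriteLine(directive.FullPath); // "3fc9b689...|armor\iron.dds"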
@ -216,26 +217,12 @@ namespace Wabbajack.Common
{
}
/// <summary>
/// The indexed contents of an archive
/// </summary>
public class IndexedArchiveCache
{
public string Hash;
public int Version;
public List<IndexedEntry> Entries;
[JsonProperty(NullValueHandling = NullValueHandling.Ignore)]
public Dictionary<string, IndexedArchiveCache> InnerArchives;
}
public class IndexedArchive : IndexedArchiveCache
public class IndexedArchive
{
public dynamic IniData;
public string Name;
public string Meta;
public string AbsolutePath;
public List<string> HashPath;
public VirtualFile File { get; internal set; }
}
/// <summary>

View File

@ -9,12 +9,21 @@ using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using VFS;
using Wabbajack.Common;
namespace Wabbajack
{
public class Installer
{
public VirtualFileSystem VFS
{
get
{
return VirtualFileSystem.VFS;
}
}
public Installer(ModList mod_list, string output_folder, Action<string> log_fn)
{
Outputfolder = output_folder;
@ -88,6 +97,9 @@ namespace Wabbajack
Error("Cannot continue, was unable to download one or more archives");
}
}
PrimeVFS();
BuildFolderStructure();
InstallArchives();
InstallIncludedFiles();
@ -96,6 +108,37 @@ namespace Wabbajack
Info("Installation complete! You may exit the program.");
}
/// <summary>
/// We don't want to make the installer index all the archives; that would be a waste of time. Instead,
/// we pass just enough information to VFS to let it know about the files we have.
/// </summary>
private void PrimeVFS()
{
HashedArchives.Do(a => VFS.AddKnown(new VirtualFile()
{
Paths = new string[] { a.Key },
Hash = a.Value
}));
VFS.RefreshIndexes();
ModList.Directives
.OfType<FromArchive>()
.Do(f =>
{
// Clone keeps the array the same length; the original "Length + 1" left a trailing null slot
var updated_path = (string[])f.ArchiveHashPath.Clone();
updated_path[0] = VFS.HashIndex[updated_path[0]].Where(e => e.IsConcrete).First().FullPath;
VFS.AddKnown(new VirtualFile() { Paths = updated_path });
});
VFS.BackfillMissing();
}
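Concretely, the loop above rewrites each directive's hash root into the on-disk path of the first concrete file with that hash, so VFS can locate the real archive. A sketch with made-up values:

// directive.ArchiveHashPath (from the mod list): { "3fc9b689...", @"armor\iron.dds" }
string[] updated = (string[])directive.ArchiveHashPath.Clone();
updated[0] = @"D:\Downloads\textures.7z"; // first concrete file with hash "3fc9b689..."
VFS.AddKnown(new VirtualFile { Paths = updated });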
private void BuildBSAs()
{
var bsas = ModList.Directives.OfType<CreateBSA>().ToList();
@ -187,18 +230,18 @@ namespace Wabbajack
var files = grouping.GroupBy(e => e.FullPath)
.ToDictionary(e => e.Key);
FileExtractor.DeepExtract(absolutePath, files.Select(f => f.Value.First()),
(fe, entry) =>
var vfiles = files.Select(g =>
{
if (files.TryGetValue(fe.FullPath, out var directives))
{
var directive = directives.First();
var absolute = Path.Combine(Outputfolder, directive.To);
if (absolute.FileExists()) File.Delete(absolute);
return File.OpenWrite(absolute);
}
return null;
});
var first_file = g.Value.First();
var file = VFS.FileForArchiveHashPath(first_file.ArchiveHashPath);
file.StagedPath = first_file.To;
return file;
}).ToList();
VFS.Stage(vfiles);
vfiles.Do(f => f.StagedPath = null);
Status("Copying duplicated files for {0}", archive.Name);

View File

@ -48,6 +48,9 @@ namespace Wabbajack
WorkQueue.Init((id, msg, progress) => context.SetProgress(id, msg, progress),
(max, current) => context.SetQueueSize(max, current));
Utils.SetLoggerFn(s => context.LogMsg(s));
Utils.SetStatusFn((msg, progress) => WorkQueue.Report(msg, progress));
if (DebugMode)
@ -60,7 +63,6 @@ namespace Wabbajack
context.ModListName = compiler.MO2Profile;
context.Mode = "Building";
compiler.LoadArchives();
compiler.Compile();
var modlist = compiler.ModList.ToJSON();

View File

@ -139,6 +139,7 @@
<Generator>MSBuild:Compile</Generator>
<SubType>Designer</SubType>
</ApplicationDefinition>
<Compile Include="Data.cs" />
<Compile Include="LambdaCommand.cs" />
<Compile Include="Themes\LeftMarginMultiplierConverter.cs" />
<Compile Include="Themes\TreeViewItemExtensions.cs" />
@ -200,6 +201,10 @@
<Project>{8aa97f58-5044-4bba-b8d9-a74b6947a660}</Project>
<Name>SevenZipExtractor</Name>
</ProjectReference>
<ProjectReference Include="..\VirtualFileSystem\VirtualFileSystem.csproj">
<Project>{5128b489-bc28-4f66-9f0b-b4565af36cbc}</Project>
<Name>VirtualFileSystem</Name>
</ProjectReference>
<ProjectReference Include="..\Wabbajack.Common\Wabbajack.Common.csproj">
<Project>{b3f3fb6e-b9eb-4f49-9875-d78578bc7ae5}</Project>
<Name>Wabbajack.Common</Name>